Example #1
    def testTzwinTimeOnlyUTCOffset(self):
        # For zones with DST, .utcoffset() should return None
        tw_est = tz.tzwin('Eastern Standard Time')
        self.assertIs(dt_time(14, 10, tzinfo=tw_est).utcoffset(), None)

        # This zone has no DST, so .utcoffset() returns standard offset
        tw_sast = tz.tzwin('South Africa Standard Time')
        self.assertEqual(dt_time(14, 10, tzinfo=tw_sast).utcoffset(),
                         timedelta(hours=2))
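A minimal interactive sketch of the behaviour this test pins down, assuming a Windows host where tz.tzwin is available:

from datetime import datetime, time as dt_time
from dateutil import tz

tw_est = tz.tzwin('Eastern Standard Time')

# A bare time has no date, so the zone cannot decide whether DST applies
# and .utcoffset() comes back as None.
print(dt_time(14, 10, tzinfo=tw_est).utcoffset())                  # None

# A full datetime carries a date, so the offset is well defined.
print(datetime(2021, 1, 15, 14, 10, tzinfo=tw_est).utcoffset())    # -1 day, 19:00:00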
Example #2
    def testTzwinTimeOnlyDST(self):
        # For zones with DST, .dst() should return None
        tw_est = tz.tzwin('Eastern Standard Time')
        self.assertIs(dt_time(14, 10, tzinfo=tw_est).dst(), None)

        # This zone has no DST, so .dst() can return 0
        tw_sast = tz.tzwin('South Africa Standard Time')
        self.assertEqual(dt_time(14, 10, tzinfo=tw_sast).dst(),
                         timedelta(0))
Example #3
    def testTzwinLocalTimeOnlyDST(self):
        # For zones with DST, .dst() should return None
        with TZWinContext('Eastern Standard Time'):
            twl = tz.tzwinlocal()
            self.assertIs(dt_time(14, 10, tzinfo=twl).dst(), None)

        # This zone has no DST, so .dst() can return 0
        with TZWinContext('South Africa Standard Time'):
            twl = tz.tzwinlocal()
            self.assertEqual(dt_time(14, 10, tzinfo=twl).dst(), timedelta(0))
Example #4
    def testTzwinTimeOnlyTZName(self):
        # For zones with DST, the name defaults to standard time
        tw_est = tz.tzwin('Eastern Standard Time')
        self.assertEqual(dt_time(14, 10, tzinfo=tw_est).tzname(),
                         'Eastern Standard Time')

        # For zones with no DST, this should work normally.
        tw_sast = tz.tzwin('South Africa Standard Time')
        self.assertEqual(dt_time(14, 10, tzinfo=tw_sast).tzname(),
                         'South Africa Standard Time')
Example #5
    def testTzwinLocalTimeOnlyUTCOffset(self):
        # For zones with DST, .utcoffset() should return None
        with TZWinContext('Eastern Standard Time'):
            twl = tz.tzwinlocal()
            self.assertIs(dt_time(14, 10, tzinfo=twl).utcoffset(), None)

        # This zone has no DST, so .utcoffset() returns standard offset
        with TZWinContext('South Africa Standard Time'):
            twl = tz.tzwinlocal()
            self.assertEqual(dt_time(14, 10, tzinfo=twl).utcoffset(),
                             timedelta(hours=2))
Example #6
    def testTzwinLocalTimeOnlyTZName(self):
        # For zones with DST, the name defaults to standard time
        with TZWinContext('Eastern Standard Time'):
            twl = tz.tzwinlocal()
            self.assertEqual(dt_time(14, 10, tzinfo=twl).tzname(),
                             'Eastern Standard Time')

        # For zones with no DST, this should work normally.
        with TZWinContext('South Africa Standard Time'):
            twl = tz.tzwinlocal()
            self.assertEqual(dt_time(14, 10, tzinfo=twl).tzname(),
                             'South Africa Standard Time')
Example #7
def parse_iso_time(value):
    #NEEDS-TEST
    match = ISO_TIME_RE.match(value)
    if not match:
        raise InvalidFormat('invalid ISO-8601 time: "{}"'.format(value))

    # split out into time, secs, usecs, and tz
    tmstr = match.group(1)
    secs = match.group(3)
    usecs = match.group(4)
    tzstr = match.group(5)

    try:
        dt_args = time.strptime(tmstr, '%H:%M')[3:5]
    except ValueError:
        raise InvalidDate('invalid time: "{}"'.format(value))

    # append seconds, usecs, and tz
    dt_args += (int(secs) if secs else 0,)
    dt_args += (int(usecs) if usecs else 0,)
    dt_args += (parse_tz(tzstr),)

    try:
        return dt_time(*dt_args)
    except ValueError:
        raise InvalidDate('invalid time: "{}"'.format(value))
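ISO_TIME_RE, InvalidFormat, InvalidDate and parse_tz above are project helpers that are not shown here. For comparison, a minimal sketch of the same kind of parse on Python 3.7+, where the standard library handles most ISO-8601 time strings directly:

from datetime import time as dt_time

t = dt_time.fromisoformat('14:10:30.250000+02:00')
print(t.hour, t.minute, t.second, t.microsecond, t.utcoffset())
# 14 10 30 250000 2:00:00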
Example #8
def setup_logging(log_file):
    # TODO: more advanced filters, logging info like when rooms go live to console
    # https://docs.python.org/3/library/logging.config.html#logging-config-dictschema
    log_backup_time = dt_time(tzinfo=TOKYO_TZ)
    log_filter = logging.Filter(name="showroom")

    file_log_handler = TimedRotatingFileHandler(log_file, encoding='utf8',
                                                when='midnight', atTime=log_backup_time)
    file_log_formatter = logging.Formatter(fmt='%(asctime)s %(name)-12s %(levelname)-8s %(threadName)s:\n%(message)s',
                                           datefmt='%m-%d %H:%M:%S')
    file_log_handler.setFormatter(file_log_formatter)
    # leave this in local time?
    file_log_handler.addFilter(log_filter)
    file_log_handler.setLevel(logging.DEBUG)

    console_handler = logging.StreamHandler()
    console_formatter = logging.Formatter(fmt='%(asctime)s %(message)s', datefmt=HHMM_FMT)
    console_formatter.converter = tokyotime

    console_handler.setLevel(logging.INFO)
    console_handler.setFormatter(console_formatter)
    console_handler.addFilter(log_filter)

    logger = logging.getLogger('showroom')
    logger.setLevel(logging.DEBUG)
    logger.propagate = False

    # at this moment, there shouldn't be any handlers in the showroom logger
    # however, I can't preclude the possibility of there ever being such handlers
    for handler in (file_log_handler, console_handler):
        if handler not in logger.handlers:
            logger.addHandler(handler)
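TOKYO_TZ, HHMM_FMT and tokyotime are assumed to be defined elsewhere in the module; one plausible set of definitions, sketched here with the standard-library zoneinfo (Python 3.9+):

import time
from datetime import datetime
from zoneinfo import ZoneInfo

TOKYO_TZ = ZoneInfo('Asia/Tokyo')
HHMM_FMT = '%H:%M'

def tokyotime(secs=None):
    # logging.Formatter.converter receives a Unix timestamp and must return
    # a time.struct_time; convert it to Tokyo local time here.
    secs = time.time() if secs is None else secs
    return datetime.fromtimestamp(secs, TOKYO_TZ).timetuple()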
Example #9
    def get_strongholds_state(self, callback, clan_id, fields = None):
        """
        return data from WGCCFE backend using `stronghold state API method`_

                .. _stronghold state API method: http://rtd.wargaming.net/docs/wgccfe/en/latest/rst/
                strongholds.html#strongholds-state
        """
        url = '/strongholds/state/'
        get_params = {'clan_id': clan_id}
        return self._request_data(callback, url, get_data=get_params,
                                   converters={'clan_id': int,
                                               'defence_hour': lambda x: (dt_time(x, 0) if x >= 0 else None)})
Example #10
def parse(datetime_string, localize=True):
    _utc_to_local = utc_to_local if localize else lambda x: x

    def _to_int(value):
        if value is None:
            return 0
        return int(value)

    # match time only '00:45:10'
    time_only_match = __RE_MATCH_TIME_ONLY__.match(datetime_string)
    if time_only_match:
        return _utc_to_local(datetime.combine(date.today(),
                                              dt_time(hour=_to_int(time_only_match.group('hour')),
                                                      minute=_to_int(time_only_match.group('minute')),
                                                      second=_to_int(time_only_match.group('second'))))
                             ).time()

    # match date only '2014-11-08'
    date_only_match = __RE_MATCH_DATE_ONLY__.match(datetime_string)
    if date_only_match:
        return _utc_to_local(date(_to_int(date_only_match.group('year')),
                                  _to_int(date_only_match.group('month')),
                                  _to_int(date_only_match.group('day'))))

    # full date time
    date_time_match = __RE_MATCH_DATETIME__.match(datetime_string)
    if date_time_match:
        return _utc_to_local(datetime(_to_int(date_time_match.group('year')),
                                      _to_int(date_time_match.group('month')),
                                      _to_int(date_time_match.group('day')),
                                      _to_int(date_time_match.group('hour')),
                                      _to_int(date_time_match.group('minute')),
                                      _to_int(date_time_match.group('second'))))

    # period - at the moment we support only hours, minutes and seconds (e.g. videos and audio)
    period_match = __RE_MATCH_PERIOD__.match(datetime_string)
    if period_match:
        return timedelta(hours=_to_int(period_match.group('hours')),
                         minutes=_to_int(period_match.group('minutes')),
                         seconds=_to_int(period_match.group('seconds')))

    # abbreviated match
    abbreviated_match = __RE_MATCH_ABBREVIATED__.match(datetime_string)
    if abbreviated_match:
        month = {'Jan': 1, 'Feb': 2, 'Mar': 3, 'Apr': 4, 'May': 5, 'June': 6, 'Jun': 6, 'July': 7, 'Jul': 7, 'Aug': 8,
                 'Sept': 9, 'Sep': 9, 'Oct': 10, 'Nov': 11, 'Dec': 12}
        return _utc_to_local(datetime(year=_to_int(abbreviated_match.group('year')),
                                      month=month[abbreviated_match.group('month')],
                                      day=_to_int(abbreviated_match.group('day')),
                                      hour=_to_int(abbreviated_match.group('hour')),
                                      minute=_to_int(abbreviated_match.group('minute')),
                                      second=_to_int(abbreviated_match.group('second'))))

    raise KodionException("Could not parse iso 8601 timestamp '%s'" % datetime_string)
Example #11
def determine_break_from_interview_time(time_period, interview_type):
    if interview_type != models.InterviewType.ON_SITE:
        return None
    weekday = time_period.start_time.weekday()
    if weekday != 4:  # Friday
        return None

    tz_info = time_period.start_time.tzinfo
    start_of_break = dt_time(12, 0, tzinfo=tz_info)
    end_of_break = dt_time(13, 30, tzinfo=tz_info)

    date = time_period.start_time.date()
    start_of_break_dt = datetime.combine(date, start_of_break)
    end_of_break_dt = datetime.combine(date, end_of_break)
    break_time_period = TimePeriod(
        start_of_break_dt,
        end_of_break_dt,
    )
    if time_period.contains(break_time_period):
        print "Adding break %s" % break_time_period
        return break_time_period
    return None
Example #12
    def cbNotificationTimeChanged(self, configElement=None):
        print "[Birthday Reminder] Changing timer times..."

        timeList = config.plugins.birthdayreminder.notificationTime.value
        notifyTime = dt_time(timeList[0], timeList[1])

        for timer in self.timer_list:
            day = date.fromtimestamp(timer.begin)
            newDateTime = datetime.combine(day, notifyTime)
            timer.begin = int(mktime(newDateTime.timetuple()))
            timer.end = timer.begin - 1

        self.calcNextActivation()
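A minimal standalone sketch of the epoch-conversion pattern used above; mktime() interprets the naive datetime in the machine's local timezone:

from datetime import datetime, date, time as dt_time
from time import mktime

notify_time = dt_time(9, 0)
when = datetime.combine(date(2021, 3, 14), notify_time)
timestamp = int(mktime(when.timetuple()))   # seconds since the epoch, local time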
Example #13
def get_day_end(day, tzinfo=None):
    """Return the latest datetime for a given day.

    :param day: A `date` or `datetime`.
    :param tzinfo: The timezone to display the resulting datetime. Not valid for
                   non-naive `datetime` objects.
    """
    if isinstance(day, datetime):
        if day.tzinfo and tzinfo:
            raise ValueError("datetime is not naive.")
        tzinfo = day.tzinfo
        day = day.date()
    end_dt = datetime.combine(day, dt_time(23, 59))
    return tzinfo.localize(end_dt) if tzinfo else end_dt
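A short usage sketch, assuming a pytz timezone (the .localize() call above implies a pytz-style tzinfo):

import pytz
from datetime import date

berlin = pytz.timezone('Europe/Berlin')
print(get_day_end(date(2021, 6, 1), tzinfo=berlin))
# 2021-06-01 23:59:00+02:00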
Example #14
def parse_isotime(st):
    """
    st - string or Unicode with ISO 8601 time
    """
    m = TIME_PAT.match(st)
    if not m:
        return None
    gd = m.groupdict('0')
    #FIXME: does not handle time zones
    t = dt_time(int(gd['hour']), 
             int(gd['minute']),
             int(gd['second']),
             int(float(u'.' + gd['fract_second'])*1000000),
             )
    return t
Example #15
def get_signups_per_day_for_range(day_from, day_until, still_active=False):
    """
    Return the signups per day for a range of days. Limited to accounts that
    are still active, if "still_active" is True.
    """
    noon = dt_time(12, 0)
    d1 = datetime.combine(day_from, noon).replace(tzinfo=pytz.utc)
    d2 = datetime.combine(day_until, noon).replace(tzinfo=pytz.utc)

    qs = User.objects.filter(date_joined__gte=d1, date_joined__lte=d2)
    if still_active:
        qs = qs.filter(is_active=True, last_login__isnull=False)
    qs = qs.extra({'day': "date(date_joined)"}).values('day')
    qs = qs.annotate(Count('id')).order_by('day')

    return qs
Example #16
    def _get_row(self, row, field, convert, token_type):
        # id 'column' is expected first
        id = row[0]
        # and raw token 'lookup' second
        raw = row[1]
        if type(raw) is date:
            # force convert dates into datetimes... otherwise mongo barfs
            raw = datetime.combine(raw, dt_time()).replace(tzinfo=UTC)
        # convert based on driver defined conversion method
        # and cast to appropriate data type
        if convert:
            tokens = convert(self, raw)
        else:
            tokens = raw
        tokens = type_cast(tokens, token_type)

        return {'id': id, 'field': field, 'tokens': tokens}
Example #17
def format_pretty_date(dt, locale=None, tzinfo=None):
    """Format a date in a pretty way using relative units if possible.

    :param dt: a date or datetime object. if a date is provided, its
               time is assumed to be midnight
    :param locale: the locale to use for formatting
    :param tzinfo: the timezone to use
    """
    if not isinstance(dt, datetime):
        dt = datetime.combine(dt, dt_time())
    return _format_pretty_datetime(dt, locale, tzinfo, {
        'last_day': _(u"'Yesterday'"),
        'same_day': _(u"'Today'"),
        'next_day': _(u"'Tomorrow'"),
        'last_week': _(u"'Last' EEEE"),
        'next_week': _(u"EEEE"),
        'other': _(u"{date_fmt}")
    })
Example #18
    def addTimer(self, entry, preremind=False):
        if preremind:
            print "[Birthday Reminder] Adding preremind timer for", entry[0]
        else:
            print "[Birthday Reminder] Adding birthday timer for", entry[0]

        timeList = config.plugins.birthdayreminder.notificationTime.value
        notifyTime = dt_time(timeList[0], timeList[1])
        now = date.today()
        bDay = entry[1]

        if preremind:
            numDays = int(config.plugins.birthdayreminder.preremind.getValue())
            # set timer to feb 28th for birthdays on feb 29th
            try:
                dateThisYear = date(now.year, bDay.month, bDay.day) - timedelta(numDays)
            except ValueError:  # raised on feb 29th
                dateThisYear = date(now.year, bDay.month, bDay.day - 1) - timedelta(numDays)
        else:
            # set timer to feb 28th for birthdays on feb 29th
            try:
                dateThisYear = date(now.year, bDay.month, bDay.day)
            except ValueError:  # raised on feb 29th
                dateThisYear = date(now.year, bDay.month, bDay.day - 1)

        dateTimeThisYear = datetime.combine(dateThisYear, notifyTime)

        if dateThisYear >= now:  # check if the birthday is in this year
            begin = int(mktime(dateTimeThisYear.timetuple()))
        else:  # birthday is in the past, we need a timer for the next year
            # set timer to feb 28th for birthdays on feb 29th
            try:
                bDayNextYear = dateTimeThisYear.replace(year=dateThisYear.year + 1)
            except ValueError:  # raised on feb 29th
                bDayNextYear = dateTimeThisYear.replace(year=dateThisYear.year + 1, day=dateThisYear.day - 1)

            begin = int(mktime(bDayNextYear.timetuple()))

        end = begin - 1
        timerEntry = BirthdayTimerEntry(begin, end, preremind)
        timerEntry.bDay = entry
        self.addTimerEntry(timerEntry)
Example #19
def create_plane_journey_from_flightradar_data(airports, departure_date):
    """
    We create a fake plane journey with only an approximate eqCO2 figure, to be used by the front-end computation.
    :param airports: dict with 'departure' and 'arrival' airport lists
    :param departure_date: departure datetime
    :return: journey_list
    """
    day_of_week = departure_date.weekday()
    hour_of_day = departure_date.hour
    relevant_flights = _FLIGHTRADAR_DATA[
        _FLIGHTRADAR_DATA.city_sky.isin(airports['departure'])
        & _FLIGHTRADAR_DATA.city_sky_arr.isin(airports['arrival'])]
    relevant_flights = relevant_flights[relevant_flights.day_of_week ==
                                        day_of_week]
    relevant_flights['hour_dep'] = relevant_flights.apply(
        lambda x: dt.strptime(x.hour_dep, '%H:%M:%S') + timedelta(hours=1),
        axis=1)
    relevant_flights['hour_dep_int'] = relevant_flights.apply(
        lambda x: x.hour_dep.hour, axis=1)
    response_flights = pd.DataFrame()
    for airport_dep in airports['departure']:
        for airport_arr in airports['arrival']:
            flights_df = relevant_flights[
                (relevant_flights.city_sky == airport_dep)
                & (relevant_flights.city_sky_arr == airport_arr) &
                (relevant_flights.hour_dep_int >= hour_of_day)]
            response_flights = response_flights.append(flights_df)
    # distance_m = distance(geoloc_dep, geoloc_arrival).m
    response_flights['local_range_km'] = response_flights.apply(
        lambda x: get_range_km(x.distance_m), axis=1)
    response_flights['local_emissions'] = response_flights.apply(
        lambda x: calculate_co2_emissions(
            constants.TYPE_PLANE, constants.DEFAULT_CITY, constants.
            DEFAULT_FUEL, constants.NB_SEATS_TEST, x.local_range_km) *
        constants.DEFAULT_NB_PASSENGERS * x.distance_m,
        axis=1)
    # merge global departure date and flight time to create flight actual departure datetime
    response_flights['flight_departure_date'] = response_flights.apply(
        lambda x: dt.combine(departure_date,
                             dt_time(x.hour_dep.hour, x.hour_dep.minute)),
        axis=1)
    response_flights['flight_arrival_date'] = response_flights.apply(
        lambda x: x.flight_departure_date + timedelta(seconds=x.flight_time_s),
        axis=1)

    journey_list = list()
    for index, flight in response_flights.iterrows():
        lst_sections = list()
        # Add a waiting period of _AIRPORT_WAITING_PERIOD seconds at the airport before departure
        step = tmw.Journey_step(
            0,
            _type=constants.TYPE_WAIT,
            label=
            f'Arrive at the airport {format_timespan(_AIRPORT_WAITING_PERIOD)} before departure',
            distance_m=0,
            duration_s=_AIRPORT_WAITING_PERIOD,
            price_EUR=[],
            gCO2=0,
            departure_point=[flight.latitude, flight.longitude],
            arrival_point=[flight.latitude, flight.longitude],
            departure_date=flight.flight_departure_date -
            timedelta(seconds=_AIRPORT_WAITING_PERIOD),
            arrival_date=flight.flight_departure_date,
            geojson=[],
        )
        lst_sections.append(step)

        step = tmw.Journey_step(
            1,
            _type=constants.TYPE_PLANE,
            label=f'Flight {flight.flight_number} to {flight.airport_to_code}',
            distance_m=flight.distance_m,
            duration_s=flight.flight_time_s,
            price_EUR=[],
            gCO2=flight.local_emissions,
            departure_point=[flight.latitude, flight.longitude],
            arrival_point=[flight.latitude_arr, flight.longitude_arr],
            departure_stop_name=flight.airport_from,
            arrival_stop_name=flight.airport_to_code,
            departure_date=flight.flight_departure_date,
            arrival_date=flight.flight_arrival_date,
            trip_code=flight.flight_number,
            geojson=[],
        )
        lst_sections.append(step)
        departure_date_formated = dt.strptime(
            str(lst_sections[0].departure_date)[0:10], '%Y-%m-%d')
        departure_date_formated = str(departure_date_formated.year)[2:4]+\
                                  ('0'+str(departure_date_formated.month))[-2:]+\
                                  ('0'+str(departure_date_formated.day))[-2:]

        journey_flightradar = tmw.Journey(
            0,
            steps=lst_sections,
            departure_date=lst_sections[0].departure_date,
            arrival_date=lst_sections[-1].arrival_date,
            booking_link=
            f'https://www.skyscanner.fr/transport/vols/{flight.airport_from}/{flight.airport_to_code}/{departure_date_formated}/'
        )
        journey_flightradar.category = [constants.TYPE_PLANE]
        journey_flightradar.update()
        journey_flightradar.is_real_journey = False
        journey_list.append(journey_flightradar)

    return journey_list
Example #20
    def __init__(self, interval, ticker, kwargs):

        if (interval % 5 != 0) or (7200 % interval != 0):
            raise ValueError(
                'interval must be a multiple of 5 secs and can divide 2 hours')

        # ugly!
        tickercsv = ticker + '.csv'
        if not tickercsv in listdir(kwargs['DATA_PATH'] +
                                    (kwargs['TODAY'] -
                                     timedelta(days=1)).strftime('%Y%m%d')):
            raise Exception('no data for %s' % ticker)

        dates = set(listdir(kwargs['DATA_PATH']))

        self.TODAY = kwargs['TODAY']
        # self.TODAY = datetime.strptime(today_for_test, "%Y-%m-%d")  # Tracey to notice
        self.T_START_TIME = kwargs['T_START_TIME']
        self.T_START_SEC = time.mktime(
            datetime.combine(
                self.TODAY, dt_time(hour=9, minute=30, second=0,
                                    microsecond=0)).timetuple())
        # self.T_START_TIME = self.TODAY.replace(hour = 9, minute = 30, second = 0, microsecond = 0)
        self.T_END_TIME = kwargs['T_END_TIME']
        self.T_END_SECS = int(
            (datetime.combine(self.TODAY, self.T_END_TIME) -
             datetime.combine(self.TODAY, self.T_START_TIME)).total_seconds())
        # self.T_END_TIME = self.TODAY.replace(hour = 15, minute = 00, second = 0, microsecond = 0)
        self.LASSO_LAMBDA = kwargs['LASSO_LAMBDA']
        self.N_TICK_THRESHOLD = kwargs['N_TICK_THRESHOLD']  # Tracey to notice
        self.DATA_PATH = kwargs['DATA_PATH']
        # self.DATA_PATH = './data_path/' # Tracey to notice
        self._interval = interval
        self._interval_timedelta = timedelta(seconds=self._interval)
        self._am_n_interval = int(self.HALFTIME.total_seconds() /
                                  self._interval_timedelta.total_seconds())
        self._n_interval = int(self._am_n_interval + ceil((
            (self.T_END_SECS - 60 * 60 * 3.5) / self._interval)))
        self._features_to_train = np.ones((11, 3), dtype=float)  # CA, M, L, A
        self._histo_volume = np.full((10, self._n_interval), 0,
                                     dtype=float)  # historical trading volume
        self._intraday_percentage = [
            1 / self._n_interval
        ] * self._n_interval  # notice .sum() =self._n_interval
        # self._AR_pars = np.array([1,0],dtype =float) # (u and phi)
        self._AR_pars = [0., 1.]
        self._CA_today = 0
        self._predicted_V = 0.
        self._is_V_predicted = 0
        self._last_update = 0
        self._iter = 0
        self._datetime_index = ([
            str(dt) for dt in datetime_range(
                self.T_START_TIME,
                dt_time(hour=11, minute=30, second=0, microsecond=0),
                timedelta(seconds=self._interval))
        ] + [
            str(dt) for dt in datetime_range(
                dt_time(hour=13, minute=0, second=0, microsecond=0),
                self.T_END_TIME, timedelta(seconds=self._interval))
        ])
        self._today_vol = [0.] * self._n_interval
        self._p_per = [0.] * self._n_interval
        self._p_vol = [0] * self._n_interval
        self._cum_vol = 0
        self._VWAP_log = {}

        histo_date = self.TODAY
        past_days = 0
        x_output = np.concatenate(
            (np.arange(0 + self._interval, 7200 + self._interval,
                       self._interval),
             np.arange(12600 + self._interval, self.T_END_SECS,
                       self._interval), np.array([self.T_END_SECS])),
            axis=0)

        iter = 1

        while iter < 11:

            if not bool(dates):
                raise Exception('Insufficient historical data')

            histo_date = histo_date - timedelta(days=1)
            past_days += 1

            # if histo_date.weekday() in set([5,6]):
            #    continue

            histo_date_str = histo_date.strftime("%Y%m%d")
            if histo_date_str not in dates:
                continue
            dates.remove(histo_date_str)

            try:
                dat = pd.read_csv(self.DATA_PATH + histo_date_str + '/' +
                                  tickercsv,
                                  header=0)
            except Exception:
                print 'Error in reading %s for %s, go to the previous day.' % (
                    tickercsv, str(histo_date))
                continue

            if dat.shape[0] < self.N_TICK_THRESHOLD:
                print '%s in %s has few data for prediction' % (
                    tickercsv, str(histo_date))
                continue

            if past_days > 20:
                warnings.warn(
                    'Lack historical data. Time span of data for predicting intraday_volume of today has exceeded 20 days.'
                    'We are using data %d days from today' % past_days)

            try:
                dat.Nano = dat.Nano / 1e9 - time.mktime(
                    datetime.combine(
                        histo_date,
                        dt_time(hour=9, minute=30, second=0,
                                microsecond=0)).timetuple())
                tmp_Volume = np.array(dat.Volume)
                dat.Volume = list(
                    np.append(tmp_Volume[0], tmp_Volume[1:] - tmp_Volume[:-1]))
                dat = dat.as_matrix(columns=['Nano', 'Volume'
                                             ])  # there will be Microsecond
                datCA = dat[dat[:, 0] < 0][:, 1].sum()  # Tracey to notice
                if datCA < 1:  # no data or no trade ?
                    continue

                self._features_to_train[10 - iter, 0] = datCA
                dat = dat[dat[:, 0] > 0]

                # Tracey by reviewing the data from ctp finds it impossible
                if any(7200 < t < 7230 for t in dat[:, 0]):  # tracey_to_notice
                    dat = np.vstack((dat[dat[:, 0] < 7200], [
                        7200, dat[(dat[:, 0] >= 7200) * (dat[:, 0] < 7230),
                                  1].sum()
                    ], dat[dat[:, 0] > 7230]))
                if any(t >= 19800 for t in dat[:, 0]):
                    dat = np.vstack((dat[dat[:, 0] < 19800],
                                     [19800,
                                      dat[dat[:, 0] >= 19800,
                                          1].sum()]))  # tracey to notice

                x_input = np.append(0, dat[:, 0])
                volume_cumsum = np.append(0, dat[:, 1].cumsum())
                # volume_cumsum = np.append(0, dat[:, 1]) # tracey to notice
                y_interp = scipy.interpolate.interp1d(
                    x_input, volume_cumsum)  # ,interval)
                intraday_volume = y_interp(x_output)
                intraday_volume = np.append(
                    intraday_volume[0],
                    (intraday_volume[1:] - intraday_volume[:-1]))
                self._histo_volume[10 - iter] = intraday_volume
            except Exception:
                print 'Error when read %s at %s, you may check its format' % (
                    ticker, histo_date_str)
                continue

            iter += 1

        volume_sums = np.zeros(5, dtype=float)
        while iter < 16:

            if not bool(dates):
                raise Exception('Insufficient historical data')

            histo_date = histo_date - timedelta(days=1)
            past_days += 1

            # if histo_date.weekday() in set([5,6]):
            #     continue

            histo_date_str = histo_date.strftime("%Y%m%d")
            if histo_date_str not in dates:
                continue
            dates.remove(histo_date_str)

            try:
                dat = pd.read_csv(self.DATA_PATH + histo_date_str + '/' +
                                  tickercsv,
                                  header=0)
            except Exception:
                print 'Error in reading %s for %s, go to the previous day.' % (
                    tickercsv, str(histo_date))
                continue

            if dat.shape[0] < self.N_TICK_THRESHOLD:
                print '%s in %s has few data for prediction' % (
                    tickercsv, str(histo_date))
                continue

            if past_days > 30:
                warnings.warn(
                    'Lack efficacious historical data. Time span of data for predicting total trading volume of today has exceeded 30 days.'
                )

            try:
                dat.Nano = dat.Nano / 1e9 - time.mktime(
                    datetime.combine(
                        histo_date,
                        dt_time(hour=9, minute=30, second=0,
                                microsecond=0)).timetuple())
                tmp_Volume = np.array(dat.Volume)
                dat.Volume = list(
                    np.append(tmp_Volume[0], tmp_Volume[1:] - tmp_Volume[:-1]))
                dat = dat.as_matrix(columns=['Nano', 'Volume'
                                             ])  # there will be Microsecond
                volume_sums[15 - iter] = dat[(dat[:, 0] > 0) *
                                             (dat[:, 0] < self.T_END_SECS),
                                             1].sum()
            except Exception:
                print 'Error when read %s at %s, you may check its format' % (
                    ticker, histo_date_str)
                continue

            iter += 1

        # preparing sample for predicting today's total volume
        self.volume_to_train = self._histo_volume.sum(axis=1)
        volume_sums = np.append(volume_sums, self.volume_to_train)
        self._features_to_train[:, 1] = rolling_mean(volume_sums)
        self._features_to_train[:, 2] = rolling_linear(volume_sums)

        # get intraday pattern and intialize intraday prediction
        intraday_mean = self._histo_volume.mean(axis=0)
        self._p_vol[0] = float(intraday_mean[0])
        self._intraday_percentage = list(
            np.divide(intraday_mean, intraday_mean.sum()) * self._n_interval)

        tmp = np.divide(intraday_mean, intraday_mean.sum()) * self._n_interval
        print tmp  # Tracey to notice
        if np.any(tmp < 0.1):
            warnings.warn(
                'adjust intraday trading volume pattern for irregular data')
            tmp[tmp >= 0.1] = tmp[tmp >= 0.1] * (
                self._n_interval - 0.1 * len(tmp[tmp < 0.1])) / sum(
                    tmp[tmp >= 0.1])
            tmp[tmp < 0.1] = 0.1
        self._intraday_percentage = list(tmp)
        self._p_per[0] = self._intraday_percentage[0] / self._n_interval
        self._VWAP_log[self._datetime_index[0]] = get_log(
            None, self._p_vol[0], self._p_per[0])

        # compute AR
        arma = ARMA(self._histo_volume[-1] / self._intraday_percentage,
                    order=(1, 0))
        self._AR_pars = arma.fit().params.tolist()
Example #21
class StagingDataAccessor(base.BaseDataAccessor):
    """
    obtain data directly from stagings
    
    It is not secure to use `StagingDataAccessor` in production.
    
    both a function with the fetchURL signature and the stagings configuration
    should be passed during instantiation
    
    :Example:
    
    >>> from client_request_lib.data_sources.fetcher import fetchURL
    >>> staging_accessor = StagingDataAccessor(
    ...     fetchURL, {'clans': 'http://wgccbe.ru.cwpp.iv/'})
    >>> requester = Requester(staging_accessor)
    >>> requester.login(str, 12312, 'sdfee23e2')
    >>> def printer(*args, **kwargs):
    ...     pprint(args)
    >>> requester.clans.get_account_applications_count_since(printer, 123)
    (
            {'total': 17},
            200,
            0
    )
    
    currently the following backends are supported
    
            - ratings
            - clans
            - spa
            - exporter
            - strongholds
            - global_map
    """
    requests_before_logout = -1

    def __init__(self, url_fetcher, staging_hosts = {}, client_lang = None):
        """
        url_fetcher is a fetch_url callback with the signature below,
        staging_hosts is a dict of staging hosts, for example
        
        :param url_fetcher: fetchURL callback with following signature
                fetchURL(url, callback, headers={}, timeout=30, method='GET', postData='')
        :param staging_hosts: stagings hosts with backend name as a key
        :type url_fetcher: function
        :type staging_hosts: dict
        
        :Example:
        
        >>> from client_request_lib.data_sources.fetcher import fetchURL
        >>> staging_accessor = StagingDataAccessor(
        ...     fetchURL,
        ...     {
        ...         'ratings': 'http://wgrs.clan0101.wott.iv/',
        ...         'exporter': 'http://exp.clan0101.wott.iv/',
        ...         'global_map': 'https://wgcwx.clan0101.wott.iv/wgapi/',
        ...         'clans': 'http://wgccbe.clan0101.wott.iv/',
        ...         'spa': 'http://spa.clan0101.wott.iv/',
        ...         'strongholds': 'http://wgccfe.clan0101.wgnt.iv/clans/api/',
        ...     }
        ... )
        
        """
        self.client_lang = client_lang
        self._account = None
        self.url_fetcher = url_fetcher
        self.staging_hosts = staging_hosts
        return

    def login(self, callback, account_id, spa_token):
        self._account = account_id
        result, status_code = ('ok', 200)
        response_code = exceptions.ResponseCodes.NO_ERRORS
        callback(result, status_code, response_code)

    def logout(self, callback):
        self._account = None
        result, status_code = ('ok', 200)
        response_code = exceptions.ResponseCodes.NO_ERRORS
        callback(result, status_code, response_code)
        return

    def get_alive_status(self, callback):
        result, status_code = {'status': 'I am alive!'}, 200
        response_code = exceptions.ResponseCodes.NO_ERRORS
        callback(result, status_code, response_code)

    def _request_data(self, callback, service, url, method = 'GET', postData = None):
        service_host = self.staging_hosts[service].strip('/')
        url = '/'.join([service_host] + url.strip('/').split('/'))
        if '?' not in url:
            url = url + '/'
        args = [None, 30.0, method]
        if postData:
            args.append(json.dumps(postData))
        self.url_fetcher(url, callback, *args)
        return

    @mapped_fields({'efficiency': 'efficiency',
     'clan_id': 'clan_id',
     'battles_count_avg': 'battles_count_avg',
     'wins_ratio_avg': 'wins_ratio_avg',
     'xp_avg': 'xp_avg',
     'gm_elo_rating_6': 'gm_elo_rating_6',
     'gm_elo_rating_8': 'gm_elo_rating_8',
     'gm_elo_rating_10': 'gm_elo_rating_10',
     'gm_elo_rating_6_rank': 'gm_elo_rating_6_rank',
     'gm_elo_rating_8_rank': 'gm_elo_rating_8_rank',
     'gm_elo_rating_10_rank': 'gm_elo_rating_10_rank',
     'fb_elo_rating_8': 'fb_elo_rating_8',
     'fb_elo_rating_10': 'fb_elo_rating_10',
     'fb_battles_count_10_28d': 'fb_battles_count_10_28d',
     'fs_battles_count_10_28d': 'fs_battles_count_10_28d',
     'gm_battles_count_28d': 'gm_battles_count_28d',
     'fs_battles_count_28d': 'fs_battles_count_28d',
     'fb_battles_count_28d': 'fb_battles_count_28d'})
    def get_clans_ratings(self, callback, clan_ids, fields = None):
        """
        return data from ratings backend using `bulks API method`_
        
                .. _bulks API method: http://rtd.wargaming.net/docs/wgrs-api/en/latest/clans.html#bulks
        """
        get_params = {'project': 'api',
         'fields': ','.join(fields),
         'ids': ','.join(map(str, clan_ids))}
        url = 'api/wot/clans/bulks/?%s' % urlencode(get_params)

        @preprocess_callback(callback, 'ratings')
        def inner_callback(data):
            return data['data']

        return self._request_data(inner_callback, 'ratings', url)

    @convert_data({'created_at': from_iso})
    @mapped_fields({'name': 'name',
     'tag': 'tag',
     'motto': 'motto',
     'leader_id': 'leader_id',
     'members_count': 'members_count',
     'created_at': 'created_at',
     'clan_id': 'id',
     'treasury': 'treasury',
     'accepts_join_requests': 'accepts_join_requests'})
    def get_clans_info(self, callback, clan_ids, fields = None):
        """
        return data from WGCCBE backend using `clans API method`_
        
                .. _clans API method: http://rtd.wargaming.net/docs/wgccbe/en/latest/api-common/clans.html
        """
        get_params = {'ids': ','.join(map(str, clan_ids)),
         'fields': ','.join(fields)}
        url = '/clans/?%s' % urlencode(get_params)

        @preprocess_callback(callback, 'clans')
        def inner_callback(data):
            return data['items']

        return self._request_data(inner_callback, 'clans', url)

    @mapped_fields({'id': 'id',
     'name': 'name'})
    def get_accounts_names(self, callback, account_ids, fields = None):
        """
        return data from SPA backend using `account id/name mappings API method`_
        
                .. _account id/name mappings API method: https://confluence.wargaming.net/display/
                WEBDEV/%5BWGNSPA%5D+-+SPA+HTTP+API+Examples#id-[WGNSPA]-SPAHTTPAPIExamples-Byids
        """
        get_params = {'id': account_ids}
        url = '/spa/accounts/names/?%s' % urlencode(get_params, doseq=True)

        @preprocess_callback(callback, 'spa')
        def inner_callback(data):
            return [ {'id': k,
             'name': v} for k, v in data.iteritems() ]

        return self._request_data(inner_callback, 'spa', url)

    @convert_data({'joined_at': from_iso})
    @mapped_fields({'account_id': 'id',
     'joined_at': 'joined_at',
     'clan_id': 'clan_id',
     'role_bw_flag': 'role.bw_flag',
     'role_name': 'role.name'})
    def get_clan_members(self, callback, clan_id, fields = None):
        """
        return data from WGCCBE backend using `clan members API method`_
        
                .. _clan members API method: http://rtd.wargaming.net/docs/wgccbe/en/latest/api-common/
                clans_id_members.html
        """
        get_params = {'fields': ','.join(fields)}
        url = '/clans/%s/members?%s' % (clan_id, urlencode(get_params))
        return self._request_data(preprocess_callback(callback, 'clans'), 'clans', url)

    @convert_data({'favorite_primetime': lambda x: x and datetime.strptime(x, '%H:%M').time()})
    @mapped_fields({'favorite_arena_6': 'favorite_arena_6',
     'favorite_arena_8': 'favorite_arena_8',
     'favorite_arena_10': 'favorite_arena_10',
     'clan_id': 'clan_id',
     'favorite_primetime': 'favorite_primetime'})
    def get_clan_favorite_attributes(self, callback, clan_id, fields = None):
        """
        return data from WGCCBE backend using `favorite_attributes API method`_
        
                .. _favorite_attributes API method: http://rtd.wargaming.net/docs/wgccbe/en/latest/statistics/
                favorite_attributes.html
        """
        url = '/gm/clans/%s/favorite_attributes' % clan_id

        @preprocess_callback(callback, 'clans')
        def inner_callback(backend_data):
            result = {}
            for field in ['clan_id', 'favorite_primetime']:
                if field in backend_data:
                    result[field] = backend_data[field]

            for data in backend_data.get('favorite_arenas', []):
                if data.get('frontlevel') in (6, 8, 10) and 'arena' in data:
                    result['favorite_arena_{}'.format(data['frontlevel'])] = data['arena']

            return result

        return self._request_data(inner_callback, 'clans', url)

    @convert_data({'joined_at': from_iso,
     'in_clan_cooldown_till': from_iso})
    @mapped_fields({'account_id': 'id',
     'joined_at': 'joined_at',
     'clan_id': 'clan_id',
     'role_bw_flag': 'role.bw_flag',
     'role_name': 'role.name',
     'in_clan_cooldown_till': 'in_clan_cooldown_till'})
    def get_accounts_clans(self, callback, account_ids, fields = None):
        """
        return data from WGCCBE backend using `accounts API method`_
        
                .. _accounts API method: http://rtd.wargaming.net/docs/wgccbe/en/latest/api-common/accounts.html
        """
        get_params = {'fields': ','.join(fields),
         'ids': ','.join(map(str, account_ids))}
        url = '/accounts/?%s' % urlencode(get_params)

        @preprocess_callback(callback, 'clans')
        def inner_callback(data):
            return data['items']

        return self._request_data(inner_callback, 'clans', url)

    @mapped_fields({'total': 'total'}, accept_fields_argument=False)
    def get_account_applications_count_since(self, callback, account_id, since = None):
        """
        return data from WGCCBE backend using `applications API method`_
        
                .. _applications API method: http://rtd.wargaming.net/docs/wgccbe/en/latest/wotx/applications.html
        """
        get_params = {'fields': 'id',
         'account_id': account_id,
         'created_after': since.isoformat()}
        url = '/applications/?%s' % urlencode(get_params)
        return self._request_data(preprocess_callback(callback, 'clans'), 'clans', url)

    @mapped_fields({'total': 'total'}, accept_fields_argument=False)
    def get_clan_invites_count_since(self, callback, clan_id, since = None):
        """
        return data from WGCCBE backend using `invites API method`_
        
                .. _invites API method: http://rtd.wargaming.net/docs/wgccbe/en/latest/wotx/invites.html
        """
        get_params = {'fields': 'id',
         'clan_id': clan_id,
         'created_after': since.isoformat()}
        url = '/invites/?%s' % urlencode(get_params)
        return self._request_data(preprocess_callback(callback, 'clans'), 'clans', url)

    @convert_data({'created_at': from_iso,
     'updated_at': from_iso}, paginated=True)
    @mapped_fields({'status': 'status',
     'created_at': 'created_at',
     'updated_at': 'updated_at',
     'sender_id': 'sender_id',
     'id': 'id',
     'account_id': 'account_id',
     'clan_id': 'clan_id',
     'comment': 'data.comment',
     'status_changer_id': 'data.status_changer_id'}, paginated=True)
    def get_account_applications(self, callback, fields = None, statuses = None, get_total_count = False, limit = 18, offset = 0):
        """
        return data from WGCCBE backend using `applications API method`_
        
                .. _applications API method: http://rtd.wargaming.net/docs/wgccbe/en/latest/wotx/applications.html
        """
        statuses = statuses or ['active',
         'declined',
         'accepted',
         'expired',
         'error',
         'deleted']
        get_params = {'fields': ','.join(fields),
         'account_id': self._account,
         'statuses': ','.join(statuses),
         'limit': limit,
         'offset': offset}
        url = '/applications/?%s' % urlencode(get_params)
        return self._request_data(preprocess_callback(callback, 'clans'), 'clans', url)

    @convert_data({'created_at': from_iso,
     'updated_at': from_iso}, paginated=True)
    @mapped_fields({'status': 'status',
     'created_at': 'created_at',
     'updated_at': 'updated_at',
     'sender_id': 'sender_id',
     'id': 'id',
     'account_id': 'account_id',
     'clan_id': 'clan_id',
     'comment': 'data.comment',
     'status_changer_id': 'data.status_changer_id'}, paginated=True)
    def get_clan_applications(self, callback, clan_id, fields = None, statuses = None, get_total_count = False, limit = 18, offset = 0):
        """
        return data from WGCCBE backend using `applications API method`_
        
                .. _applications API method: http://rtd.wargaming.net/docs/wgccbe/en/latest/wotx/applications.html
        """
        statuses = statuses or ['active',
         'declined',
         'accepted',
         'expired',
         'error',
         'deleted']
        get_params = {'fields': ','.join(fields),
         'clan_id': clan_id,
         'statuses': ','.join(statuses),
         'limit': limit,
         'offset': offset}
        url = '/applications/?%s' % urlencode(get_params)
        return self._request_data(preprocess_callback(callback, 'clans'), 'clans', url)

    @mapped_fields({'clan_id': 'clan_id',
     'id': 'id',
     'account_id': 'account_id'})
    def create_applications(self, callback, clan_ids, comment, fields = None):
        """
        create applications for accounts into clan using `create applications API method`_
                .. _create applications API method: http://rtd.wargaming.net/docs/wgccbe/en/latest/wotx/
                applications.html
        """
        url = '/applications/'
        data = {'account_id': self._account,
         'clan_ids': clan_ids,
         'comment': comment}

        @preprocess_callback(callback, 'clans')
        def inner_callback(data):
            return data.values()

        return self._request_data(inner_callback, 'clans', url, method='POST', postData=data)

    @mapped_fields({'transaction_id': 'transaction_id',
     'id': 'id',
     'account_id': 'account_id',
     'clan_id': 'clan_id'})
    def accept_application(self, callback, application_id, fields = None):
        """
        accept application for accounts into clan using `accept applications API method`_
                .. _accept applications API method: http://rtd.wargaming.net/docs/wgccbe/en/latest/wotx/
                applications_id.html
        """
        url = '/applications/%s/' % application_id
        data = {'initiator_id': self._account,
         'status': 'accepted'}

        @preprocess_callback(callback, 'clans')
        def inner_callback(data):
            data = data or {}
            data['account_id'] = data.pop('account_ids')[0]
            data['id'] = application_id
            return data

        return self._request_data(inner_callback, 'clans', url, method='PATCH', postData=data)

    @mapped_fields({'transaction_id': 'transaction_id',
     'id': 'id',
     'account_id': 'account_id',
     'clan_id': 'clan_id'})
    def decline_application(self, callback, application_id, fields = None):
        """
        decline application for accounts into clan using `decline applications API method`_
                .. _decline applications API method: http://rtd.wargaming.net/docs/wgccbe/en/latest/wotx/
                applications_id.html
        """
        url = '/applications/%s/' % application_id
        data = {'initiator_id': self._account,
         'status': 'declined'}

        @preprocess_callback(callback, 'clans')
        def inner_callback(data):
            data = data or {}
            data['id'] = application_id
            return data

        return self._request_data(inner_callback, 'clans', url, method='PATCH', postData=data)

    @mapped_fields({'clan_id': 'clan_id',
     'id': 'id',
     'account_id': 'account_id'})
    def create_invites(self, callback, clan_id, account_ids, comment, fields = None):
        """
        create applications for accounts into clan using `create invites API method`_
                .. _create invites API method: http://rtd.wargaming.net/docs/wgccbe/en/latest/wotx/invites.html
        """
        url = '/invites/'
        data = {'initiator_id': self._account,
         'clan_id': clan_id,
         'account_ids': account_ids,
         'comment': comment}

        @preprocess_callback(callback, 'clans')
        def inner_callback(data):
            return data.values()

        return self._request_data(inner_callback, 'clans', url, method='POST', postData=data)

    @mapped_fields({'transaction_id': 'transaction_id',
     'id': 'id',
     'account_id': 'account_id',
     'clan_id': 'clan_id'})
    def accept_invite(self, callback, invite_id, fields = None):
        """
        accept application for accounts into clan using `accept invite API method`_
                .. _accept invite API method: http://rtd.wargaming.net/docs/wgccbe/en/latest/wotx/invites_id.html
        """
        url = '/invites/%s/' % invite_id
        data = {'initiator_id': self._account,
         'status': 'accepted'}

        @preprocess_callback(callback, 'clans')
        def inner_callback(data):
            data = data or {}
            data['account_id'] = data.pop('account_ids')[0]
            data['id'] = invite_id
            return data

        return self._request_data(inner_callback, 'clans', url, method='PATCH', postData=data)

    @mapped_fields({'transaction_id': 'transaction_id',
     'id': 'id',
     'account_id': 'account_id',
     'clan_id': 'clan_id'})
    def decline_invite(self, callback, invite_id, fields = None):
        """
        decline application for accounts into clan using `decline invites API method`_
                .. _decline invites API method: http://rtd.wargaming.net/docs/wgccbe/en/latest/wotx/invites_id.html
        """
        url = '/invites/%s/' % invite_id
        data = {'initiator_id': self._account,
         'status': 'declined'}

        @preprocess_callback(callback, 'clans')
        def inner_callback(data):
            data = data or {}
            data['id'] = invite_id
            return data

        return self._request_data(inner_callback, 'clans', url, method='PATCH', postData=data)

    @mapped_fields({'id': 'id',
     'clan_id': 'clan_id',
     'account_id': 'account_id'})
    def bulk_decline_invites(self, callback, invite_ids, fields = None):
        """
        decline invites for clan using `decline invites API method`_
                .. _decline invites API method: http://rtd.wargaming.net/docs/wgccbe/en/latest/wgcc/invites.html#patch
        """
        url = '/invites/'
        data = {'initiator_id': self._account,
         'status': 'declined',
         'ids': invite_ids}

        @preprocess_callback(callback, 'clans')
        def inner_callback(data):
            data = data and data['items'] or {}
            return data

        return self._request_data(inner_callback, 'clans', url, method='PATCH', postData=data)

    @convert_data({'created_at': from_iso}, paginated=True)
    @mapped_fields({'name': 'name',
     'tag': 'tag',
     'motto': 'motto',
     'leader_id': 'leader_id',
     'members_count': 'members_count',
     'created_at': 'created_at',
     'clan_id': 'id',
     'treasury': 'treasury',
     'accepts_join_requests': 'accepts_join_requests'}, paginated=True)
    def search_clans(self, callback, search, get_total_count = False, fields = None, offset = 0, limit = 18):
        """
        return data from WGCCBE backend using `clans API method`_
        
                .. _clans API method: http://rtd.wargaming.net/docs/wgccbe/en/latest/api-common/clans.html
        """
        get_params = {'search': search,
         'game': 'wot',
         'fields': ','.join(fields),
         'limit': limit,
         'offset': offset}
        url = '/clans/search/?%s' % urlencode(get_params)
        return self._request_data(preprocess_callback(callback, 'clans'), 'clans', url)

    @convert_data({'created_at': from_iso}, paginated=True)
    @mapped_fields({'name': 'name',
     'tag': 'tag',
     'motto': 'motto',
     'leader_id': 'leader_id',
     'members_count': 'members_count',
     'created_at': 'created_at',
     'clan_id': 'id',
     'treasury': 'treasury',
     'accepts_join_requests': 'accepts_join_requests'}, paginated=True)
    def get_recommended_clans(self, callback, get_total_count = False, fields = None, offset = 0, limit = 18):
        """
        return data from WGCCBE backend using `clans API method`_
        
                .. _clans API method: http://rtd.wargaming.net/docs/wgccbe/en/latest/api-common/clans.html
        """
        get_params = {'game': 'wot',
         'fields': ','.join(fields),
         'limit': limit,
         'offset': offset}
        url = '/clans/?%s' % urlencode(get_params)
        return self._request_data(preprocess_callback(callback, 'clans'), 'clans', url)

    @convert_data({'created_at': from_iso,
     'updated_at': from_iso}, paginated=True)
    @mapped_fields({'status': 'status',
     'created_at': 'created_at',
     'updated_at': 'updated_at',
     'sender_id': 'sender_id',
     'id': 'id',
     'account_id': 'account_id',
     'clan_id': 'clan_id',
     'comment': 'data.comment',
     'status_changer_id': 'data.status_changer_id'}, paginated=True)
    def get_clan_invites(self, callback, clan_id, fields = None, statuses = None, get_total_count = False, limit = 18, offset = 0):
        """
        return data from WGCCBE backend using `invites API method`_
        
                .. _invites API method: http://rtd.wargaming.net/docs/wgccbe/en/latest/wotx/invites.html
        """
        statuses = statuses or ['active',
         'declined',
         'accepted',
         'expired',
         'error',
         'deleted']
        get_params = {'fields': ','.join(fields),
         'clan_id': clan_id,
         'statuses': ','.join(statuses),
         'limit': limit,
         'offset': offset}
        url = '/invites/?%s' % urlencode(get_params)
        return self._request_data(preprocess_callback(callback, 'clans'), 'clans', url)

    @convert_data({'created_at': from_iso,
     'updated_at': from_iso}, paginated=True)
    @mapped_fields({'status': 'status',
     'created_at': 'created_at',
     'updated_at': 'updated_at',
     'sender_id': 'sender_id',
     'id': 'id',
     'account_id': 'account_id',
     'clan_id': 'clan_id',
     'comment': 'data.comment',
     'status_changer_id': 'data.status_changer_id'}, paginated=True)
    def get_account_invites(self, callback, fields = None, statuses = None, get_total_count = False, limit = 18, offset = 0):
        """
        return data from WGCCBE backend using `invites API method`_
        
                .. _invites API method: http://rtd.wargaming.net/docs/wgccbe/en/latest/wotx/invites.html
        """
        statuses = statuses or ['active',
         'declined',
         'accepted',
         'expired',
         'error',
         'deleted']
        get_params = {'fields': ','.join(fields),
         'account_id': self._account,
         'statuses': ','.join(statuses),
         'limit': limit,
         'offset': offset}
        url = '/invites/?%s' % urlencode(get_params)
        return self._request_data(preprocess_callback(callback, 'clans'), 'clans', url)

    @mapped_fields({'global_rating': 'summary.global_rating',
     'battle_avg_xp': 'summary.battle_avg_xp',
     'battles_count': 'summary.battles_count',
     'battle_avg_performance': 'summary.battle_avg_performance',
     'xp_amount': 'summary.xp_amount',
     'account_id': 'account_id'})
    def get_accounts_info(self, callback, account_ids, fields = None):
        """
        return data from exporter backend using `accounts detailed information`_
        
                .. _accounts detailed information: http://rtd.wargaming.net/docs/exporter/en/latest/
                api_wot.html#accounts-detailed-information
        """
        fields = [ i.split('.', 1) for i in fields if i != 'account_id' ]
        grouped = groupby(sorted(fields), key=lambda x: x[0])
        sections = [ '%s[%s]' % (k, ','.join([ j[1] for j in v ])) for k, v in grouped ]
        get_params = {'account_ids': ','.join(map(str, account_ids)),
         'sections': ','.join(sections)}
        url = '/wot/accounts/?%s' % urlencode(get_params)

        @preprocess_callback(callback, 'exporter')
        def inner_callback(data):
            new_data = []
            for account_id, values in data.items():
                values['account_id'] = account_id
                new_data.append(values)

            return new_data

        return self._request_data(inner_callback, 'exporter', url)

    @convert_data({'pillage_end_datetime': from_iso,
     'prime_time': lambda x: x and datetime.strptime(x, '%H:%M').time()})
    @mapped_fields({'front_name': 'frontname',
     'province_id': 'province_id',
     'front_name_localized': 'frontname_localized',
     'province_id_localized': 'province_id_localized',
     'revenue': 'daily_revenue',
     'hq_connected': 'hq_connected',
     'prime_time': 'primetime',
     'game_map': 'game_map',
     'periphery': 'periphery_id',
     'turns_owned': 'turns_owned',
     'pillage_cooldown': 'pillage_cooldown',
     'pillage_end_datetime': 'pillage_end_datetime',
     'arena_id': 'arena_id'})
    def get_clan_provinces(self, callback, clan_id, fields = None):
        """
        Return data from the WGCW backend using the `clans provinces API method`_.

        .. _clans provinces API method: http://rtd.wargaming.net/docs/wgcw/en/latest/api/wgapi.html?highlight=stats#clans-provinces
        """
        get_params = {'clans': ','.join(map(str, [clan_id]))}
        url = '/clans/provinces/?%s' % urlencode(get_params)

        @preprocess_callback(callback, 'global_map')
        def inner_callback(data):
            res = data['clans'] and data['clans'][0]['provinces']
            for i in res:
                i['frontname_localized'] = i['frontname']
                i['province_id_localized'] = i['province_id']

            return res

        return self._request_data(inner_callback, 'global_map', url)

    @mapped_fields({'battles_lost': 'battles_lost',
     'battles_played': 'battles_played',
     'battles_played_on_10_level': 'battles_played_on_10_level',
     'battles_played_on_6_level': 'battles_played_on_6_level',
     'battles_played_on_8_level': 'battles_played_on_8_level',
     'battles_won': 'battles_won',
     'battles_won_on_10_level': 'battles_won_on_10_level',
     'battles_won_on_6_level': 'battles_won_on_6_level',
     'battles_won_on_8_level': 'battles_won_on_8_level',
     'influence_points': 'influence_points',
     'provinces_captured': 'provinces_captured',
     'provinces_count': 'provinces_count'})
    def get_clan_globalmap_stats(self, callback, clan_id, fields = None):
        """
        Return data from the WGCW backend using the `clans stats API method`_.

        .. _clans stats API method: http://rtd.wargaming.net/docs/wgcw/en/latest/api/wgapi.html?highlight=stats#clans-stats
        """
        get_params = {'clans': ','.join(map(str, [clan_id]))}
        url = '/clans/stats?%s' % urlencode(get_params)

        @preprocess_callback(callback, 'global_map')
        def inner_callback(data):
            return data['clans'][0]['stats']

        return self._request_data(inner_callback, 'global_map', url)

    @mapped_fields({'front_name': 'id',
     'front_name_localized': 'id_localized',
     'min_vehicle_level': 'min_vehicle_level',
     'max_vehicle_level': 'max_vehicle_level'})
    def get_fronts_info(self, callback, front_names = None, fields = None):
        """
        Return data from the WGCW backend using the `fronts info API method`_.

        .. _fronts info API method: http://rtd.wargaming.net/docs/wgcw/en/latest/api/wgapi.html?highlight=stats#id1
        """
        url = '/fronts/'

        @preprocess_callback(callback, 'global_map')
        def inner_callback(data):
            res = data['fronts']
            for i in res:
                i['id_localized'] = i['id']

            return res

        return self._request_data(inner_callback, 'global_map', url)

    @convert_data({'defence_hour': lambda x: (dt_time(x, 0) if x >= 0 else None)})
    @mapped_fields({'buildings.direction': 'buildings.direction',
     'buildings.type': 'buildings.type',
     'buildings.level': 'buildings.level',
     'buildings.position': 'buildings.position',
     'defence_attack_efficiency': 'defence_attack_efficiency',
     'defence_battles_count': 'defence_battles_count',
     'defence_capture_enemy_building_total_count': 'defence_capture_enemy_building_total_count',
     'defence_combat_wins': 'defence_combat_wins',
     'defence_defence_efficiency': 'defence_defence_efficiency',
     'defence_enemy_base_capture_count': 'defence_enemy_base_capture_count',
     'defence_loss_own_building_total_count': 'defence_loss_own_building_total_count',
     'defence_resource_capture_count': 'defence_resource_capture_count',
     'defence_resource_loss_count': 'defence_resource_loss_count',
     'sortie_absolute_battles_count': 'sortie_absolute_battles_count',
     'sortie_battles_count': 'sortie_battles_count',
     'sortie_champion_battles_count': 'sortie_champion_battles_count',
     'sortie_middle_battles_count': 'sortie_middle_battles_count',
     'defence_attack_count': 'defence_attack_count',
     'defence_defence_count': 'defence_defence_count',
     'defence_success_attack_count': 'defence_success_attack_count',
     'defence_success_defence_count': 'defence_success_defence_count',
     'sortie_fort_resource_in_absolute': 'sortie_fort_resource_in_absolute',
     'sortie_fort_resource_in_champion': 'sortie_fort_resource_in_champion',
     'sortie_fort_resource_in_middle': 'sortie_fort_resource_in_middle',
     'sortie_losses': 'sortie_losses',
     'sortie_wins': 'sortie_wins',
     'level': 'level',
     'defence_hour': 'defence_hour',
     'defence_mode_is_activated': 'defence_mode_is_activated',
     'fb_battles_count_10': 'fb_battles_count_10',
     'fb_battles_count_8': 'fb_battles_count_8',
     'total_resource_amount': 'total_resource_amount'})
    def get_stronghold_info(self, callback, clan_id, fields = None):
        """
        Return data from the WGCCFE backend using the `stronghold info API method`_.

        .. _stronghold info API method: http://rtd.wargaming.net/docs/wgccfe/en/latest/rst/strongholds.html#strongholds-clan-id
        """
        get_params = urlencode({'performer_id': self._account})
        try:
            clan_id = int(clan_id)
        except (TypeError, ValueError):
            error = exceptions.BadRequest()
            return callback({'description': error.description}, error.status_code, error.response_code)

        url = 'api/strongholds/%s/' % clan_id
        if self._account:
            url = '?'.join([url, get_params])

        @preprocess_callback(callback, 'strongholds')
        def inner_callback(data):
            return data['stronghold']

        return self._request_data(inner_callback, 'strongholds', url)

    @convert_data({'vacation_finish': timestamp_to_datetime,
     'vacation_start': timestamp_to_datetime})
    @mapped_fields({'buildings.type': 'buildings.type',
     'buildings.hp': 'buildings.hp',
     'buildings.direction': 'buildings.direction',
     'buildings.position': 'buildings.position',
     'buildings.storage': 'buildings.resource_amount',
     'buildings.level': 'buildings.level',
     'buildings_count': 'buildings_count',
     'clan_id': 'clan_id',
     'level': 'level',
     'clan_name': 'clan_name',
     'clan_tag': 'clan_tag',
     'directions': 'directions',
     'directions_count': 'directions_count',
     'off_day': 'off_day',
     'periphery_id': 'periphery_id',
     'vacation_finish': 'vacation_finish',
     'vacation_start': 'vacation_start',
     'sortie_wins_period': 'sortie_wins_period',
     'sortie_battles_wins_percentage_period': 'sortie_battles_wins_percentage_period',
     'sortie_battles_count_period': 'sortie_battles_count_period',
     'defence_battles_count_period': 'defence_battles_count_period'})
    def get_strongholds_statistics(self, callback, clan_id, fields = None):
        """
        Return data from the WGCCFE backend using the `stronghold statistics API method`_.

        .. _stronghold statistics API method: http://rtd.wargaming.net/docs/wgccfe/en/latest/rst/strongholds.html#strongholds-statistics-clan-id
        """
        get_params = urlencode({'performer_id': self._account})
        try:
            clan_id = int(clan_id)
        except (TypeError, ValueError):
            error = exceptions.BadRequest()
            return callback({'description': error.description}, error.status_code, error.response_code)

        url = '/api/strongholds/statistics/%s/' % clan_id
        if self._account:
            url = '?'.join([url, get_params])

        @preprocess_callback(callback, 'strongholds')
        def inner_callback(data):
            return data[0]

        return self._request_data(inner_callback, 'strongholds', url)

    @convert_data({'defence_hour': lambda x: (dt_time(x, 0) if x >= 0 else None)})
    @mapped_fields({'clan_id': 'clan_id',
     'defence_hour': 'defence_hour'})
    def get_strongholds_state(self, callback, clan_id, fields = None):
        """
        Return data from the WGCCFE backend using the `stronghold state API method`_.

        .. _stronghold state API method: http://rtd.wargaming.net/docs/wgccfe/en/latest/rst/strongholds.html#strongholds-state
        """
        get_params = {'clan_id': clan_id}
        try:
            clan_id = int(clan_id)
        except (TypeError, ValueError):
            error = exceptions.BadRequest()
            return callback({'description': error.description}, error.status_code, error.response_code)

        if self._account:
            get_params['performer_id'] = self._account
        url = '/api/strongholds/state/?%s' % urlencode(get_params)

        @preprocess_callback(callback, 'strongholds')
        def inner_callback(data):
            return data and data[0] or {}

        return self._request_data(inner_callback, 'strongholds', url)
Example #22
0
import time
from datetime import date, datetime, time as dt_time, timedelta

UPDATE_TIME = dt_time(6, second=30)  # noqa: WPS432


def day_from_iso(iso_date):
    month_day = date.fromisoformat(iso_date).strftime('%B %d')  # noqa: WPS323
    month, day = month_day.split()
    return '{0} {1}'.format(_(month), day)


def hm_from_seconds(secs):
    return time.strftime('%Hh %Mm', time.gmtime(secs))


def is_before_update_time():  # noqa: N802,WPS114
    return datetime.utcnow().time() < UPDATE_TIME


def seconds_for_next_update():
    now = datetime.utcnow()
    today_6GMT = datetime.combine(now.date(), UPDATE_TIME)  # noqa: N806,WPS114
    if now <= today_6GMT:
        return (today_6GMT - now).seconds
    tomorrow_6GMT = datetime.combine(  # noqa: N806,WPS114
        now.date() + timedelta(days=1),
        UPDATE_TIME,
    )
    return (tomorrow_6GMT - now).seconds
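
A short worked example (not part of the original module) of the arithmetic in
seconds_for_next_update(), using a fixed "now" that is already past the 06:00:30
UTC update time; the timestamp below is purely illustrative:

from datetime import datetime, time as dt_time, timedelta

UPDATE_TIME = dt_time(6, second=30)

now = datetime(2024, 1, 1, 7, 0, 0)  # 07:00 UTC, past today's update time
today_update = datetime.combine(now.date(), UPDATE_TIME)
assert now > today_update  # so the next update happens tomorrow
next_update = datetime.combine(now.date() + timedelta(days=1), UPDATE_TIME)
assert (next_update - now).seconds == 23 * 3600 + 30  # 82830 seconds until then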
Example #23
0
    def testTimeOnlyRange(self):
        # For a time-only object, tzrange's .utcoffset() returns None
        tz_range = tz.tzrange('dflt')
        self.assertIs(dt_time(13, 20, tzinfo=tz_range).utcoffset(), None)
Example #24
0
class FaultsReviewNotice(models.Model):

    UNREVIEWED = 0

    NOTIFICATION_TYPES = ((UNREVIEWED,
                           _l("Notify about Faults awaiting review")), )

    TIME_CHOICES = [(dt_time(x // 60, x % 60), "%02d:%02d" % (x // 60, x % 60))
                    for x in range(0, 24 * 60, 15)]

    notification_type = models.IntegerField(
        verbose_name=_l("Notification Type"),
        choices=NOTIFICATION_TYPES,
        default=UNREVIEWED,
    )

    send_empty = models.BooleanField(
        verbose_name=_l("Send Empty Notices"),
        help_text=_l(
            "Check to send notices even if there's no unreviewed Service Events to currently notify about"
        ),
        default=False,
    )

    recurrences = RecurrenceField(
        verbose_name=_l("Recurrences"),
        help_text=_l(
            "Define the schedule this notification should be sent on."),
        default="",
    )

    time = models.TimeField(
        verbose_name=_l("Time of day"),
        help_text=_l(
            "Set the time of day this notice should be sent (00:00-23:59)."),
        choices=TIME_CHOICES,
    )

    recipients = models.ForeignKey(
        RecipientGroup,
        verbose_name=_l("Recipients"),
        help_text=_l(
            "Choose the group of recipients who should receive these notifications"
        ),
        on_delete=models.PROTECT,
    )

    units = models.ForeignKey(
        UnitGroup,
        verbose_name=_l("Unit Group filter"),
        help_text=_l(
            "Select which group of Units this notification should be limited to. Leave blank to include all units"
        ),
        null=True,
        blank=True,
        on_delete=models.PROTECT,
    )

    last_sent = models.DateTimeField(null=True, editable=False)

    class Meta:
        verbose_name = _l("Fault Review Notice")

    @property
    def is_unreviewed(self):
        return self.notification_type == self.UNREVIEWED

    def faults(self):
        """Return faults relevant to this notice"""

        faults = Fault.objects.unreviewed()

        if self.units_id:
            faults = faults.filter(unit__in=self.units.units.all())

        return faults.order_by(
            "unit__%s" % settings.ORDER_UNITS_BY,
            "fault_types__code",
        )

    def faults_by_unit_fault_type(self):

        faults = self.faults()
        return faults.values(
            "unit__name",
            "fault_types__code",
        ).order_by(
            "unit__name",
            "fault_types__code",
        ).annotate(
            Count("unit__name"),
            Count("fault_types__code"),
        )

    def send_required(self):
        return self.send_empty or self.faults().count() > 0
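
For illustration only (not from the source), the TIME_CHOICES comprehension above
yields one (time, label) pair per quarter hour, 96 entries in total:

from datetime import time as dt_time

TIME_CHOICES = [(dt_time(x // 60, x % 60), "%02d:%02d" % (x // 60, x % 60))
                for x in range(0, 24 * 60, 15)]

assert len(TIME_CHOICES) == 96
assert TIME_CHOICES[0] == (dt_time(0, 0), "00:00")
assert TIME_CHOICES[-1] == (dt_time(23, 45), "23:45")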
Example #25
0
    def _handler(self, request, response):
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        LOGGER.info('Start process')
        response.update_status('execution started at : {}'.format(dt.now()), 5)

        process_start_time = time.time()  # measure process execution time ...
        start_time = time.time()  # measure init ...

        ################################
        # reading in the input arguments
        ################################

        try:
            response.update_status('read input parameter : %s ' % dt.now(), 5)

            resource = archiveextract(resource=rename_complexinputs(request.inputs['resource']))
            refSt = request.inputs['refSt'][0].data
            refEn = request.inputs['refEn'][0].data
            dateSt = request.inputs['dateSt'][0].data
            dateEn = request.inputs['dateEn'][0].data
            seasonwin = request.inputs['seasonwin'][0].data
            nanalog = request.inputs['nanalog'][0].data

            # bbox = [-80, 20, 50, 70]
            # TODO: Add checking for wrong coordinates and apply default if necessary
            #level = 500

            level = request.inputs['level'][0].data
            if (level == 500): 
                dummylevel = 1000 # dummy workaround for cdo sellevel
            else:
                dummylevel = 500
            LOGGER.debug('LEVEL selected: %s hPa' % (level))

            bbox=[]
            bboxStr = request.inputs['BBox'][0].data
            bboxStr = bboxStr.split(',')
            #for i in bboxStr: bbox.append(int(i))
            bbox.append(float(bboxStr[0]))
            bbox.append(float(bboxStr[2]))
            bbox.append(float(bboxStr[1]))
            bbox.append(float(bboxStr[3]))
            LOGGER.debug('BBOX for ocgis: %s ' % (bbox))
            LOGGER.debug('BBOX original: %s ' % (bboxStr))

            # if bbox_obj is not None:
            #     LOGGER.info("bbox_obj={0}".format(bbox_obj.coords))
            #     bbox = [bbox_obj.coords[0][0],
            #             bbox_obj.coords[0][1],
            #             bbox_obj.coords[1][0],
            #             bbox_obj.coords[1][1]]
            #     LOGGER.info("bbox={0}".format(bbox))
            # else:
            #     bbox = None
            # region = self.getInputValues(identifier='region')[0]
            # bbox = [float(b) for b in region.split(',')]
            # bbox_obj = self.BBox.getValue()

            normalize = request.inputs['normalize'][0].data
            distance = request.inputs['dist'][0].data
            outformat = request.inputs['outformat'][0].data
            timewin = request.inputs['timewin'][0].data

            # model_var = request.inputs['reanalyses'][0].data
            # model, var = model_var.split('_')

            # experiment = self.getInputValues(identifier='experiment')[0]
            # dataset, var = experiment.split('_')
            # LOGGER.info('environment set')
            LOGGER.info('input parameters set')
            response.update_status('Read in and convert the arguments', 5)
        except Exception as e:
            msg = 'failed to read input parameter %s ' % e
            LOGGER.error(msg)
            raise Exception(msg)

        ######################################
        # convert types and set environment
        ######################################
        try:
            # refSt = dt.strptime(refSt[0], '%Y-%m-%d')
            # refEn = dt.strptime(refEn[0], '%Y-%m-%d')
            # dateSt = dt.strptime(dateSt[0], '%Y-%m-%d')
            # dateEn = dt.strptime(dateEn[0], '%Y-%m-%d')

            # not necessary if ocgis_module.py is fixed
            refSt = dt.combine(refSt,dt_time(12,0))
            refEn = dt.combine(refEn,dt_time(12,0))
            dateSt = dt.combine(dateSt,dt_time(12,0))
            dateEn = dt.combine(dateEn,dt_time(12,0))

            # refSt = refSt.replace(hour=12)
            # refEn = refEn.replace(hour=12)
            # dateSt = dateSt.replace(hour=12)
            # dateEn = dateEn.replace(hour=12)

            if normalize == 'None':
                seacyc = False
            else:
                seacyc = True

            if outformat == 'ascii':
                outformat = '.txt'
            elif outformat == 'netCDF':
                outformat = '.nc'
            else:
                LOGGER.error('output format not valid')

            start = min(refSt, dateSt)
            end = max(refEn, dateEn)

#            if bbox_obj is not None:
#                LOGGER.info("bbox_obj={0}".format(bbox_obj.coords))
#                bbox = [bbox_obj.coords[0][0],
#                        bbox_obj.coords[0][1],
#                        bbox_obj.coords[1][0],
#                        bbox_obj.coords[1][1]]
#                LOGGER.info("bbox={0}".format(bbox))
#            else:
#                bbox = None

            LOGGER.info('environment set')
        except Exception as e:
            msg = 'failed to set environment %s ' % e
            LOGGER.error(msg)
            raise Exception(msg)

        LOGGER.debug("init took %s seconds.", time.time() - start_time)
        response.update_status('Read in and convert the arguments', 5)

        ########################
        # input data preparation
        ########################

        # TODO: Check if files containing more than one dataset

        response.update_status('Start preparing input data', 12)
        start_time = time.time()  # measure data preparation ...
        try:
            # TODO: Add selection of the level, maybe below in call(..., level_range=[...,...])

            if type(resource) == list:
                #resource.sort()
                resource = sorted(resource, key=lambda i: path.splitext(path.basename(i))[0])
            else:
                resource=[resource]

            #===============================================================
            # TODO: remove resources from the list that are outside the years
            # requested for the calculation

            tmp_resource = []

            for re in resource:
                s,e = get_timerange(re)
                tmpSt = dt.strptime(s,'%Y%m%d') 
                tmpEn = dt.strptime(e,'%Y%m%d') 
                if ((tmpSt <= end ) and (tmpEn >= start)):
                    tmp_resource.append(re)
                    LOGGER.debug('Selected file: %s ' % (re))
            resource = tmp_resource
            # ===============================================================

            #================================================================
            # Try to fix memory issue... (ocgis call for files like 20-30 gb... )
            # IF 4D - select pressure level before domain cut
            #
            # resource properties
            ds = Dataset(resource[0])
            variable = get_variable(resource[0])
            var = ds.variables[variable]
            dims = list(var.dimensions)
            dimlen = len(dims)

            try:
                model_id = ds.getncattr('model_id') 
            except AttributeError:
                model_id = 'Unknown model'

            LOGGER.debug('MODEL: %s ' % (model_id)) 

            lev_units = 'hPa'

            if (dimlen>3) :
                lev = ds.variables[dims[1]]
                # actually index [1] needs to be detected... assuming zg(time, plev, lat, lon)
                lev_units = lev.units

                if (lev_units=='Pa'):
                    level = level*100
                    dummylevel=dummylevel*100
                    # TODO: OR check the NAME and units of vertical level and find 200 , 300, or 500 mbar in it
                    # Not just level = level * 100.

            # Get Levels

            from cdo import Cdo
            cdo = Cdo()

            lev_res=[]
            if(dimlen>3):
                for res_fn in resource:
                    tmp_f = 'lev_' + path.basename(res_fn)
                    comcdo = '%s,%s' % (level,dummylevel)
                    cdo.sellevel(comcdo, input=res_fn, output=tmp_f)
                    lev_res.append(tmp_f)
            else:
                lev_res = resource

            # Get domain
            regr_res=[]
            for res_fn in lev_res:
                tmp_f = 'dom_' + path.basename(res_fn)
                comcdo = '%s,%s,%s,%s' % (bbox[0],bbox[2],bbox[1],bbox[3])
                cdo.sellonlatbox(comcdo, input=res_fn, output=tmp_f)
                regr_res.append(tmp_f)

            #archive_tmp = call(resource=resource, time_range=[refSt, refEn], geom=bbox, spatial_wrapping='wrap')
            #simulation_tmp = call(resource=resource, time_range=[dateSt, dateEn], geom=bbox, spatial_wrapping='wrap')
            #============================  

            archive_tmp = call(resource=regr_res, time_range=[refSt, refEn], spatial_wrapping='wrap')
            simulation_tmp = call(resource=regr_res, time_range=[dateSt, dateEn], spatial_wrapping='wrap')

            #######################################################################################
            # TEMPORARY dirty workaround to get the level and its units - will become a function in utils.py
            
            if (dimlen>3) :
                archive = get_level(archive_tmp, level = level)
                simulation = get_level(simulation_tmp,level = level)
                variable = 'z%s' % level
                # TODO: here should be modulated
            else:
                archive = archive_tmp
                simulation = simulation_tmp
                # 3D, move forward
            #######################################################################################

            if seacyc is True:
                seasoncyc_base, seasoncyc_sim = analogs.seacyc(archive, simulation, method=normalize)
            else:
                seasoncyc_base = None
                seasoncyc_sim = None
        except Exception as e:
            msg = 'failed to prepare archive and simulation files %s ' % e
            LOGGER.debug(msg)
            raise Exception(msg)
        ip, output = mkstemp(dir='.', suffix='.txt')
        output_file = path.abspath(output)
        files = [path.abspath(archive), path.abspath(simulation), output_file]

        LOGGER.debug("data preperation took %s seconds.", time.time() - start_time)

        ############################
        # generating the config file
        ############################

        # TODO: add MODEL name as argument

        response.update_status('writing config file', 15)
        start_time = time.time()  # measure write config ...

        try:
            config_file = analogs.get_configfile(
                files=files,
                seasoncyc_base=seasoncyc_base,
                seasoncyc_sim=seasoncyc_sim,
                base_id=model_id,
                sim_id=model_id, 
                timewin=timewin,
                varname=variable,
                seacyc=seacyc,
                cycsmooth=91,
                nanalog=nanalog,
                seasonwin=seasonwin,
                distfun=distance,
                outformat=outformat,
                calccor=True,
                silent=False,
                period=[dt.strftime(refSt, '%Y-%m-%d'), dt.strftime(refEn, '%Y-%m-%d')],
                bbox="%s,%s,%s,%s" % (bbox[0], bbox[2], bbox[1], bbox[3]))
        except Exception as e:
            msg = 'failed to generate config file %s ' % e
            LOGGER.debug(msg)
            raise Exception(msg)

        LOGGER.debug("write_config took %s seconds.", time.time() - start_time)

        ##############
        # CASTf90 call
        ##############
        import subprocess
        import shlex

        start_time = time.time()  # measure call castf90
        response.update_status('Start CASTf90 call', 20)
        try:
            # response.update_status('execution of CASTf90', 50)
            cmd = 'analogue.out %s' % path.relpath(config_file)
            # system(cmd)
            args = shlex.split(cmd)
            output, error = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
            LOGGER.info('analogue.out info:\n %s ' % output)
            LOGGER.debug('analogue.out errors:\n %s ' % error)
            response.update_status('**** CASTf90 succeeded', 70)
        except Exception as e:
            msg = 'CASTf90 failed %s ' % e
            LOGGER.error(msg)
            raise Exception(msg)
        
        LOGGER.debug("castf90 took %s seconds.", time.time() - start_time)
        response.update_status('preparing output', 70)

        response.outputs['config'].file = config_file #config_output_url  # config_file )
        response.outputs['analogs'].file = output_file
        response.outputs['output_netcdf'].file = simulation

        ########################
        # generate analog viewer
        ########################

        formated_analogs_file = analogs.reformat_analogs(output_file)
        # response.outputs['formated_analogs'].storage = FileStorage()
        response.outputs['formated_analogs'].file = formated_analogs_file
        LOGGER.info('analogs reformatted')
        response.update_status('reformatted analog file', 80)

        viewer_html = analogs.render_viewer(
            # configfile=response.outputs['config'].get_url(),
            configfile=config_file,
            # datafile=response.outputs['formated_analogs'].get_url())
            datafile=formated_analogs_file)
        response.outputs['output'].file = viewer_html
        response.update_status('Successfully generated analogs viewer', 90)
        LOGGER.info('rendered pages: %s ', viewer_html)

        response.update_status('execution ended', 100)
        LOGGER.debug("total execution took %s seconds.",
                     time.time() - process_start_time)
        return response
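
A hypothetical illustration of the bounding-box reshuffling in the handler above.
Judging by the '-20,40,30,70' default used in the companion process, the 'BBox'
input appears to arrive as lonmin,lonmax,latmin,latmax, while the list handed to
ocgis is ordered [lonmin, latmin, lonmax, latmax]; the config file then writes it
back in the original order:

bboxStr = '-20,40,30,70'.split(',')
bbox = [float(bboxStr[0]), float(bboxStr[2]), float(bboxStr[1]), float(bboxStr[3])]
assert bbox == [-20.0, 30.0, 40.0, 70.0]
assert '%s,%s,%s,%s' % (bbox[0], bbox[2], bbox[1], bbox[3]) == '-20.0,40.0,30.0,70.0'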
Example #26
0
    def _handler(self, request, response):
        chdir(self.workdir)
        init_process_logger('log.txt')

        process_start_time = time.time()  # measure process execution time ...
        response.update_status('execution started at : %s ' % dt.now(), 5)

        start_time = time.time()  # measure init ...

        resource = archiveextract(
            resource=rename_complexinputs(request.inputs['resource']))

        # Filter resource:
        if type(resource) == list:
            resource = sorted(resource,
                              key=lambda i: path.splitext(path.basename(i))[0])
        else:
            resource = [resource]

        refSt = request.inputs['refSt'][0].data
        refEn = request.inputs['refEn'][0].data
        dateSt = request.inputs['dateSt'][0].data
        dateEn = request.inputs['dateEn'][0].data

        regrset = request.inputs['regrset'][0].data
        direction = request.inputs['direction'][0].data
        # Check if model has 360_day calendar:

        try:
            modcal, calunits = get_calendar(resource[0])
            LOGGER.debug('CALENDAR: %s' % (modcal))
            if '360_day' in modcal:
                if direction == 're2mo':
                    if refSt.day == 31:
                        refSt = refSt.replace(day=30)
                        LOGGER.debug('Date has been changed for: %s' % (refSt))
                    if refEn.day == 31:
                        refEn = refEn.replace(day=30)
                        LOGGER.debug('Date has been changed for: %s' % (refEn))
                else:  # mo2re
                    if dateSt.day == 31:
                        dateSt = dateSt.replace(day=30)
                        LOGGER.debug('Date has been changed for: %s' %
                                     (dateSt))
                    if dateEn.day == 31:
                        dateEn = dateEn.replace(day=30)
                        LOGGER.debug('Date has been changed for: %s' %
                                     (dateEn))
        except:
            LOGGER.debug('Could not detect calendar')

        seasonwin = request.inputs['seasonwin'][0].data
        nanalog = request.inputs['nanalog'][0].data

        bboxDef = '-20,40,30,70'  # in general format

        bbox = []
        bboxStr = request.inputs['BBox'][0].data
        LOGGER.debug('BBOX selected by user: %s ' % (bboxStr))
        bboxStr = bboxStr.split(',')

        # Check for wrong coordinates and apply the default if necessary
        if (abs(float(bboxStr[0])) > 180 or abs(float(bboxStr[1])) > 180
                or abs(float(bboxStr[2])) > 90 or abs(float(bboxStr[3])) > 90):
            bboxStr = bboxDef  # request.inputs['BBox'].default  # .default doesn't work anymore!!!
            LOGGER.debug(
                'BBOX is out of the range, using default instead: %s ' %
                (bboxStr))
            bboxStr = bboxStr.split(',')

        bbox.append(float(bboxStr[0]))
        bbox.append(float(bboxStr[2]))
        bbox.append(float(bboxStr[1]))
        bbox.append(float(bboxStr[3]))

        normalize = request.inputs['normalize'][0].data
        plot = request.inputs['plot'][0].data
        distance = request.inputs['dist'][0].data
        outformat = request.inputs['outformat'][0].data
        timewin = request.inputs['timewin'][0].data

        model_var = request.inputs['reanalyses'][0].data
        model, var = model_var.split('_')

        try:
            if direction == 're2mo':
                anaSt = dt.combine(dateSt, dt_time(
                    0, 0))  # dt.strptime(dateSt[0], '%Y-%m-%d')
                anaEn = dt.combine(dateEn, dt_time(
                    0, 0))  # dt.strptime(dateEn[0], '%Y-%m-%d')
                refSt = dt.combine(refSt, dt_time(
                    12, 0))  # dt.strptime(refSt[0], '%Y-%m-%d')
                refEn = dt.combine(refEn, dt_time(
                    12, 0))  # dt.strptime(refEn[0], '%Y-%m-%d')
                r_time_range = [anaSt, anaEn]
                m_time_range = [refSt, refEn]
            elif direction == 'mo2re':
                anaSt = dt.combine(dateSt, dt_time(
                    12, 0))  # dt.strptime(refSt[0], '%Y-%m-%d')
                anaEn = dt.combine(dateEn, dt_time(
                    12, 0))  # dt.strptime(refEn[0], '%Y-%m-%d')
                refSt = dt.combine(refSt, dt_time(
                    0, 0))  # dt.strptime(dateSt[0], '%Y-%m-%d')
                refEn = dt.combine(refEn, dt_time(
                    0, 0))  # dt.strptime(dateEn[0], '%Y-%m-%d')
                r_time_range = [refSt, refEn]
                m_time_range = [anaSt, anaEn]
            else:
                LOGGER.exception(
                    'failed to find time periods for comparison direction')
        except:
            msg = 'failed to put simulation and reference time in order'
            LOGGER.exception(msg)
            raise Exception(msg)

        if normalize == 'None':
            seacyc = False
        else:
            seacyc = True

        if outformat == 'ascii':
            outformat = '.txt'
        elif outformat == 'netCDF':
            outformat = '.nc'
        else:
            LOGGER.exception('output format not valid')

        try:
            if model == 'NCEP':
                # getlevel = True
                getlevel = False
                if 'z' in var:
                    level = var.strip('z')
                    variable = 'hgt'
                    # conform_units_to='hPa'
                else:
                    variable = 'slp'
                    level = None
                    # conform_units_to='hPa'
            elif '20CRV2' in model:
                getlevel = False
                if 'z' in var:
                    variable = 'hgt'
                    level = var.strip('z')
                    # conform_units_to=None
                else:
                    variable = 'prmsl'
                    level = None
                    # conform_units_to='hPa'
            else:
                LOGGER.exception('Reanalyses model not known')
            LOGGER.info('environment set')
        except:
            msg = 'failed to set environment'
            LOGGER.exception(msg)
            raise Exception(msg)

        # LOGGER.exception("init took %s seconds.", time.time() - start_time)
        response.update_status('Read in the arguments', 10)

        #################
        # get input data
        #################
        # TODO: do not forget to select years

        start_time = time.time()  # measure get_input_data ...
        response.update_status('fetching input data', 20)
        try:
            if direction == 're2mo':
                nc_reanalyses = reanalyses(start=anaSt.year,
                                           end=anaEn.year,
                                           variable=var,
                                           dataset=model,
                                           getlevel=getlevel)
            else:
                nc_reanalyses = reanalyses(start=refSt.year,
                                           end=refEn.year,
                                           variable=var,
                                           dataset=model,
                                           getlevel=getlevel)

            if type(nc_reanalyses) == list:
                nc_reanalyses = sorted(
                    nc_reanalyses,
                    key=lambda i: path.splitext(path.basename(i))[0])
            else:
                nc_reanalyses = [nc_reanalyses]

            # For 20CRV2 geopotential height, the daily dataset for 100 years is about 50 GB,
            # so it makes sense to process it step by step
            # TODO: need to create a dictionary for such datasets (for models as well)
            # TODO: benchmark the method below for NCEP z500 for 60 years, maybe use the same (!)
            # TODO Now everything regrid to the reanalysis

            # if ('20CRV2' in model) and ('z' in var):
            if ('z' in var):
                tmp_total = []
                origvar = get_variable(nc_reanalyses[0])

                for z in nc_reanalyses:
                    # tmp_n = 'tmp_%s' % (uuid.uuid1())
                    b0 = call(resource=z,
                              variable=origvar,
                              level_range=[int(level), int(level)],
                              geom=bbox,
                              spatial_wrapping='wrap',
                              prefix='levdom_' + path.basename(z)[0:-3])
                    tmp_total.append(b0)

                tmp_total = sorted(
                    tmp_total,
                    key=lambda i: path.splitext(path.basename(i))[0])
                inter_subset_tmp = call(resource=tmp_total,
                                        variable=origvar,
                                        time_range=r_time_range)

                # Create new variable
                ds = Dataset(inter_subset_tmp, mode='a')
                z_var = ds.variables.pop(origvar)
                dims = z_var.dimensions
                new_var = ds.createVariable('z%s' % level,
                                            z_var.dtype,
                                            dimensions=(dims[0], dims[2],
                                                        dims[3]))
                new_var[:, :, :] = squeeze(z_var[:, 0, :, :])
                # new_var.setncatts({k: z_var.getncattr(k) for k in z_var.ncattrs()})
                ds.close()
                nc_subset = call(inter_subset_tmp, variable='z%s' % level)
                # Clean
                for i in tmp_total:
                    tbr = 'rm -f %s' % (i)
                    system(tbr)
                # for i in inter_subset_tmp
                tbr = 'rm -f %s' % (inter_subset_tmp)
                system(tbr)
            else:
                # TODO: ADD HERE serial as well as in analogs reanalysis process!!
                nc_subset = call(
                    resource=nc_reanalyses,
                    variable=var,
                    geom=bbox,
                    spatial_wrapping='wrap',
                    time_range=r_time_range,
                )

            response.update_status('**** Input reanalyses data fetched', 30)
        except:
            msg = 'failed to fetch or subset input files'
            LOGGER.exception(msg)
            raise Exception(msg)

        ########################
        # input data preparation
        ########################
        response.update_status('Start preparing input data', 40)

        m_start = m_time_range[0]
        m_end = m_time_range[1]

        # ===============================================================
        # REMOVE resources from the list which are out of interest from the list
        # (years > and < than requested for calculation)

        tmp_resource = []

        for re in resource:
            s, e = get_timerange(re)
            tmpSt = dt.strptime(s, '%Y%m%d')
            tmpEn = dt.strptime(e, '%Y%m%d')
            if ((tmpSt <= m_end) and (tmpEn >= m_start)):
                tmp_resource.append(re)
                LOGGER.debug('Selected file: %s ' % (re))
        resource = tmp_resource

        start_time = time.time()  # measure data preparation ...
        # TODO: check the calendars for model vs reanalyses.
        # TODO: check the units for model vs reanalyses.
        try:
            m_total = []
            modvar = get_variable(resource)
            # resource properties
            ds = Dataset(resource[0])
            m_var = ds.variables[modvar]
            dims = list(m_var.dimensions)
            dimlen = len(dims)

            try:
                model_id = ds.getncattr('model_id')
            except AttributeError:
                model_id = 'Unknown model'

            LOGGER.debug('MODEL: %s ' % (model_id))

            lev_units = 'hPa'

            if (dimlen > 3):
                lev = ds.variables[dims[1]]
                # TODO: actually index [1] needs to be detected... assuming zg(time, plev, lat, lon)
                lev_units = lev.units

                if (lev_units == 'Pa'):
                    m_level = str(int(level) * 100)
                else:
                    m_level = level
            else:
                m_level = None

            if level is None:
                level_range = None
            else:
                level_range = [int(m_level), int(m_level)]

            ds.close()

            for z in resource:
                tmp_n = 'tmp_%s' % (uuid.uuid1())
                # TODO: Important! if only 1 file - select time period from that first!

                # select level and regrid

                # \/\/ working version 19Feb2019
                # b0 = call(resource=z, variable=modvar, level_range=level_range,
                #         spatial_wrapping='wrap', cdover='system',
                #         regrid_destination=nc_reanalyses[0], regrid_options='bil', prefix=tmp_n)

                try:
                    b0 = call(resource=z,
                              variable=modvar,
                              level_range=level_range,
                              spatial_wrapping='wrap',
                              cdover='system',
                              regrid_destination=nc_subset,
                              regrid_options='bil',
                              prefix=tmp_n)
                except:
                    b0 = call(resource=z,
                              variable=modvar,
                              level_range=level_range,
                              spatial_wrapping='wrap',
                              cdover='system',
                              regrid_destination=nc_reanalyses[0],
                              regrid_options='bil',
                              prefix=tmp_n)

                # select domain (already selected in fact, if regrided to 'nc_subset')
                b01 = call(resource=b0,
                           geom=bbox,
                           spatial_wrapping='wrap',
                           prefix='levregr_' + path.basename(z)[0:-3])

                # TODO: REPLACE rm -f by os.remove() !
                tbr = 'rm -f %s' % (b0)
                system(tbr)
                tbr = 'rm -f %s.nc' % (tmp_n)
                system(tbr)
                # get full resource
                m_total.append(b01)

            model_subset = call(m_total, time_range=m_time_range)

            for i in m_total:
                tbr = 'rm -f %s' % (i)
                system(tbr)

            if m_level is not None:
                # Create new variable in model set
                ds = Dataset(model_subset, mode='a')
                mod_var = ds.variables.pop(modvar)
                dims = mod_var.dimensions
                new_modvar = ds.createVariable('z%s' % level,
                                               mod_var.dtype,
                                               dimensions=(dims[0], dims[2],
                                                           dims[3]))
                new_modvar[:, :, :] = squeeze(mod_var[:, 0, :, :])
                # new_var.setncatts({k: z_var.getncattr(k) for k in z_var.ncattrs()})
                ds.close()
                mod_subset = call(model_subset, variable='z%s' % level)
            else:
                mod_subset = model_subset

        except:
            msg = 'failed to subset simulation or reference data'
            LOGGER.exception(msg)
            raise Exception(msg)

# --------------------------------------------
        try:
            if direction == 'mo2re':
                simulation = mod_subset
                archive = nc_subset
                base_id = model
                sim_id = model_id
            elif direction == 're2mo':
                simulation = nc_subset
                archive = mod_subset
                base_id = model_id
                sim_id = model
            else:
                LOGGER.exception('direction not valid: %s ' % direction)
        except:
            msg = 'failed to find comparison direction'
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            if level is not None:
                out_var = 'z%s' % level
            else:
                var_archive = get_variable(archive)
                var_simulation = get_variable(simulation)
                if var_archive != var_simulation:
                    rename_variable(archive,
                                    oldname=var_archive,
                                    newname=var_simulation)
                    LOGGER.info('varname %s in netCDF renamed to %s' %
                                (var_archive, var_simulation))
                # make sure out_var is set even when no renaming was needed
                out_var = var_simulation
        except:
            msg = 'failed to rename variable in target files'
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            if seacyc is True:
                seasoncyc_base, seasoncyc_sim = analogs.seacyc(
                    archive, simulation, method=normalize)
            else:
                seasoncyc_base = None
                seasoncyc_sim = None
        except:
            msg = 'failed to prepare seasonal cycle reference files'
            LOGGER.exception(msg)
            raise Exception(msg)

        # ip, output = mkstemp(dir='.', suffix='.txt')
        # output_file = path.abspath(output)

        output_file = 'output.txt'

        ################################
        # Prepare names for config.txt #
        ################################

        # refDatesString = dt.strftime(refSt, '%Y-%m-%d') + "_" + dt.strftime(refEn, '%Y-%m-%d')
        # simDatesString = dt.strftime(dateSt, '%Y-%m-%d') + "_" + dt.strftime(dateEn, '%Y-%m-%d')

        # Fix < 1900 issue...
        refDatesString = refSt.isoformat().strip().split(
            "T")[0] + "_" + refEn.isoformat().strip().split("T")[0]
        simDatesString = dateSt.isoformat().strip().split(
            "T")[0] + "_" + dateEn.isoformat().strip().split("T")[0]

        archiveNameString = "base_" + out_var + "_" + refDatesString + '_%.1f_%.1f_%.1f_%.1f' \
                            % (bbox[0], bbox[2], bbox[1], bbox[3]) + '.nc'
        simNameString = "sim_" + out_var + "_" + simDatesString + '_%.1f_%.1f_%.1f_%.1f' \
                        % (bbox[0], bbox[2], bbox[1], bbox[3]) + '.nc'

        move(archive, archiveNameString)
        move(simulation, simNameString)

        archive = archiveNameString
        simulation = simNameString

        files = [path.abspath(archive), path.abspath(simulation), output_file]

        ############################
        # generating the config file
        ############################

        response.update_status('writing config file', 50)
        start_time = time.time()  # measure write config ...

        try:
            config_file = analogs.get_configfile(
                files=files,
                seasoncyc_base=seasoncyc_base,
                seasoncyc_sim=seasoncyc_sim,
                base_id=base_id,
                sim_id=sim_id,
                timewin=timewin,
                # varname=var,
                varname=out_var,
                seacyc=seacyc,
                cycsmooth=91,
                nanalog=nanalog,
                seasonwin=seasonwin,
                distfun=distance,
                outformat=outformat,
                calccor=True,
                silent=False,
                # period=[dt.strftime(refSt, '%Y-%m-%d'), dt.strftime(refEn, '%Y-%m-%d')],
                period=[
                    refSt.isoformat().strip().split("T")[0],
                    refEn.isoformat().strip().split("T")[0]
                ],
                bbox="%s,%s,%s,%s" % (bbox[0], bbox[2], bbox[1], bbox[3]))
        except:
            msg = 'failed to generate config file'
            LOGGER.exception(msg)
            raise Exception(msg)

        #######################
        # CASTf90 call
        #######################
        import subprocess
        import shlex

        start_time = time.time()  # measure call castf90

        response.update_status('Start CASTf90 call', 60)

        # -----------------------
        try:
            import ctypes
            # TODO: This lib is for linux
            mkl_rt = ctypes.CDLL('libmkl_rt.so')
            nth = mkl_rt.mkl_get_max_threads()
            LOGGER.debug('Current number of threads: %s' % (nth))
            mkl_rt.mkl_set_num_threads(ctypes.byref(ctypes.c_int(64)))
            nth = mkl_rt.mkl_get_max_threads()
            LOGGER.debug('NEW number of threads: %s' % (nth))
            # TODO: Does it \/\/\/ work with default shell=False in subprocess... (?)
            environ['MKL_NUM_THREADS'] = str(nth)
            environ['OMP_NUM_THREADS'] = str(nth)
        except Exception as e:
            msg = 'Failed to set THREADS %s ' % e
            LOGGER.debug(msg)
        # -----------------------

        # ##### TEMPORARY WORKAROUND! With hdf5-1.8.18 installed in anaconda ###############
        # ##### MUST be removed after castf90 is recompiled with the latest hdf version
        # ##### NOT safe
        environ['HDF5_DISABLE_VERSION_CHECK'] = '1'
        # hdflib = os.path.expanduser("~") + '/anaconda/lib'
        # hdflib = os.getenv("HOME") + '/anaconda/lib'
        import pwd
        hdflib = pwd.getpwuid(getuid()).pw_dir + '/anaconda/lib'
        environ['LD_LIBRARY_PATH'] = hdflib
        # ################################################################################

        try:
            response.update_status('execution of CASTf90', 70)
            cmd = 'analogue.out %s' % path.relpath(config_file)
            # system(cmd)
            args = shlex.split(cmd)
            output, error = subprocess.Popen(
                args, stdout=subprocess.PIPE,
                stderr=subprocess.PIPE).communicate()
            LOGGER.info('analogue.out info:\n %s ' % output)
            LOGGER.exception('analogue.out errors:\n %s ' % error)
            response.update_status('**** CASTf90 succeeded', 80)
        except:
            msg = 'CASTf90 failed'
            LOGGER.exception(msg)
            raise Exception(msg)

        LOGGER.debug("castf90 took %s seconds.", time.time() - start_time)

        # TODO: Add try - except for pdfs
        if plot == 'Yes':
            analogs_pdf = analogs.plot_analogs(configfile=config_file)
        else:
            analogs_pdf = 'dummy_plot.pdf'
            with open(analogs_pdf, 'a'):
                utime(analogs_pdf, None)

        response.update_status('preparing output', 90)

        # Stopper to keep twitcher results, for debug
        # dummy=dummy
        response.outputs['analog_pdf'].file = analogs_pdf
        response.outputs['config'].file = config_file
        response.outputs['analogs'].file = output_file
        response.outputs['output_netcdf'].file = simulation
        response.outputs['target_netcdf'].file = archive

        ########################
        # generate analog viewer
        ########################

        formated_analogs_file = analogs.reformat_analogs(output_file)
        response.outputs['formated_analogs'].file = formated_analogs_file
        LOGGER.info('analogs reformatted')
        # response.update_status('reformatted analog file', 95)
        viewer_html = analogs.render_viewer(
            # configfile=response.outputs['config'].get_url(),
            configfile=config_file,
            # datafile=response.outputs['formated_analogs'].get_url())
            datafile=formated_analogs_file)
        response.outputs['output'].file = viewer_html
        response.update_status('Successfully generated analogs viewer', 95)
        LOGGER.info('rendered pages: %s ', viewer_html)
        response.update_status('execution ended', 100)
        LOGGER.debug("total execution took %s seconds.",
                     time.time() - process_start_time)
        response.outputs['output_log'].file = 'log.txt'
        return response
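
A small aside (not from the source) on the "Fix < 1900 issue" comment above: older
Python versions raised ValueError from strftime() for dates before 1900, which is
presumably why the reference and simulation periods are formatted via isoformat()
instead:

from datetime import datetime

d = datetime(1850, 6, 1, 12, 0)
assert d.isoformat().strip().split('T')[0] == '1850-06-01'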
Example #27
0
    def parseEvent(self, event, fg, bg):
        """Method to turn google event into a format that we can use more
           easily.
        """
        # Set up a reference to UTC (all events need a timezone so we can
        # sort them)
        utc = pytz.UTC

        # Have we got a "date" or "dateTime" event?
        # Parse the start and end times as appropriate
        if event["start"].get("date", False):
            start = dateutil.parser.parse(event["start"]["date"])
            end = dateutil.parser.parse(event["end"]["date"])
        else:
            start = dateutil.parser.parse(event["start"]["dateTime"])
            end = dateutil.parser.parse(event["end"]["dateTime"])

        # Change the end time to one second earlier (useful to check number
        # of days of event)
        false_end = end - timedelta(0, 1)
        duration = false_end - start

        # Empty list for our events
        ev_list = []

        # Split long events into daily events
        for i in range(duration.days + 1):

            # Create a new start time if the daily event start time isn't the
            # same as the overall start time
            new_date = start + timedelta(i)
            if new_date.date() != start.date():
                st = datetime.combine(new_date.date(),
                                      dt_time(0, 0, tzinfo=start.tzinfo))
            else:
                st = start

            # Create a new end time if the daily event end time isn't the same
            # as the overall end time
            if new_date.date() != false_end.date():
                add_day = new_date.date() + timedelta(1)
                en = datetime.combine(add_day,
                                      dt_time(0, 0, tzinfo=start.tzinfo))
            else:
                en = end

            # If there's no timezone set, then let's set one
            if st.tzinfo is None:
                st = utc.localize(st)

            if en.tzinfo is None:
                en = utc.localize(en)

            # Create a dict of the info we need
            ev = {
                "fg": fg,
                "bg": bg,
                "summary": event.get("summary", ""),
                "location": event.get("location", ""),
                "start": st,
                "end": en,
                "stdate": st.date()
            }

            # Add to our list
            ev_list.append(ev)

        return ev_list
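
A rough sketch (not from the source) of the day-splitting arithmetic in parseEvent
for an all-day event spanning 2021-01-01 to 2021-01-03; Google Calendar "date"
events typically carry an exclusive end date, so shifting the end back by one
second yields the true number of covered days:

from datetime import datetime, timedelta

start = datetime(2021, 1, 1)
end = datetime(2021, 1, 3)
false_end = end - timedelta(0, 1)   # 2021-01-02 23:59:59
duration = false_end - start
assert duration.days + 1 == 2       # the event is split into two daily entries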
Example #28
0
    sys.exit(0)

if infoboks.has_param(commonargs['year']) and infoboks.has_param(commonargs['week']):
    year = int(re.sub(ur'<\!--.+?-->', ur'', unicode(infoboks.parameters[commonargs['year']])).strip())
    startweek = int(re.sub(ur'<\!--.+?-->', ur'', unicode(infoboks.parameters[commonargs['week']])).strip())
    if infoboks.has_param(commonargs['week2']):
        endweek = re.sub(ur'<\!--.+?-->', ur'', unicode(infoboks.parameters[commonargs['week2']])).strip()
        if endweek == '':
            endweek = startweek
    else:
        endweek = startweek
    endweek = int(endweek)

    startweek = Week(year, startweek)
    endweek = Week(year, endweek)
    start = wiki_tz.localize(datetime.combine(startweek.monday(), dt_time(0, 0, 0)))
    end = wiki_tz.localize(datetime.combine(endweek.sunday(), dt_time(23, 59, 59)))
elif infoboks.has_param(ibcfg['start']) and infoboks.has_param(ibcfg['end']):
    startdt = infoboks.parameters[ibcfg['start']].value
    enddt = infoboks.parameters[ibcfg['end']].value
    start = wiki_tz.localize(datetime.strptime(startdt + ' 00 00 00', '%Y-%m-%d %H %M %S'))
    end = wiki_tz.localize(datetime.strptime(enddt + ' 23 59 59', '%Y-%m-%d %H %M %S'))
else:
    log('!! fant ikke datoer')  # "did not find dates"
    sys.exit(0)

year = start.isocalendar()[0]
startweek = start.isocalendar()[1]
endweek = end.isocalendar()[1]

figname = config['plot']['figname'] % {'year': year, 'week': startweek}
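
Presumably the Week class above comes from the `isoweek` package; a minimal sketch
of how a (year, week) pair expands into a Monday-to-Sunday datetime range before
the wiki timezone is applied:

from datetime import datetime, time as dt_time
from isoweek import Week

w = Week(2021, 5)
start = datetime.combine(w.monday(), dt_time(0, 0, 0))
end = datetime.combine(w.sunday(), dt_time(23, 59, 59))
assert start.isoformat() == '2021-02-01T00:00:00'
assert end.isoformat() == '2021-02-07T23:59:59'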
Example #29
0
from datetime import datetime, timedelta
from datetime import time as dt_time
from collections import defaultdict
from pathlib import Path, PurePath

from vnpy.trader.object import BarData, TradeData
from vnpy.trader.constant import Interval, Offset
from vnpy.trader.database import database_manager
from vnpy.trader.utility import extract_vt_symbol

from strategy.boll_channel_strategy import BollChannelStrategy
from strategy.turtle_signal_strategy import TurtleSignalStrategy
from backtesting import SegBacktestingEngine
from basic_data import future_basic_data, future_hot_start, dominant_data

DAY_START = dt_time(8)
DAY_END = dt_time(16)

NIGHT_A_START = dt_time(20)
NIGHT_A_END = dt_time(23, 59, 59)

NIGHT_B_START = dt_time(0)
NIGHT_B_END = dt_time(5)
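
# Illustrative helper (not in the original module): a bar's time-of-day belongs to
# a trading session if it falls inside the day session or either night block above.
def in_trading_session(t: dt_time) -> bool:
    return (DAY_START <= t <= DAY_END
            or NIGHT_A_START <= t <= NIGHT_A_END
            or NIGHT_B_START <= t <= NIGHT_B_END)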

strategy_dict = {'boll': BollChannelStrategy, 'turtle': TurtleSignalStrategy}

zh_to_en = {'多': 'long', '空': 'short', '开': 'open', '平': 'close'}

compare_items = [
    'total_days', 'profit_days', 'max_ddpercent', 'max_drawdown_duration',
    'total_return', 'return_std', 'daily_return', 'sharpe_ratio',
                arrivals = directions_data[direction]

                arrivals.append({'t': int(dt.timestamp()), 'v': vid})

                if line_num % 1000 == 0:
                    print(f"{path}:{line_num}: {time_str}")

    def time_key(arrival):
        return arrival['t']

    for d, routes_data in all_data.items():
        next_day = d + incr

        start_time = int(
            tz.localize(datetime.combine(
                d, dt_time(hour=start_hour))).timestamp())
        end_time = int(
            tz.localize(datetime.combine(
                next_day, dt_time(hour=start_hour))).timestamp())

        for route, stops_data in routes_data.items():

            route_id = route.replace(' ', '_')

            for stop, stop_data in stops_data.items():
                for direction, arrivals in stop_data["arrivals"].items():
                    arrivals.sort(key=time_key)

            history = arrival_history.ArrivalHistory(agency_id,
                                                     route_id,
                                                     start_time=start_time,
class StagingDataAccessor(base.BaseDataAccessor):
    requests_before_logout = -1

    def __init__(self, url_fetcher, staging_hosts=None, client_lang=None, user_agent=None):
        self.client_lang = client_lang
        self._account = None
        self.url_fetcher = url_fetcher
        self.staging_hosts = staging_hosts or {}
        self.user_agent = user_agent
        return

    def login(self, callback, account_id, spa_token):
        self._account = account_id
        result, status_code = ('ok', 200)
        response_code = exceptions.ResponseCodes.NO_ERRORS
        callback(result, status_code, response_code)

    def logout(self, callback):
        self._account = None
        result, status_code = ('ok', 200)
        response_code = exceptions.ResponseCodes.NO_ERRORS
        callback(result, status_code, response_code)
        return

    def get_alive_status(self, callback):
        result, status_code = {'status': 'I am alive!'}, 200
        response_code = exceptions.ResponseCodes.NO_ERRORS
        callback(result, status_code, response_code)

    def _request_data(self, callback, service, url, method='GET', postData=None):
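        # Join the configured staging host for `service` with the relative URL, then hand the
        # request to url_fetcher; the result is delivered asynchronously through `callback`.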
        service_host = self.staging_hosts[service].strip('/')
        url = '/'.join([service_host] + url.strip('/').split('/'))
        if '?' not in url:
            url = url + '/'
        args = [None, 30.0, method]
        if postData:
            args.append(json.dumps(postData))
        self.url_fetcher(url, callback, *args)
        return

    @mapped_fields({'efficiency': 'efficiency',
     'clan_id': 'clan_id',
     'battles_count_avg': 'battles_count_avg',
     'wins_ratio_avg': 'wins_ratio_avg',
     'xp_avg': 'xp_avg',
     'gm_elo_rating_6': 'gm_elo_rating_6',
     'gm_elo_rating_8': 'gm_elo_rating_8',
     'gm_elo_rating_10': 'gm_elo_rating_10',
     'gm_elo_rating_6_rank': 'gm_elo_rating_6_rank',
     'gm_elo_rating_8_rank': 'gm_elo_rating_8_rank',
     'gm_elo_rating_10_rank': 'gm_elo_rating_10_rank',
     'fb_elo_rating_8': 'fb_elo_rating_8',
     'fb_elo_rating_10': 'fb_elo_rating_10',
     'fb_battles_count_10_28d': 'fb_battles_count_10_28d',
     'fs_battles_count_10_28d': 'fs_battles_count_10_28d',
     'gm_battles_count_28d': 'gm_battles_count_28d',
     'fs_battles_count_28d': 'fs_battles_count_28d',
     'fb_battles_count_28d': 'fb_battles_count_28d'})
    def get_clans_ratings(self, callback, clan_ids, fields=None):
        get_params = {'project': 'api',
         'fields': ','.join(fields),
         'ids': ','.join(map(str, clan_ids))}
        url = 'api/wot/clans/bulks/?%s' % urlencode(get_params)

        @preprocess_callback(callback, 'ratings')
        def inner_callback(data):
            return data['data']

        return self._request_data(inner_callback, 'ratings', url)

    @convert_data({'created_at': from_iso})
    @mapped_fields({'name': 'name',
     'tag': 'tag',
     'motto': 'motto',
     'leader_id': 'leader_id',
     'members_count': 'members_count',
     'created_at': 'created_at',
     'clan_id': 'id',
     'treasury': 'treasury',
     'accepts_join_requests': 'accepts_join_requests'})
    def get_clans_info(self, callback, clan_ids, fields=None):
        get_params = {'ids': ','.join(map(str, clan_ids)),
         'fields': ','.join(fields)}
        url = '/clans/?%s' % urlencode(get_params)

        @preprocess_callback(callback, 'clans')
        def inner_callback(data):
            return data['items']

        return self._request_data(inner_callback, 'clans', url)

    @mapped_fields({'id': 'id',
     'name': 'name'})
    def get_accounts_names(self, callback, account_ids, fields=None):
        get_params = {'id': account_ids}
        url = '/spa/accounts/names/?%s' % urlencode(get_params, doseq=True)

        @preprocess_callback(callback, 'spa')
        def inner_callback(data):
            return [ {'id': k,
             'name': v} for k, v in data.iteritems() ]

        return self._request_data(inner_callback, 'spa', url)

    @convert_data({'joined_at': from_iso})
    @mapped_fields({'account_id': 'id',
     'joined_at': 'joined_at',
     'clan_id': 'clan_id',
     'role_bw_flag': 'role.bw_flag',
     'role_name': 'role.name'})
    def get_clan_members(self, callback, clan_id, fields=None):
        get_params = {'fields': ','.join(fields)}
        url = '/clans/%s/members?%s' % (clan_id, urlencode(get_params))
        return self._request_data(preprocess_callback(callback, 'clans'), 'clans', url)

    @convert_data({'favorite_primetime': lambda x: x and datetime.strptime(x, '%H:%M').time()})
    @mapped_fields({'favorite_arena_6': 'favorite_arena_6',
     'favorite_arena_8': 'favorite_arena_8',
     'favorite_arena_10': 'favorite_arena_10',
     'clan_id': 'clan_id',
     'favorite_primetime': 'favorite_primetime'})
    def get_clan_favorite_attributes(self, callback, clan_id, fields=None):
        url = '/gm/clans/%s/favorite_attributes' % clan_id

        @preprocess_callback(callback, 'clans')
        def inner_callback(backend_data):
            result = {}
            for field in ['clan_id', 'favorite_primetime']:
                if field in backend_data:
                    result[field] = backend_data[field]

            for data in backend_data.get('favorite_arenas', []):
                if data.get('frontlevel') in (6, 8, 10) and 'arena' in data:
                    result['favorite_arena_{}'.format(data['frontlevel'])] = data['arena']

            return result

        return self._request_data(inner_callback, 'clans', url)

    @convert_data({'joined_at': from_iso,
     'in_clan_cooldown_till': from_iso})
    @mapped_fields({'account_id': 'id',
     'joined_at': 'joined_at',
     'clan_id': 'clan_id',
     'role_bw_flag': 'role.bw_flag',
     'role_name': 'role.name',
     'in_clan_cooldown_till': 'in_clan_cooldown_till'})
    def get_accounts_clans(self, callback, account_ids, fields=None):
        get_params = {'fields': ','.join(fields),
         'ids': ','.join(map(str, account_ids))}
        url = '/accounts/?%s' % urlencode(get_params)

        @preprocess_callback(callback, 'clans')
        def inner_callback(data):
            return data['items']

        return self._request_data(inner_callback, 'clans', url)

    @mapped_fields({'total': 'total'}, accept_fields_argument=False)
    def get_account_applications_count_since(self, callback, account_id, since=None):
        get_params = {'fields': 'id',
         'account_id': account_id,
         'created_after': since.isoformat()}
        url = '/applications/?%s' % urlencode(get_params)
        return self._request_data(preprocess_callback(callback, 'clans'), 'clans', url)

    @mapped_fields({'total': 'total'}, accept_fields_argument=False)
    def get_clan_invites_count_since(self, callback, clan_id, since=None):
        get_params = {'fields': 'id',
         'clan_id': clan_id,
         'created_after': since.isoformat()}
        url = '/invites/?%s' % urlencode(get_params)
        return self._request_data(preprocess_callback(callback, 'clans'), 'clans', url)

    @convert_data({'created_at': from_iso,
     'updated_at': from_iso}, paginated=True)
    @mapped_fields({'status': 'status',
     'created_at': 'created_at',
     'updated_at': 'updated_at',
     'sender_id': 'sender_id',
     'id': 'id',
     'account_id': 'account_id',
     'clan_id': 'clan_id',
     'comment': 'data.comment',
     'status_changer_id': 'data.status_changer_id'}, paginated=True)
    def get_account_applications(self, callback, fields=None, statuses=None, get_total_count=False, limit=18, offset=0):
        statuses = statuses or ['active',
         'declined',
         'accepted',
         'expired',
         'error',
         'deleted']
        get_params = {'fields': ','.join(fields),
         'account_id': self._account,
         'statuses': ','.join(statuses),
         'limit': limit,
         'offset': offset}
        url = '/applications/?%s' % urlencode(get_params)
        return self._request_data(preprocess_callback(callback, 'clans'), 'clans', url)

    @convert_data({'created_at': from_iso,
     'updated_at': from_iso}, paginated=True)
    @mapped_fields({'status': 'status',
     'created_at': 'created_at',
     'updated_at': 'updated_at',
     'sender_id': 'sender_id',
     'id': 'id',
     'account_id': 'account_id',
     'clan_id': 'clan_id',
     'comment': 'data.comment',
     'status_changer_id': 'data.status_changer_id'}, paginated=True)
    def get_clan_applications(self, callback, clan_id, fields=None, statuses=None, get_total_count=False, limit=18, offset=0):
        statuses = statuses or ['active',
         'declined',
         'accepted',
         'expired',
         'error',
         'deleted']
        get_params = {'fields': ','.join(fields),
         'clan_id': clan_id,
         'statuses': ','.join(statuses),
         'limit': limit,
         'offset': offset}
        url = '/applications/?%s' % urlencode(get_params)
        return self._request_data(preprocess_callback(callback, 'clans'), 'clans', url)

    @mapped_fields({'clan_id': 'clan_id',
     'id': 'id',
     'account_id': 'account_id'})
    def create_applications(self, callback, clan_ids, comment, fields=None):
        url = '/applications/'
        data = {'account_id': self._account,
         'clan_ids': clan_ids,
         'comment': comment}

        @preprocess_callback(callback, 'clans')
        def inner_callback(data):
            return data.values()

        return self._request_data(inner_callback, 'clans', url, method='POST', postData=data)

    @mapped_fields({'transaction_id': 'transaction_id',
     'id': 'id',
     'account_id': 'account_id',
     'clan_id': 'clan_id'})
    def accept_application(self, callback, application_id, fields=None):
        url = '/applications/%s/' % application_id
        data = {'initiator_id': self._account,
         'status': 'accepted'}

        @preprocess_callback(callback, 'clans')
        def inner_callback(data):
            data = data or {}
            data['account_id'] = data.pop('account_ids')[0]
            data['id'] = application_id
            return data

        return self._request_data(inner_callback, 'clans', url, method='PATCH', postData=data)

    @mapped_fields({'transaction_id': 'transaction_id',
     'id': 'id',
     'account_id': 'account_id',
     'clan_id': 'clan_id'})
    def decline_application(self, callback, application_id, fields=None):
        url = '/applications/%s/' % application_id
        data = {'initiator_id': self._account,
         'status': 'declined'}

        @preprocess_callback(callback, 'clans')
        def inner_callback(data):
            data = data or {}
            data['id'] = application_id
            return data

        return self._request_data(inner_callback, 'clans', url, method='PATCH', postData=data)

    @mapped_fields({'clan_id': 'clan_id',
     'id': 'id',
     'account_id': 'account_id'})
    def create_invites(self, callback, clan_id, account_ids, comment, fields=None):
        url = '/invites/'
        data = {'initiator_id': self._account,
         'clan_id': clan_id,
         'account_ids': account_ids,
         'comment': comment}

        @preprocess_callback(callback, 'clans')
        def inner_callback(data):
            return data.values()

        return self._request_data(inner_callback, 'clans', url, method='POST', postData=data)

    @mapped_fields({'transaction_id': 'transaction_id',
     'id': 'id',
     'account_id': 'account_id',
     'clan_id': 'clan_id'})
    def accept_invite(self, callback, invite_id, fields=None):
        url = '/invites/%s/' % invite_id
        data = {'initiator_id': self._account,
         'status': 'accepted'}

        @preprocess_callback(callback, 'clans')
        def inner_callback(data):
            data = data or {}
            data['account_id'] = data.pop('account_ids')[0]
            data['id'] = invite_id
            return data

        return self._request_data(inner_callback, 'clans', url, method='PATCH', postData=data)

    @mapped_fields({'transaction_id': 'transaction_id',
     'id': 'id',
     'account_id': 'account_id',
     'clan_id': 'clan_id'})
    def decline_invite(self, callback, invite_id, fields=None):
        url = '/invites/%s/' % invite_id
        data = {'initiator_id': self._account,
         'status': 'declined'}

        @preprocess_callback(callback, 'clans')
        def inner_callback(data):
            data = data or {}
            data['id'] = invite_id
            return data

        return self._request_data(inner_callback, 'clans', url, method='PATCH', postData=data)

    @mapped_fields({'id': 'id',
     'clan_id': 'clan_id',
     'account_id': 'account_id'})
    def bulk_decline_invites(self, callback, invite_ids, fields=None):
        url = '/invites/'
        data = {'initiator_id': self._account,
         'status': 'declined',
         'ids': invite_ids}

        @preprocess_callback(callback, 'clans')
        def inner_callback(data):
            data = data and data['items'] or {}
            return data

        return self._request_data(inner_callback, 'clans', url, method='PATCH', postData=data)

    @convert_data({'created_at': from_iso}, paginated=True)
    @mapped_fields({'name': 'name',
     'tag': 'tag',
     'motto': 'motto',
     'leader_id': 'leader_id',
     'members_count': 'members_count',
     'created_at': 'created_at',
     'clan_id': 'id',
     'treasury': 'treasury',
     'accepts_join_requests': 'accepts_join_requests'}, paginated=True)
    def search_clans(self, callback, search, get_total_count=False, fields=None, offset=0, limit=18):
        get_params = {'search': search,
         'game': 'wot',
         'fields': ','.join(fields),
         'limit': limit,
         'offset': offset}
        url = '/clans/search/?%s' % urlencode(get_params)
        return self._request_data(preprocess_callback(callback, 'clans'), 'clans', url)

    @convert_data({'created_at': from_iso}, paginated=True)
    @mapped_fields({'name': 'name',
     'tag': 'tag',
     'motto': 'motto',
     'leader_id': 'leader_id',
     'members_count': 'members_count',
     'created_at': 'created_at',
     'clan_id': 'id',
     'treasury': 'treasury',
     'accepts_join_requests': 'accepts_join_requests'}, paginated=True)
    def get_recommended_clans(self, callback, get_total_count=False, fields=None, offset=0, limit=18):
        get_params = {'game': 'wot',
         'fields': ','.join(fields),
         'limit': limit,
         'offset': offset}
        url = '/clans/?%s' % urlencode(get_params)
        return self._request_data(preprocess_callback(callback, 'clans'), 'clans', url)

    @convert_data({'created_at': from_iso,
     'updated_at': from_iso}, paginated=True)
    @mapped_fields({'status': 'status',
     'created_at': 'created_at',
     'updated_at': 'updated_at',
     'sender_id': 'sender_id',
     'id': 'id',
     'account_id': 'account_id',
     'clan_id': 'clan_id',
     'comment': 'data.comment',
     'status_changer_id': 'data.status_changer_id'}, paginated=True)
    def get_clan_invites(self, callback, clan_id, fields=None, statuses=None, get_total_count=False, limit=18, offset=0):
        statuses = statuses or ['active',
         'declined',
         'accepted',
         'expired',
         'error',
         'deleted']
        get_params = {'fields': ','.join(fields),
         'clan_id': clan_id,
         'statuses': ','.join(statuses),
         'limit': limit,
         'offset': offset}
        url = '/invites/?%s' % urlencode(get_params)
        return self._request_data(preprocess_callback(callback, 'clans'), 'clans', url)

    @convert_data({'created_at': from_iso,
     'updated_at': from_iso}, paginated=True)
    @mapped_fields({'status': 'status',
     'created_at': 'created_at',
     'updated_at': 'updated_at',
     'sender_id': 'sender_id',
     'id': 'id',
     'account_id': 'account_id',
     'clan_id': 'clan_id',
     'comment': 'data.comment',
     'status_changer_id': 'data.status_changer_id'}, paginated=True)
    def get_account_invites(self, callback, fields=None, statuses=None, get_total_count=False, limit=18, offset=0):
        statuses = statuses or ['active',
         'declined',
         'accepted',
         'expired',
         'error',
         'deleted']
        get_params = {'fields': ','.join(fields),
         'account_id': self._account,
         'statuses': ','.join(statuses),
         'limit': limit,
         'offset': offset}
        url = '/invites/?%s' % urlencode(get_params)
        return self._request_data(preprocess_callback(callback, 'clans'), 'clans', url)

    @mapped_fields({'global_rating': 'summary.global_rating',
     'battle_avg_xp': 'summary.battle_avg_xp',
     'battles_count': 'summary.battles_count',
     'battle_avg_performance': 'summary.battle_avg_performance',
     'xp_amount': 'summary.xp_amount',
     'account_id': 'account_id'})
    def get_accounts_info(self, callback, account_ids, fields=None):
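        # Fields arrive as dotted paths like 'summary.battles_count'; split each into a
        # (section, field) pair and regroup into 'section[field1,field2]' query sections.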
        fields = [ i.split('.', 1) for i in fields if i != 'account_id' ]
        grouped = groupby(sorted(fields), key=lambda x: x[0])
        sections = [ '%s[%s]' % (k, ','.join([ j[1] for j in v ])) for k, v in grouped ]
        get_params = {'account_ids': ','.join(map(str, account_ids)),
         'sections': ','.join(sections)}
        url = '/wot/accounts/?%s' % urlencode(get_params)

        @preprocess_callback(callback, 'exporter')
        def inner_callback(data):
            new_data = []
            for account_id, values in data.items():
                values['account_id'] = account_id
                new_data.append(values)

            return new_data

        return self._request_data(inner_callback, 'exporter', url)

    @convert_data({'pillage_end_datetime': from_iso,
     'prime_time': lambda x: x and datetime.strptime(x, '%H:%M').time()})
    @mapped_fields({'front_name': 'frontname',
     'province_id': 'province_id',
     'front_name_localized': 'frontname_localized',
     'province_id_localized': 'province_id_localized',
     'revenue': 'daily_revenue',
     'hq_connected': 'hq_connected',
     'prime_time': 'primetime',
     'game_map': 'game_map',
     'periphery': 'periphery_id',
     'turns_owned': 'turns_owned',
     'pillage_cooldown': 'pillage_cooldown',
     'pillage_end_datetime': 'pillage_end_datetime',
     'arena_id': 'arena_id'})
    def get_clan_provinces(self, callback, clan_id, fields=None):
        get_params = {'clans': ','.join(map(str, [clan_id]))}
        url = '/clans/provinces/?%s' % urlencode(get_params)

        @preprocess_callback(callback, 'global_map')
        def inner_callback(data):
            res = data['clans'] and data['clans'][0]['provinces']
            for i in res:
                i['frontname_localized'] = i['frontname']
                i['province_id_localized'] = i['province_id']

            return res

        return self._request_data(inner_callback, 'global_map', url)

    @mapped_fields({'battles_lost': 'battles_lost',
     'battles_played': 'battles_played',
     'battles_played_on_10_level': 'battles_played_on_10_level',
     'battles_played_on_6_level': 'battles_played_on_6_level',
     'battles_played_on_8_level': 'battles_played_on_8_level',
     'battles_won': 'battles_won',
     'battles_won_on_10_level': 'battles_won_on_10_level',
     'battles_won_on_6_level': 'battles_won_on_6_level',
     'battles_won_on_8_level': 'battles_won_on_8_level',
     'influence_points': 'influence_points',
     'provinces_captured': 'provinces_captured',
     'provinces_count': 'provinces_count'})
    def get_clan_globalmap_stats(self, callback, clan_id, fields=None):
        get_params = {'clans': ','.join(map(str, [clan_id]))}
        url = '/clans/stats?%s' % urlencode(get_params)

        @preprocess_callback(callback, 'global_map')
        def inner_callback(data):
            return data['clans'][0]['stats']

        return self._request_data(inner_callback, 'global_map', url)

    @mapped_fields({'front_name': 'id',
     'front_name_localized': 'id_localized',
     'min_vehicle_level': 'min_vehicle_level',
     'max_vehicle_level': 'max_vehicle_level'})
    def get_fronts_info(self, callback, front_names=None, fields=None):
        url = '/fronts/'

        @preprocess_callback(callback, 'global_map')
        def inner_callback(data):
            res = data['fronts']
            for i in res:
                i['id_localized'] = i['id']

            return res

        return self._request_data(inner_callback, 'global_map', url)

    @convert_data({'defence_hour': lambda x: dt_time(x, 0) if x >= 0 else None})
    @mapped_fields({'buildings.direction': 'buildings.direction',
     'buildings.type': 'buildings.type',
     'buildings.level': 'buildings.level',
     'buildings.position': 'buildings.position',
     'defence_attack_efficiency': 'defence_attack_efficiency',
     'defence_battles_count': 'defence_battles_count',
     'defence_capture_enemy_building_total_count': 'defence_capture_enemy_building_total_count',
     'defence_combat_wins': 'defence_combat_wins',
     'defence_defence_efficiency': 'defence_defence_efficiency',
     'defence_enemy_base_capture_count': 'defence_enemy_base_capture_count',
     'defence_loss_own_building_total_count': 'defence_loss_own_building_total_count',
     'defence_resource_capture_count': 'defence_resource_capture_count',
     'defence_resource_loss_count': 'defence_resource_loss_count',
     'sortie_absolute_battles_count': 'sortie_absolute_battles_count',
     'sortie_battles_count': 'sortie_battles_count',
     'sortie_champion_battles_count': 'sortie_champion_battles_count',
     'sortie_middle_battles_count': 'sortie_middle_battles_count',
     'defence_attack_count': 'defence_attack_count',
     'defence_defence_count': 'defence_defence_count',
     'defence_success_attack_count': 'defence_success_attack_count',
     'defence_success_defence_count': 'defence_success_defence_count',
     'sortie_fort_resource_in_absolute': 'sortie_fort_resource_in_absolute',
     'sortie_fort_resource_in_champion': 'sortie_fort_resource_in_champion',
     'sortie_fort_resource_in_middle': 'sortie_fort_resource_in_middle',
     'sortie_losses': 'sortie_losses',
     'sortie_wins': 'sortie_wins',
     'level': 'level',
     'defence_hour': 'defence_hour',
     'defence_mode_is_activated': 'defence_mode_is_activated',
     'fb_battles_count_10': 'fb_battles_count_10',
     'fb_battles_count_8': 'fb_battles_count_8',
     'total_resource_amount': 'total_resource_amount'})
    def get_stronghold_info(self, callback, clan_id=None, fields=None):
        get_params = urlencode({'performer_id': self._account})
        try:
            clan_id = int(clan_id)
        except (TypeError, ValueError):
            error = exceptions.BadRequest()
            return callback({'description': error.description}, error.status_code, error.response_code)

        url = 'api/strongholds/%s/' % clan_id
        if self._account:
            url = '?'.join([url, get_params])

        @preprocess_callback(callback, 'strongholds')
        def inner_callback(data):
            return data['stronghold']

        return self._request_data(inner_callback, 'strongholds', url)

    @convert_data({'vacation_finish': timestamp_to_datetime,
     'vacation_start': timestamp_to_datetime})
    @mapped_fields({'buildings.type': 'buildings.type',
     'buildings.hp': 'buildings.hp',
     'buildings.direction': 'buildings.direction',
     'buildings.position': 'buildings.position',
     'buildings.storage': 'buildings.resource_amount',
     'buildings.level': 'buildings.level',
     'buildings_count': 'buildings_count',
     'clan_id': 'clan_id',
     'level': 'level',
     'clan_name': 'clan_name',
     'clan_tag': 'clan_tag',
     'directions': 'directions',
     'directions_count': 'directions_count',
     'off_day': 'off_day',
     'periphery_id': 'periphery_id',
     'vacation_finish': 'vacation_finish',
     'vacation_start': 'vacation_start',
     'sortie_wins_period': 'sortie_wins_period',
     'sortie_battles_wins_percentage_period': 'sortie_battles_wins_percentage_period',
     'sortie_battles_count_period': 'sortie_battles_count_period',
     'defence_battles_count_period': 'defence_battles_count_period'})
    def get_strongholds_statistics(self, callback, clan_id, fields=None):
        get_params = urlencode({'performer_id': self._account})
        try:
            clan_id = int(clan_id)
        except (TypeError, ValueError):
            error = exceptions.BadRequest()
            return callback({'description': error.description}, error.status_code, error.response_code)

        url = '/api/strongholds/statistics/%s/' % clan_id
        if self._account:
            url = '?'.join([url, get_params])

        @preprocess_callback(callback, 'strongholds')
        def inner_callback(data):
            return data[0]

        return self._request_data(inner_callback, 'strongholds', url)

    def get_wgsh_unit_info(self, callback, periphery_id, unit_server_id, fields=None):
        try:
            periphery_id = int(periphery_id)
            unit_server_id = int(unit_server_id)
        except (TypeError, ValueError):
            error = exceptions.BadRequest()
            return callback({'description': error.description}, error.status_code, error.response_code)

        url = '/unit_api/periphery/{periphery_id}/units/{unit_server_id}/'.format(periphery_id=periphery_id, unit_server_id=unit_server_id)

        @preprocess_callback(callback, 'wgsh')
        def inner_callback(data):
            return data or {}

        return self._request_data(inner_callback, 'wgsh', url)

    def set_vehicle(self, callback, periphery_id, unit_server_id, vehicle_cd, fields=None):
        try:
            periphery_id = int(periphery_id)
            unit_server_id = int(unit_server_id)
        except (TypeError, ValueError):
            error = exceptions.BadRequest()
            return callback({'description': error.description}, error.status_code, error.response_code)

        url = '/unit_api/periphery/{periphery_id}/units/{unit_server_id}/members/{account_id}/vehicles'.format(periphery_id=periphery_id, unit_server_id=unit_server_id, account_id=self._account)
        patch_data = {'vehicle_cd': vehicle_cd}

        @preprocess_callback(callback, 'wgsh')
        def inner_callback(data):
            return data or {}

        return self._request_data(inner_callback, 'wgsh', url, method='PATCH', postData=patch_data)

    def set_readiness(self, callback, periphery_id, unit_server_id, is_ready, reset_vehicle, fields=None):
        try:
            periphery_id = int(periphery_id)
            unit_server_id = int(unit_server_id)
        except (TypeError, ValueError):
            error = exceptions.BadRequest()
            return callback({'description': error.description}, error.status_code, error.response_code)

        url = '/unit_api/periphery/{periphery_id}/units/{unit_server_id}/members/{account_id}/readiness'.format(periphery_id=periphery_id, unit_server_id=unit_server_id, account_id=self._account)
        patch_data = {'is_ready': is_ready,
         'reset_vehicle': reset_vehicle}

        @preprocess_callback(callback, 'wgsh')
        def inner_callback(data):
            return data or {}

        return self._request_data(inner_callback, 'wgsh', url, method='PATCH', postData=patch_data)

    def invite_players(self, callback, periphery_id, unit_server_id, accounts_to_invite, comment, fields=None):
        try:
            periphery_id = int(periphery_id)
            unit_server_id = int(unit_server_id)
        except (TypeError, ValueError):
            error = exceptions.BadRequest()
            return callback({'description': error.description}, error.status_code, error.response_code)

        url = '/unit_api/periphery/{periphery_id}/units/{unit_server_id}/participants/{account_id}/invite'.format(periphery_id=periphery_id, unit_server_id=unit_server_id, account_id=self._account)
        post_data = {'accounts_to_invite': accounts_to_invite,
         'comment': comment}

        @preprocess_callback(callback, 'wgsh')
        def inner_callback(data):
            return data or {}

        return self._request_data(inner_callback, 'wgsh', url, method='POST', postData=post_data)

    def assign_player(self, callback, periphery_id, unit_server_id, account_to_assign, fields=None):
        try:
            periphery_id = int(periphery_id)
            unit_server_id = int(unit_server_id)
        except (TypeError, ValueError):
            error = exceptions.BadRequest()
            return callback({'description': error.description}, error.status_code, error.response_code)

        url = '/unit_api/periphery/{periphery_id}/units/{unit_server_id}/participants/{account_id}/assign'.format(periphery_id=periphery_id, unit_server_id=unit_server_id, account_id=self._account)
        post_data = {'account_to_assign': account_to_assign}

        @preprocess_callback(callback, 'wgsh')
        def inner_callback(data):
            return data or {}

        return self._request_data(inner_callback, 'wgsh', url, method='POST', postData=post_data)

    def unassign_player(self, callback, periphery_id, unit_server_id, account_to_unassign, fields=None):
        try:
            periphery_id = int(periphery_id)
            unit_server_id = int(unit_server_id)
        except (TypeError, ValueError):
            error = exceptions.BadRequest()
            return callback({'description': error.description}, error.status_code, error.response_code)

        url = '/unit_api/periphery/{periphery_id}/units/{unit_server_id}/participants/{account_id}/unassign'.format(periphery_id=periphery_id, unit_server_id=unit_server_id, account_id=self._account)
        post_data = {'account_to_unassign': account_to_unassign}

        @preprocess_callback(callback, 'wgsh')
        def inner_callback(data):
            return data or {}

        return self._request_data(inner_callback, 'wgsh', url, method='POST', postData=post_data)

    def give_leadership(self, callback, periphery_id, unit_server_id, target_account_id, fields=None):
        try:
            periphery_id = int(periphery_id)
            unit_server_id = int(unit_server_id)
        except (TypeError, ValueError):
            error = exceptions.BadRequest()
            return callback({'description': error.description}, error.status_code, error.response_code)

        url = '/unit_api/periphery/{periphery_id}/units/{unit_server_id}/members/{account_id}/give_leadership'.format(periphery_id=periphery_id, unit_server_id=unit_server_id, account_id=self._account)
        post_data = {'target_account_id': target_account_id}

        @preprocess_callback(callback, 'wgsh')
        def inner_callback(data):
            return data or {}

        return self._request_data(inner_callback, 'wgsh', url, method='PATCH', postData=post_data)

    def set_equipment_commander(self, callback, periphery_id, unit_server_id, target_account_id, fields=None):
        try:
            periphery_id = int(periphery_id)
            unit_server_id = int(unit_server_id)
        except (TypeError, ValueError):
            error = exceptions.BadRequest()
            return callback({'description': error.description}, error.status_code, error.response_code)

        url = '/unit_api/periphery/{periphery_id}/units/{unit_server_id}/members/{account_id}/equipment_commander'.format(periphery_id=periphery_id, unit_server_id=unit_server_id, account_id=self._account)
        post_data = {'equipment_commander_id': target_account_id}

        @preprocess_callback(callback, 'wgsh')
        def inner_callback(data):
            return data or {}

        return self._request_data(inner_callback, 'wgsh', url, method='PATCH', postData=post_data)

    def leave_room(self, callback, periphery_id, unit_server_id, fields=None):
        try:
            periphery_id = int(periphery_id)
            unit_server_id = int(unit_server_id)
        except (TypeError, ValueError):
            error = exceptions.BadRequest()
            return callback({'description': error.description}, error.status_code, error.response_code)

        url = '/unit_api/periphery/{periphery_id}/units/{unit_server_id}/participants/{account_id}/leave'.format(periphery_id=periphery_id, unit_server_id=unit_server_id, account_id=self._account)

        @preprocess_callback(callback, 'wgsh')
        def inner_callback(data):
            return data or {}

        return self._request_data(inner_callback, 'wgsh', url, method='POST')

    def take_away_leadership(self, callback, periphery_id, unit_server_id, fields=None):
        try:
            periphery_id = int(periphery_id)
            unit_server_id = int(unit_server_id)
        except (TypeError, ValueError):
            error = exceptions.BadRequest()
            return callback({'description': error.description}, error.status_code, error.response_code)

        url = '/unit_api/periphery/{periphery}/units/{unit}/participants/{account}/take_away_leadership'.format(periphery=periphery_id, unit=unit_server_id, account=self._account)

        @preprocess_callback(callback, 'wgsh')
        def inner_callback(data):
            return data or {}

        return self._request_data(inner_callback, 'wgsh', url, method='PATCH')

    def kick_player(self, callback, periphery_id, unit_server_id, account_to_kick, fields=None):
        try:
            periphery_id = int(periphery_id)
            unit_server_id = int(unit_server_id)
        except (TypeError, ValueError):
            error = exceptions.BadRequest()
            return callback({'description': error.description}, error.status_code, error.response_code)

        url = '/unit_api/periphery/{periphery_id}/units/{unit_server_id}/members/{account_id}/kick'.format(periphery_id=periphery_id, unit_server_id=unit_server_id, account_id=self._account)
        post_data = {'account_to_kick': account_to_kick}

        @preprocess_callback(callback, 'wgsh')
        def inner_callback(data):
            return data or {}

        return self._request_data(inner_callback, 'wgsh', url, method='POST', postData=post_data)

    def set_open(self, callback, periphery_id, unit_server_id, is_open, fields=None):
        try:
            periphery_id = int(periphery_id)
            unit_server_id = int(unit_server_id)
        except (TypeError, ValueError):
            error = exceptions.BadRequest()
            return callback({'description': error.description}, error.status_code, error.response_code)

        url = '/unit_api/periphery/{periphery_id}/units/{unit_server_id}/participants/{account_id}/set_open'.format(periphery_id=periphery_id, unit_server_id=unit_server_id, account_id=self._account)
        post_data = {'is_open': is_open}

        @preprocess_callback(callback, 'wgsh')
        def inner_callback(data):
            return data or {}

        return self._request_data(inner_callback, 'wgsh', url, method='PATCH', postData=post_data)

    def lock_reserve(self, callback, periphery_id, unit_server_id, reserve_id, fields=None):
        try:
            periphery_id = int(periphery_id)
            unit_server_id = int(unit_server_id)
        except (TypeError, ValueError):
            error = exceptions.BadRequest()
            return callback({'description': error.description}, error.status_code, error.response_code)

        url = '/unit_api/periphery/{periphery_id}/units/{unit_server_id}/members/{account_id}/lock_reserve'.format(periphery_id=periphery_id, unit_server_id=unit_server_id, account_id=self._account)
        post_data = {'reserve_id': reserve_id}

        @preprocess_callback(callback, 'wgsh')
        def inner_callback(data):
            return data or {}

        return self._request_data(inner_callback, 'wgsh', url, method='POST', postData=post_data)

    def unlock_reserve(self, callback, periphery_id, unit_server_id, reserve_id, fields=None):
        try:
            periphery_id = int(periphery_id)
            unit_server_id = int(unit_server_id)
        except (TypeError, ValueError):
            error = exceptions.BadRequest()
            return callback({'description': error.description}, error.status_code, error.response_code)

        url = '/unit_api/periphery/{periphery_id}/units/{unit_server_id}/members/{account_id}/unlock_reserve'.format(periphery_id=periphery_id, unit_server_id=unit_server_id, account_id=self._account)
        post_data = {'reserve_id': reserve_id}

        @preprocess_callback(callback, 'wgsh')
        def inner_callback(data):
            return data or {}

        return self._request_data(inner_callback, 'wgsh', url, method='POST', postData=post_data)

    def clan_statistics(self, callback, clan_id, fields=None):
        try:
            clan_id = int(clan_id)
        except (TypeError, ValueError):
            error = exceptions.BadRequest()
            return callback({'description': error.description}, error.status_code, error.response_code)

        url = '/external_api/v1/clan_card/{clan_id}'.format(clan_id=clan_id)

        @preprocess_callback(callback, 'wgsh')
        def inner_callback(data):
            return data or {}

        return self._request_data(inner_callback, 'wgsh', url, method='GET')

    def account_statistics(self, callback, account_id, fields=None):
        try:
            account_id = int(account_id)
        except (TypeError, ValueError):
            error = exceptions.BadRequest()
            return callback({'description': error.description}, error.status_code, error.response_code)

        url = '/external_api/accounts/{account_id}'.format(account_id=account_id)

        @preprocess_callback(callback, 'wgsh')
        def inner_callback(data):
            return data or {}

        return self._request_data(inner_callback, 'wgsh', url, method='GET')

    def join_room(self, callback, periphery_id, unit_server_id, fields=None):
        try:
            periphery_id = int(periphery_id)
            unit_server_id = int(unit_server_id)
        except (TypeError, ValueError):
            error = exceptions.BadRequest()
            return callback({'description': error.description}, error.status_code, error.response_code)

        url = '/unit_api/periphery/{periphery_id}/units/{unit_server_id}/members/{account_id}/join'.format(periphery_id=periphery_id, unit_server_id=unit_server_id, account_id=self._account)

        @preprocess_callback(callback, 'wgsh')
        def inner_callback(data):
            return data or {}

        return self._request_data(inner_callback, 'wgsh', url, method='POST')

    def user_season_statistics(self, callback, fields=None):
        url = '/user-season-statistics/{account_id}/'.format(account_id=self._account)

        @preprocess_callback(callback, 'rblb')
        def inner_callback(data):
            return data or {}

        return self._request_data(inner_callback, 'rblb', url, method='GET')

    def user_ranked_position(self, callback, fields=None):
        url = '/user-position/{account_id}/'.format(account_id=self._account)

        @preprocess_callback(callback, 'rblb')
        def inner_callback(data):
            return data or {}

        return self._request_data(inner_callback, 'rblb', url, method='GET')

    @convert_data({'defence_hour': lambda x: dt_time(x, 0) if x >= 0 else None})
    @mapped_fields({'clan_id': 'clan_id',
     'defence_hour': 'defence_hour'})
    def get_strongholds_state(self, callback, clan_id, fields=None):
        get_params = {'clan_id': clan_id}
        try:
            clan_id = int(clan_id)
        except (TypeError, ValueError):
            error = exceptions.BadRequest()
            return callback({'description': error.description}, error.status_code, error.response_code)

        if self._account:
            get_params['performer_id'] = self._account
        url = '/api/strongholds/state/?%s' % urlencode(get_params)

        @preprocess_callback(callback, 'strongholds')
        def inner_callback(data):
            return data and data[0] or {}

        return self._request_data(inner_callback, 'strongholds', url)
Example #32
0
DICT_TYPE = type(dict())
LIST_TYPE = type(list())
TUPLE_TYPE = type(tuple())
STR_TYPE = type(str())
UNICODE_TYPE = type(unicode())
FLOAT_TYPE = type(float())
INT_TYPE = type(int())
LONG_TYPE = type(long())
BOOL_TYPE = type(bool())

LIST_TYPES = (LIST_TYPE, TUPLE_TYPE)
STR_TYPES = (STR_TYPE, UNICODE_TYPE)
NOMINAL_TYPES = (INT_TYPE, FLOAT_TYPE, LONG_TYPE)

NONE_VALUES = (None, '---', '', 'None')

# 00:00:00; used to combine with date() to form datetime() at midnight
NULL_TIME = dt_time()
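# e.g. datetime.combine(some_date, NULL_TIME) -> midnight (00:00:00) on some_date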

# Convenience constants; these are expiration times in seconds
# console escapes for colored output
WHITE = '\033[97m'
BLUE = '\033[96m'
YELLOW = '\033[93m'
GREEN = '\033[92m'
RED = '\033[91m'
ENDC = '\033[0m'

# date only
RE_DATE = re.compile(r'(\d\d\d\d)-(\d\d)-(\d\d)')
# date only range syntax
RE_DATE_RANGE = re.compile(r'(\d\d\d\d-\d\d-\d\d):?(\d\d\d\d-\d\d-\d\d)?')
# datetime only
Example #33
0
class ServiceEventSchedulingNotice(models.Model):

    ALL = 0
    DUE = 10
    UPCOMING_AND_DUE = 20
    UPCOMING = 30

    NOTIFICATION_TYPES = (
        (ALL, _l("Notify About All Service Event Schedule Due Dates")),
        (DUE,
         _l("Notify About Scheduled Service Events Currently Due & Overdue")),
        (UPCOMING_AND_DUE,
         _l("Notify About Scheduled Service Events Currently Due & Overdue, and Upcoming Due Dates"
            )),
        (UPCOMING,
         _l("Notify About Scheduled Service Events Upcoming Due Dates Only")),
    )

    TIME_CHOICES = [(dt_time(x // 60, x % 60), "%02d:%02d" % (x // 60, x % 60))
                    for x in range(0, 24 * 60, 15)]
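    # 96 (time, "HH:MM") pairs at 15-minute steps, from 00:00 through 23:45.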

    notification_type = models.IntegerField(
        verbose_name=_l("Notification Type"),
        choices=NOTIFICATION_TYPES,
    )

    send_empty = models.BooleanField(
        verbose_name=_l("Send Empty Notices"),
        help_text=_l(
            "Check to send notices even if there's no QC to currently notify about"
        ),
        default=False,
    )

    recurrences = RecurrenceField(
        verbose_name=_l("Recurrences"),
        help_text=_l(
            "Define the schedule this notification should be sent on."),
        default="",
    )

    time = models.TimeField(
        verbose_name=_l("Time of day"),
        help_text=_l(
            "Set the time of day this notice should be sent (00:00-23:59)."),
        choices=TIME_CHOICES,
    )

    future_days = models.PositiveIntegerField(
        verbose_name=_l("Future Days"),
        blank=True,
        null=True,
        help_text=_l(
            "How many days in the future should notices about upcoming QC due dates include. "
            "A value of zero will only include test lists due today."),
    )

    recipients = models.ForeignKey(
        RecipientGroup,
        verbose_name=_l("Recipients"),
        help_text=_l(
            "Choose the group of recipients who should receive these notifications"
        ),
        on_delete=models.PROTECT,
    )

    units = models.ForeignKey(
        UnitGroup,
        verbose_name=_l("Unit Group filter"),
        help_text=_l(
            "Select which group of Units this notification should be limited to. Leave blank to include all units"
        ),
        null=True,
        blank=True,
        on_delete=models.PROTECT,
    )

    last_sent = models.DateTimeField(null=True, editable=False)

    class Meta:
        verbose_name = _l("Service Event Scheduling Notice")

    @property
    def is_all(self):
        return self.notification_type == self.ALL

    @property
    def is_due(self):
        return self.notification_type == self.DUE

    @property
    def is_upcoming_and_due(self):
        return self.notification_type == self.UPCOMING_AND_DUE

    @property
    def is_upcoming(self):
        return self.notification_type == self.UPCOMING

    def schedules(self):
        """Return ServiceEventSchedule relevant to this notice"""

        schedules = ServiceEventSchedule.objects.filter(active=True)

        if self.units_id:
            schedules = schedules.filter(
                unit_service_area__unit__in=self.units.units.all())

        return schedules.order_by(
            "unit_service_area__unit__%s" % settings.ORDER_UNITS_BY,
            "unit_service_area__service_area__name",
            "due_date",
        )

    def all(self):
        return self.schedules().exclude(due_date=None)

    def upcoming(self, include_overdue=False):
        """Return Schedules that will be coming due in the future. Optionally
        include test lists that are currently due and overdue"""

        start, end = today_start_end()
        end = end + timezone.timedelta(days=self.future_days)

        schedules = self.schedules().filter(due_date__lte=end)
        if not include_overdue:
            schedules = schedules.filter(due_date__gte=start)

        return schedules

    def due_and_overdue(self):
        """Return Schedules that are currently due or overdue"""
        start, end = today_start_end()
        return self.schedules().filter(due_date__lte=end)

    def upcoming_and_due(self):
        """Return Schedules that are either coming due soon or currently due or overdue"""
        return self.upcoming(include_overdue=True)

    def schedules_to_notify(self):
        dispatch = {
            self.ALL: self.all,
            self.DUE: self.due_and_overdue,
            self.UPCOMING_AND_DUE: self.upcoming_and_due,
            self.UPCOMING: self.upcoming,
        }
        return dispatch[self.notification_type]()

    def send_required(self):
        return self.send_empty or self.schedules_to_notify().count() > 0
Example #34
0
    def testTimeOnlyOffset(self):
        # tzoffset doesn't care
        tz_offset = tz.tzoffset('+3', 3600)
        self.assertEqual(
            dt_time(13, 20, tzinfo=tz_offset).utcoffset(),
            timedelta(seconds=3600))
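
The same fixed offset applies to full datetimes as well; a minimal sketch using dateutil's tz.tzoffset (values chosen for illustration):

from datetime import datetime, timedelta
from dateutil import tz

d = datetime(2024, 1, 1, 13, 20, tzinfo=tz.tzoffset('+3', 3600))
assert d.utcoffset() == timedelta(seconds=3600)
assert d.tzname() == '+3'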
Example #35
0
def get_forecast_times(forecast_length,
                       forecast_date=None,
                       forecast_time=None):
    """
    Generate a list of python datetime objects specifying the desired forecast
    times. This list will be created from input specifications if provided.
    Otherwise defaults are to start today at the most recent 6-hourly interval
    (00, 06, 12, 18) and to run out to T+144 hours.

    Args:
        forecast_length (int):
            An integer giving the desired length of the forecast output in
            hours (e.g. 48 for a two day forecast period).

        forecast_date (string (YYYYMMDD)):
            A string of format YYYYMMDD defining the start date for which
            forecasts are required. If unset it defaults to today in UTC.

        forecast_time (int):
            An integer giving the hour on the forecast_date at which to start
            the forecast output; 24hr clock such that 17 = 17Z for example. If
            unset it defaults to the latest 6 hour cycle as a start time.

    Returns:
        forecast_times (list of datetime.datetime objects):
            A list of python datetime.datetime objects that represent the
            times at which diagnostic data should be extracted.

    Raises:
        ValueError : raised if the input date is not in the expected format.

    """
    date_format = re.compile('[0-9]{8}')

    if forecast_date is None:
        start_date = datetime.utcnow().date()
    else:
        if date_format.match(forecast_date) and len(forecast_date) == 8:
            start_date = datetime.strptime(forecast_date, "%Y%m%d").date()
        else:
            raise ValueError('Date {} is in unexpected format; should be '
                             'YYYYMMDD.'.format(forecast_date))

    if forecast_time is None:
        # If no start hour provided, go back to the nearest multiple of 6
        # hours (e.g. utcnow = 11Z --> 06Z).
        forecast_start_time = datetime.combine(
            start_date, dt_time(divmod(datetime.utcnow().hour, 6)[0] * 6))
    else:
        forecast_start_time = datetime.combine(start_date,
                                               dt_time(forecast_time))

    # Generate forecast times. Hourly to T+48, 3 hourly to T+forecast_length.
    forecast_times = [
        forecast_start_time + timedelta(hours=x)
        for x in range(min(forecast_length, 49))
    ]
    forecast_times = (forecast_times + [
        forecast_start_time + timedelta(hours=x)
        for x in range(51, forecast_length + 1, 3)
    ])

    return forecast_times
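
A hedged usage sketch (illustrative values, not from the original source): with a 12-hour forecast length, an explicit date, and a 06Z cycle, only the hourly branch applies, so twelve datetimes from T+0 through T+11 come back.

times = get_forecast_times(12, forecast_date='20240101', forecast_time=6)
assert len(times) == 12
assert times[0] == datetime(2024, 1, 1, 6, 0)
assert times[-1] == datetime(2024, 1, 1, 17, 0)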
Example #36
0
class TestBusinessHour(Base):
    _offset = BusinessHour

    def setup_method(self, method):
        self.d = datetime(2014, 7, 1, 10, 00)

        self.offset1 = BusinessHour()
        self.offset2 = BusinessHour(n=3)

        self.offset3 = BusinessHour(n=-1)
        self.offset4 = BusinessHour(n=-4)

        from datetime import time as dt_time

        self.offset5 = BusinessHour(start=dt_time(11, 0), end=dt_time(14, 30))
        self.offset6 = BusinessHour(start="20:00", end="05:00")
        self.offset7 = BusinessHour(n=-2,
                                    start=dt_time(21, 30),
                                    end=dt_time(6, 30))
        self.offset8 = BusinessHour(start=["09:00", "13:00"],
                                    end=["12:00", "17:00"])
        self.offset9 = BusinessHour(n=3,
                                    start=["09:00", "22:00"],
                                    end=["13:00", "03:00"])
        self.offset10 = BusinessHour(n=-1,
                                     start=["23:00", "13:00"],
                                     end=["02:00", "17:00"])

    @pytest.mark.parametrize(
        "start,end,match",
        [
            (
                dt_time(11, 0, 5),
                "17:00",
                "time data must be specified only with hour and minute",
            ),
            ("AAA", "17:00", "time data must match '%H:%M' format"),
            ("14:00:05", "17:00", "time data must match '%H:%M' format"),
            ([], "17:00", "Must include at least 1 start time"),
            ("09:00", [], "Must include at least 1 end time"),
            (
                ["09:00", "11:00"],
                "17:00",
                "number of starting time and ending time must be the same",
            ),
            (
                ["09:00", "11:00"],
                ["10:00"],
                "number of starting time and ending time must be the same",
            ),
            (
                ["09:00", "11:00"],
                ["12:00", "20:00"],
                r"invalid starting and ending time\(s\): opening hours should not "
                "touch or overlap with one another",
            ),
            (
                ["12:00", "20:00"],
                ["09:00", "11:00"],
                r"invalid starting and ending time\(s\): opening hours should not "
                "touch or overlap with one another",
            ),
        ],
    )
    def test_constructor_errors(self, start, end, match):
        with pytest.raises(ValueError, match=match):
            BusinessHour(start=start, end=end)

    def test_different_normalize_equals(self):
        # GH#21404 changed __eq__ to return False when `normalize` does not match
        offset = self._offset()
        offset2 = self._offset(normalize=True)
        assert offset != offset2

    def test_repr(self):
        assert repr(self.offset1) == "<BusinessHour: BH=09:00-17:00>"
        assert repr(self.offset2) == "<3 * BusinessHours: BH=09:00-17:00>"
        assert repr(self.offset3) == "<-1 * BusinessHour: BH=09:00-17:00>"
        assert repr(self.offset4) == "<-4 * BusinessHours: BH=09:00-17:00>"

        assert repr(self.offset5) == "<BusinessHour: BH=11:00-14:30>"
        assert repr(self.offset6) == "<BusinessHour: BH=20:00-05:00>"
        assert repr(self.offset7) == "<-2 * BusinessHours: BH=21:30-06:30>"
        assert repr(
            self.offset8) == "<BusinessHour: BH=09:00-12:00,13:00-17:00>"
        assert repr(
            self.offset9) == "<3 * BusinessHours: BH=09:00-13:00,22:00-03:00>"
        assert repr(
            self.offset10) == "<-1 * BusinessHour: BH=13:00-17:00,23:00-02:00>"

    def test_with_offset(self):
        expected = Timestamp("2014-07-01 13:00")

        assert self.d + BusinessHour() * 3 == expected
        assert self.d + BusinessHour(n=3) == expected

    @pytest.mark.parametrize(
        "offset_name",
        [
            "offset1", "offset2", "offset3", "offset4", "offset8", "offset9",
            "offset10"
        ],
    )
    def test_eq_attribute(self, offset_name):
        offset = getattr(self, offset_name)
        assert offset == offset

    @pytest.mark.parametrize(
        "offset1,offset2",
        [
            (BusinessHour(start="09:00"), BusinessHour()),
            (
                BusinessHour(start=["23:00", "13:00"], end=["12:00", "17:00"]),
                BusinessHour(start=["13:00", "23:00"], end=["17:00", "12:00"]),
            ),
        ],
    )
    def test_eq(self, offset1, offset2):
        assert offset1 == offset2

    @pytest.mark.parametrize(
        "offset1,offset2",
        [
            (BusinessHour(), BusinessHour(-1)),
            (BusinessHour(start="09:00"), BusinessHour(start="09:01")),
            (
                BusinessHour(start="09:00", end="17:00"),
                BusinessHour(start="17:00", end="09:01"),
            ),
            (
                BusinessHour(start=["13:00", "23:00"], end=["18:00", "07:00"]),
                BusinessHour(start=["13:00", "23:00"], end=["17:00", "12:00"]),
            ),
        ],
    )
    def test_neq(self, offset1, offset2):
        assert offset1 != offset2

    @pytest.mark.parametrize(
        "offset_name",
        [
            "offset1", "offset2", "offset3", "offset4", "offset8", "offset9",
            "offset10"
        ],
    )
    def test_hash(self, offset_name):
        offset = getattr(self, offset_name)
        assert offset == offset

    def test_call(self):
        with tm.assert_produces_warning(FutureWarning):
            # GH#34171 DateOffset.__call__ is deprecated
            assert self.offset1(self.d) == datetime(2014, 7, 1, 11)
            assert self.offset2(self.d) == datetime(2014, 7, 1, 13)
            assert self.offset3(self.d) == datetime(2014, 6, 30, 17)
            assert self.offset4(self.d) == datetime(2014, 6, 30, 14)
            assert self.offset8(self.d) == datetime(2014, 7, 1, 11)
            assert self.offset9(self.d) == datetime(2014, 7, 1, 22)
            assert self.offset10(self.d) == datetime(2014, 7, 1, 1)

    def test_sub(self):
        # we have to override test_sub here because self.offset2 is not
        # defined as self._offset(2)
        off = self.offset2
        msg = "Cannot subtract datetime from offset"
        with pytest.raises(TypeError, match=msg):
            off - self.d
        assert 2 * off - off == off

        assert self.d - self.offset2 == self.d + self._offset(-3)

    def testRollback1(self):
        assert self.offset1.rollback(self.d) == self.d
        assert self.offset2.rollback(self.d) == self.d
        assert self.offset3.rollback(self.d) == self.d
        assert self.offset4.rollback(self.d) == self.d
        assert self.offset5.rollback(self.d) == datetime(2014, 6, 30, 14, 30)
        assert self.offset6.rollback(self.d) == datetime(2014, 7, 1, 5, 0)
        assert self.offset7.rollback(self.d) == datetime(2014, 7, 1, 6, 30)
        assert self.offset8.rollback(self.d) == self.d
        assert self.offset9.rollback(self.d) == self.d
        assert self.offset10.rollback(self.d) == datetime(2014, 7, 1, 2)

        d = datetime(2014, 7, 1, 0)
        assert self.offset1.rollback(d) == datetime(2014, 6, 30, 17)
        assert self.offset2.rollback(d) == datetime(2014, 6, 30, 17)
        assert self.offset3.rollback(d) == datetime(2014, 6, 30, 17)
        assert self.offset4.rollback(d) == datetime(2014, 6, 30, 17)
        assert self.offset5.rollback(d) == datetime(2014, 6, 30, 14, 30)
        assert self.offset6.rollback(d) == d
        assert self.offset7.rollback(d) == d
        assert self.offset8.rollback(d) == datetime(2014, 6, 30, 17)
        assert self.offset9.rollback(d) == d
        assert self.offset10.rollback(d) == d

        assert self._offset(5).rollback(self.d) == self.d

    def testRollback2(self):
        assert self._offset(-3).rollback(datetime(2014, 7, 5, 15,
                                                  0)) == datetime(
                                                      2014, 7, 4, 17, 0)

    def testRollforward1(self):
        assert self.offset1.rollforward(self.d) == self.d
        assert self.offset2.rollforward(self.d) == self.d
        assert self.offset3.rollforward(self.d) == self.d
        assert self.offset4.rollforward(self.d) == self.d
        assert self.offset5.rollforward(self.d) == datetime(2014, 7, 1, 11, 0)
        assert self.offset6.rollforward(self.d) == datetime(2014, 7, 1, 20, 0)
        assert self.offset7.rollforward(self.d) == datetime(2014, 7, 1, 21, 30)
        assert self.offset8.rollforward(self.d) == self.d
        assert self.offset9.rollforward(self.d) == self.d
        assert self.offset10.rollforward(self.d) == datetime(2014, 7, 1, 13)

        d = datetime(2014, 7, 1, 0)
        assert self.offset1.rollforward(d) == datetime(2014, 7, 1, 9)
        assert self.offset2.rollforward(d) == datetime(2014, 7, 1, 9)
        assert self.offset3.rollforward(d) == datetime(2014, 7, 1, 9)
        assert self.offset4.rollforward(d) == datetime(2014, 7, 1, 9)
        assert self.offset5.rollforward(d) == datetime(2014, 7, 1, 11)
        assert self.offset6.rollforward(d) == d
        assert self.offset7.rollforward(d) == d
        assert self.offset8.rollforward(d) == datetime(2014, 7, 1, 9)
        assert self.offset9.rollforward(d) == d
        assert self.offset10.rollforward(d) == d

        assert self._offset(5).rollforward(self.d) == self.d

    def testRollforward2(self):
        assert self._offset(-3).rollforward(
            datetime(2014, 7, 5, 16, 0)) == datetime(2014, 7, 7, 9)

    def test_roll_date_object(self):
        offset = BusinessHour()

        dt = datetime(2014, 7, 6, 15, 0)

        result = offset.rollback(dt)
        assert result == datetime(2014, 7, 4, 17)

        result = offset.rollforward(dt)
        assert result == datetime(2014, 7, 7, 9)

    normalize_cases = []
    normalize_cases.append((
        BusinessHour(normalize=True),
        {
            datetime(2014, 7, 1, 8): datetime(2014, 7, 1),
            datetime(2014, 7, 1, 17): datetime(2014, 7, 2),
            datetime(2014, 7, 1, 16): datetime(2014, 7, 2),
            datetime(2014, 7, 1, 23): datetime(2014, 7, 2),
            datetime(2014, 7, 1, 0): datetime(2014, 7, 1),
            datetime(2014, 7, 4, 15): datetime(2014, 7, 4),
            datetime(2014, 7, 4, 15, 59): datetime(2014, 7, 4),
            datetime(2014, 7, 4, 16, 30): datetime(2014, 7, 7),
            datetime(2014, 7, 5, 23): datetime(2014, 7, 7),
            datetime(2014, 7, 6, 10): datetime(2014, 7, 7),
        },
    ))

    normalize_cases.append((
        BusinessHour(-1, normalize=True),
        {
            datetime(2014, 7, 1, 8): datetime(2014, 6, 30),
            datetime(2014, 7, 1, 17): datetime(2014, 7, 1),
            datetime(2014, 7, 1, 16): datetime(2014, 7, 1),
            datetime(2014, 7, 1, 10): datetime(2014, 6, 30),
            datetime(2014, 7, 1, 0): datetime(2014, 6, 30),
            datetime(2014, 7, 7, 10): datetime(2014, 7, 4),
            datetime(2014, 7, 7, 10, 1): datetime(2014, 7, 7),
            datetime(2014, 7, 5, 23): datetime(2014, 7, 4),
            datetime(2014, 7, 6, 10): datetime(2014, 7, 4),
        },
    ))

    normalize_cases.append((
        BusinessHour(1, normalize=True, start="17:00", end="04:00"),
        {
            datetime(2014, 7, 1, 8): datetime(2014, 7, 1),
            datetime(2014, 7, 1, 17): datetime(2014, 7, 1),
            datetime(2014, 7, 1, 23): datetime(2014, 7, 2),
            datetime(2014, 7, 2, 2): datetime(2014, 7, 2),
            datetime(2014, 7, 2, 3): datetime(2014, 7, 2),
            datetime(2014, 7, 4, 23): datetime(2014, 7, 5),
            datetime(2014, 7, 5, 2): datetime(2014, 7, 5),
            datetime(2014, 7, 7, 2): datetime(2014, 7, 7),
            datetime(2014, 7, 7, 17): datetime(2014, 7, 7),
        },
    ))

    @pytest.mark.parametrize("case", normalize_cases)
    def test_normalize(self, case):
        offset, cases = case
        for dt, expected in cases.items():
            assert offset.apply(dt) == expected

    on_offset_cases = []
    on_offset_cases.append((
        BusinessHour(),
        {
            datetime(2014, 7, 1, 9): True,
            datetime(2014, 7, 1, 8, 59): False,
            datetime(2014, 7, 1, 8): False,
            datetime(2014, 7, 1, 17): True,
            datetime(2014, 7, 1, 17, 1): False,
            datetime(2014, 7, 1, 18): False,
            datetime(2014, 7, 5, 9): False,
            datetime(2014, 7, 6, 12): False,
        },
    ))

    on_offset_cases.append((
        BusinessHour(start="10:00", end="15:00"),
        {
            datetime(2014, 7, 1, 9): False,
            datetime(2014, 7, 1, 10): True,
            datetime(2014, 7, 1, 15): True,
            datetime(2014, 7, 1, 15, 1): False,
            datetime(2014, 7, 5, 12): False,
            datetime(2014, 7, 6, 12): False,
        },
    ))

    on_offset_cases.append((
        BusinessHour(start="19:00", end="05:00"),
        {
            datetime(2014, 7, 1, 9, 0): False,
            datetime(2014, 7, 1, 10, 0): False,
            datetime(2014, 7, 1, 15): False,
            datetime(2014, 7, 1, 15, 1): False,
            datetime(2014, 7, 5, 12, 0): False,
            datetime(2014, 7, 6, 12, 0): False,
            datetime(2014, 7, 1, 19, 0): True,
            datetime(2014, 7, 2, 0, 0): True,
            datetime(2014, 7, 4, 23): True,
            datetime(2014, 7, 5, 1): True,
            datetime(2014, 7, 5, 5, 0): True,
            datetime(2014, 7, 6, 23, 0): False,
            datetime(2014, 7, 7, 3, 0): False,
        },
    ))

    on_offset_cases.append((
        BusinessHour(start=["09:00", "13:00"], end=["12:00", "17:00"]),
        {
            datetime(2014, 7, 1, 9): True,
            datetime(2014, 7, 1, 8, 59): False,
            datetime(2014, 7, 1, 8): False,
            datetime(2014, 7, 1, 17): True,
            datetime(2014, 7, 1, 17, 1): False,
            datetime(2014, 7, 1, 18): False,
            datetime(2014, 7, 5, 9): False,
            datetime(2014, 7, 6, 12): False,
            datetime(2014, 7, 1, 12, 30): False,
        },
    ))

    on_offset_cases.append((
        BusinessHour(start=["19:00", "23:00"], end=["21:00", "05:00"]),
        {
            datetime(2014, 7, 1, 9, 0): False,
            datetime(2014, 7, 1, 10, 0): False,
            datetime(2014, 7, 1, 15): False,
            datetime(2014, 7, 1, 15, 1): False,
            datetime(2014, 7, 5, 12, 0): False,
            datetime(2014, 7, 6, 12, 0): False,
            datetime(2014, 7, 1, 19, 0): True,
            datetime(2014, 7, 2, 0, 0): True,
            datetime(2014, 7, 4, 23): True,
            datetime(2014, 7, 5, 1): True,
            datetime(2014, 7, 5, 5, 0): True,
            datetime(2014, 7, 6, 23, 0): False,
            datetime(2014, 7, 7, 3, 0): False,
            datetime(2014, 7, 4, 22): False,
        },
    ))

    @pytest.mark.parametrize("case", on_offset_cases)
    def test_is_on_offset(self, case):
        offset, cases = case
        for dt, expected in cases.items():
            assert offset.is_on_offset(dt) == expected

    apply_cases = [
        (
            BusinessHour(),
            {
                datetime(2014, 7, 1, 11):
                datetime(2014, 7, 1, 12),
                datetime(2014, 7, 1, 13):
                datetime(2014, 7, 1, 14),
                datetime(2014, 7, 1, 15):
                datetime(2014, 7, 1, 16),
                datetime(2014, 7, 1, 19):
                datetime(2014, 7, 2, 10),
                datetime(2014, 7, 1, 16):
                datetime(2014, 7, 2, 9),
                datetime(2014, 7, 1, 16, 30, 15):
                datetime(2014, 7, 2, 9, 30, 15),
                datetime(2014, 7, 1, 17):
                datetime(2014, 7, 2, 10),
                datetime(2014, 7, 2, 11):
                datetime(2014, 7, 2, 12),
                # out of business hours
                datetime(2014, 7, 2, 8):
                datetime(2014, 7, 2, 10),
                datetime(2014, 7, 2, 19):
                datetime(2014, 7, 3, 10),
                datetime(2014, 7, 2, 23):
                datetime(2014, 7, 3, 10),
                datetime(2014, 7, 3, 0):
                datetime(2014, 7, 3, 10),
                # saturday
                datetime(2014, 7, 5, 15):
                datetime(2014, 7, 7, 10),
                datetime(2014, 7, 4, 17):
                datetime(2014, 7, 7, 10),
                datetime(2014, 7, 4, 16, 30):
                datetime(2014, 7, 7, 9, 30),
                datetime(2014, 7, 4, 16, 30, 30):
                datetime(2014, 7, 7, 9, 30, 30),
            },
        ),
        (
            BusinessHour(4),
            {
                datetime(2014, 7, 1, 11): datetime(2014, 7, 1, 15),
                datetime(2014, 7, 1, 13): datetime(2014, 7, 2, 9),
                datetime(2014, 7, 1, 15): datetime(2014, 7, 2, 11),
                datetime(2014, 7, 1, 16): datetime(2014, 7, 2, 12),
                datetime(2014, 7, 1, 17): datetime(2014, 7, 2, 13),
                datetime(2014, 7, 2, 11): datetime(2014, 7, 2, 15),
                datetime(2014, 7, 2, 8): datetime(2014, 7, 2, 13),
                datetime(2014, 7, 2, 19): datetime(2014, 7, 3, 13),
                datetime(2014, 7, 2, 23): datetime(2014, 7, 3, 13),
                datetime(2014, 7, 3, 0): datetime(2014, 7, 3, 13),
                datetime(2014, 7, 5, 15): datetime(2014, 7, 7, 13),
                datetime(2014, 7, 4, 17): datetime(2014, 7, 7, 13),
                datetime(2014, 7, 4, 16, 30): datetime(2014, 7, 7, 12, 30),
                datetime(2014, 7, 4, 16, 30, 30):
                datetime(2014, 7, 7, 12, 30, 30),
            },
        ),
        (
            BusinessHour(-1),
            {
                datetime(2014, 7, 1, 11):
                datetime(2014, 7, 1, 10),
                datetime(2014, 7, 1, 13):
                datetime(2014, 7, 1, 12),
                datetime(2014, 7, 1, 15):
                datetime(2014, 7, 1, 14),
                datetime(2014, 7, 1, 16):
                datetime(2014, 7, 1, 15),
                datetime(2014, 7, 1, 10):
                datetime(2014, 6, 30, 17),
                datetime(2014, 7, 1, 16, 30, 15):
                datetime(2014, 7, 1, 15, 30, 15),
                datetime(2014, 7, 1, 9, 30, 15):
                datetime(2014, 6, 30, 16, 30, 15),
                datetime(2014, 7, 1, 17):
                datetime(2014, 7, 1, 16),
                datetime(2014, 7, 1, 5):
                datetime(2014, 6, 30, 16),
                datetime(2014, 7, 2, 11):
                datetime(2014, 7, 2, 10),
                # out of business hours
                datetime(2014, 7, 2, 8):
                datetime(2014, 7, 1, 16),
                datetime(2014, 7, 2, 19):
                datetime(2014, 7, 2, 16),
                datetime(2014, 7, 2, 23):
                datetime(2014, 7, 2, 16),
                datetime(2014, 7, 3, 0):
                datetime(2014, 7, 2, 16),
                # saturday
                datetime(2014, 7, 5, 15):
                datetime(2014, 7, 4, 16),
                datetime(2014, 7, 7, 9):
                datetime(2014, 7, 4, 16),
                datetime(2014, 7, 7, 9, 30):
                datetime(2014, 7, 4, 16, 30),
                datetime(2014, 7, 7, 9, 30, 30):
                datetime(2014, 7, 4, 16, 30, 30),
            },
        ),
        (
            BusinessHour(-4),
            {
                datetime(2014, 7, 1, 11): datetime(2014, 6, 30, 15),
                datetime(2014, 7, 1, 13): datetime(2014, 6, 30, 17),
                datetime(2014, 7, 1, 15): datetime(2014, 7, 1, 11),
                datetime(2014, 7, 1, 16): datetime(2014, 7, 1, 12),
                datetime(2014, 7, 1, 17): datetime(2014, 7, 1, 13),
                datetime(2014, 7, 2, 11): datetime(2014, 7, 1, 15),
                datetime(2014, 7, 2, 8): datetime(2014, 7, 1, 13),
                datetime(2014, 7, 2, 19): datetime(2014, 7, 2, 13),
                datetime(2014, 7, 2, 23): datetime(2014, 7, 2, 13),
                datetime(2014, 7, 3, 0): datetime(2014, 7, 2, 13),
                datetime(2014, 7, 5, 15): datetime(2014, 7, 4, 13),
                datetime(2014, 7, 4, 18): datetime(2014, 7, 4, 13),
                datetime(2014, 7, 7, 9, 30): datetime(2014, 7, 4, 13, 30),
                datetime(2014, 7, 7, 9, 30, 30):
                datetime(2014, 7, 4, 13, 30, 30),
            },
        ),
        (
            BusinessHour(start="13:00", end="16:00"),
            {
                datetime(2014, 7, 1, 11): datetime(2014, 7, 1, 14),
                datetime(2014, 7, 1, 13): datetime(2014, 7, 1, 14),
                datetime(2014, 7, 1, 15): datetime(2014, 7, 2, 13),
                datetime(2014, 7, 1, 19): datetime(2014, 7, 2, 14),
                datetime(2014, 7, 1, 16): datetime(2014, 7, 2, 14),
                datetime(2014, 7, 1, 15, 30, 15):
                datetime(2014, 7, 2, 13, 30, 15),
                datetime(2014, 7, 5, 15): datetime(2014, 7, 7, 14),
                datetime(2014, 7, 4, 17): datetime(2014, 7, 7, 14),
            },
        ),
        (
            BusinessHour(n=2, start="13:00", end="16:00"),
            {
                datetime(2014, 7, 1, 17): datetime(2014, 7, 2, 15),
                datetime(2014, 7, 2, 14): datetime(2014, 7, 3, 13),
                datetime(2014, 7, 2, 8): datetime(2014, 7, 2, 15),
                datetime(2014, 7, 2, 19): datetime(2014, 7, 3, 15),
                datetime(2014, 7, 2, 14, 30): datetime(2014, 7, 3, 13, 30),
                datetime(2014, 7, 3, 0): datetime(2014, 7, 3, 15),
                datetime(2014, 7, 5, 15): datetime(2014, 7, 7, 15),
                datetime(2014, 7, 4, 17): datetime(2014, 7, 7, 15),
                datetime(2014, 7, 4, 14, 30): datetime(2014, 7, 7, 13, 30),
                datetime(2014, 7, 4, 14, 30, 30):
                datetime(2014, 7, 7, 13, 30, 30),
            },
        ),
        (
            BusinessHour(n=-1, start="13:00", end="16:00"),
            {
                datetime(2014, 7, 2, 11): datetime(2014, 7, 1, 15),
                datetime(2014, 7, 2, 13): datetime(2014, 7, 1, 15),
                datetime(2014, 7, 2, 14): datetime(2014, 7, 1, 16),
                datetime(2014, 7, 2, 15): datetime(2014, 7, 2, 14),
                datetime(2014, 7, 2, 19): datetime(2014, 7, 2, 15),
                datetime(2014, 7, 2, 16): datetime(2014, 7, 2, 15),
                datetime(2014, 7, 2, 13, 30, 15):
                datetime(2014, 7, 1, 15, 30, 15),
                datetime(2014, 7, 5, 15): datetime(2014, 7, 4, 15),
                datetime(2014, 7, 7, 11): datetime(2014, 7, 4, 15),
            },
        ),
        (
            BusinessHour(n=-3, start="10:00", end="16:00"),
            {
                datetime(2014, 7, 1, 17): datetime(2014, 7, 1, 13),
                datetime(2014, 7, 2, 14): datetime(2014, 7, 2, 11),
                datetime(2014, 7, 2, 8): datetime(2014, 7, 1, 13),
                datetime(2014, 7, 2, 13): datetime(2014, 7, 1, 16),
                datetime(2014, 7, 2, 19): datetime(2014, 7, 2, 13),
                datetime(2014, 7, 2, 11, 30): datetime(2014, 7, 1, 14, 30),
                datetime(2014, 7, 3, 0): datetime(2014, 7, 2, 13),
                datetime(2014, 7, 4, 10): datetime(2014, 7, 3, 13),
                datetime(2014, 7, 5, 15): datetime(2014, 7, 4, 13),
                datetime(2014, 7, 4, 16): datetime(2014, 7, 4, 13),
                datetime(2014, 7, 4, 12, 30): datetime(2014, 7, 3, 15, 30),
                datetime(2014, 7, 4, 12, 30, 30):
                datetime(2014, 7, 3, 15, 30, 30),
            },
        ),
        (
            BusinessHour(start="19:00", end="05:00"),
            {
                datetime(2014, 7, 1, 17): datetime(2014, 7, 1, 20),
                datetime(2014, 7, 2, 14): datetime(2014, 7, 2, 20),
                datetime(2014, 7, 2, 8): datetime(2014, 7, 2, 20),
                datetime(2014, 7, 2, 13): datetime(2014, 7, 2, 20),
                datetime(2014, 7, 2, 19): datetime(2014, 7, 2, 20),
                datetime(2014, 7, 2, 4, 30): datetime(2014, 7, 2, 19, 30),
                datetime(2014, 7, 3, 0): datetime(2014, 7, 3, 1),
                datetime(2014, 7, 4, 10): datetime(2014, 7, 4, 20),
                datetime(2014, 7, 4, 23): datetime(2014, 7, 5, 0),
                datetime(2014, 7, 5, 0): datetime(2014, 7, 5, 1),
                datetime(2014, 7, 5, 4): datetime(2014, 7, 7, 19),
                datetime(2014, 7, 5, 4, 30): datetime(2014, 7, 7, 19, 30),
                datetime(2014, 7, 5, 4, 30, 30):
                datetime(2014, 7, 7, 19, 30, 30),
            },
        ),
        (
            BusinessHour(n=-1, start="19:00", end="05:00"),
            {
                datetime(2014, 7, 1, 17): datetime(2014, 7, 1, 4),
                datetime(2014, 7, 2, 14): datetime(2014, 7, 2, 4),
                datetime(2014, 7, 2, 8): datetime(2014, 7, 2, 4),
                datetime(2014, 7, 2, 13): datetime(2014, 7, 2, 4),
                datetime(2014, 7, 2, 20): datetime(2014, 7, 2, 5),
                datetime(2014, 7, 2, 19): datetime(2014, 7, 2, 4),
                datetime(2014, 7, 2, 19, 30): datetime(2014, 7, 2, 4, 30),
                datetime(2014, 7, 3, 0): datetime(2014, 7, 2, 23),
                datetime(2014, 7, 3, 6): datetime(2014, 7, 3, 4),
                datetime(2014, 7, 4, 23): datetime(2014, 7, 4, 22),
                datetime(2014, 7, 5, 0): datetime(2014, 7, 4, 23),
                datetime(2014, 7, 5, 4): datetime(2014, 7, 5, 3),
                datetime(2014, 7, 7, 19, 30): datetime(2014, 7, 5, 4, 30),
                datetime(2014, 7, 7, 19, 30, 30):
                datetime(2014, 7, 5, 4, 30, 30),
            },
        ),
        (
            BusinessHour(n=4, start="00:00", end="23:00"),
            {
                datetime(2014, 7, 3, 22): datetime(2014, 7, 4, 3),
                datetime(2014, 7, 4, 22): datetime(2014, 7, 7, 3),
                datetime(2014, 7, 3, 22, 30): datetime(2014, 7, 4, 3, 30),
                datetime(2014, 7, 3, 22, 20): datetime(2014, 7, 4, 3, 20),
                datetime(2014, 7, 4, 22, 30, 30):
                datetime(2014, 7, 7, 3, 30, 30),
                datetime(2014, 7, 4, 22, 30, 20):
                datetime(2014, 7, 7, 3, 30, 20),
            },
        ),
        (
            BusinessHour(n=-4, start="00:00", end="23:00"),
            {
                datetime(2014, 7, 4, 3): datetime(2014, 7, 3, 22),
                datetime(2014, 7, 7, 3): datetime(2014, 7, 4, 22),
                datetime(2014, 7, 4, 3, 30): datetime(2014, 7, 3, 22, 30),
                datetime(2014, 7, 4, 3, 20): datetime(2014, 7, 3, 22, 20),
                datetime(2014, 7, 7, 3, 30, 30):
                datetime(2014, 7, 4, 22, 30, 30),
                datetime(2014, 7, 7, 3, 30, 20):
                datetime(2014, 7, 4, 22, 30, 20),
            },
        ),
        (
            BusinessHour(start=["09:00", "14:00"], end=["12:00", "18:00"]),
            {
                datetime(2014, 7, 1, 11):
                datetime(2014, 7, 1, 14),
                datetime(2014, 7, 1, 15):
                datetime(2014, 7, 1, 16),
                datetime(2014, 7, 1, 19):
                datetime(2014, 7, 2, 10),
                datetime(2014, 7, 1, 16):
                datetime(2014, 7, 1, 17),
                datetime(2014, 7, 1, 16, 30, 15):
                datetime(2014, 7, 1, 17, 30, 15),
                datetime(2014, 7, 1, 17):
                datetime(2014, 7, 2, 9),
                datetime(2014, 7, 2, 11):
                datetime(2014, 7, 2, 14),
                # out of business hours
                datetime(2014, 7, 1, 13):
                datetime(2014, 7, 1, 15),
                datetime(2014, 7, 2, 8):
                datetime(2014, 7, 2, 10),
                datetime(2014, 7, 2, 19):
                datetime(2014, 7, 3, 10),
                datetime(2014, 7, 2, 23):
                datetime(2014, 7, 3, 10),
                datetime(2014, 7, 3, 0):
                datetime(2014, 7, 3, 10),
                # saturday
                datetime(2014, 7, 5, 15):
                datetime(2014, 7, 7, 10),
                datetime(2014, 7, 4, 17):
                datetime(2014, 7, 7, 9),
                datetime(2014, 7, 4, 17, 30):
                datetime(2014, 7, 7, 9, 30),
                datetime(2014, 7, 4, 17, 30, 30):
                datetime(2014, 7, 7, 9, 30, 30),
            },
        ),
        (
            BusinessHour(n=4, start=["09:00", "14:00"], end=["12:00",
                                                             "18:00"]),
            {
                datetime(2014, 7, 1, 11): datetime(2014, 7, 1, 17),
                datetime(2014, 7, 1, 13): datetime(2014, 7, 2, 9),
                datetime(2014, 7, 1, 15): datetime(2014, 7, 2, 10),
                datetime(2014, 7, 1, 16): datetime(2014, 7, 2, 11),
                datetime(2014, 7, 1, 17): datetime(2014, 7, 2, 14),
                datetime(2014, 7, 2, 11): datetime(2014, 7, 2, 17),
                datetime(2014, 7, 2, 8): datetime(2014, 7, 2, 15),
                datetime(2014, 7, 2, 19): datetime(2014, 7, 3, 15),
                datetime(2014, 7, 2, 23): datetime(2014, 7, 3, 15),
                datetime(2014, 7, 3, 0): datetime(2014, 7, 3, 15),
                datetime(2014, 7, 5, 15): datetime(2014, 7, 7, 15),
                datetime(2014, 7, 4, 17): datetime(2014, 7, 7, 14),
                datetime(2014, 7, 4, 16, 30): datetime(2014, 7, 7, 11, 30),
                datetime(2014, 7, 4, 16, 30, 30):
                datetime(2014, 7, 7, 11, 30, 30),
            },
        ),
        (
            BusinessHour(n=-4,
                         start=["09:00", "14:00"],
                         end=["12:00", "18:00"]),
            {
                datetime(2014, 7, 1, 11): datetime(2014, 6, 30, 16),
                datetime(2014, 7, 1, 13): datetime(2014, 6, 30, 17),
                datetime(2014, 7, 1, 15): datetime(2014, 6, 30, 18),
                datetime(2014, 7, 1, 16): datetime(2014, 7, 1, 10),
                datetime(2014, 7, 1, 17): datetime(2014, 7, 1, 11),
                datetime(2014, 7, 2, 11): datetime(2014, 7, 1, 16),
                datetime(2014, 7, 2, 8): datetime(2014, 7, 1, 12),
                datetime(2014, 7, 2, 19): datetime(2014, 7, 2, 12),
                datetime(2014, 7, 2, 23): datetime(2014, 7, 2, 12),
                datetime(2014, 7, 3, 0): datetime(2014, 7, 2, 12),
                datetime(2014, 7, 5, 15): datetime(2014, 7, 4, 12),
                datetime(2014, 7, 4, 18): datetime(2014, 7, 4, 12),
                datetime(2014, 7, 7, 9, 30): datetime(2014, 7, 4, 14, 30),
                datetime(2014, 7, 7, 9, 30, 30):
                datetime(2014, 7, 4, 14, 30, 30),
            },
        ),
        (
            BusinessHour(n=-1,
                         start=["19:00", "03:00"],
                         end=["01:00", "05:00"]),
            {
                datetime(2014, 7, 1, 17): datetime(2014, 7, 1, 4),
                datetime(2014, 7, 2, 14): datetime(2014, 7, 2, 4),
                datetime(2014, 7, 2, 8): datetime(2014, 7, 2, 4),
                datetime(2014, 7, 2, 13): datetime(2014, 7, 2, 4),
                datetime(2014, 7, 2, 20): datetime(2014, 7, 2, 5),
                datetime(2014, 7, 2, 19): datetime(2014, 7, 2, 4),
                datetime(2014, 7, 2, 4): datetime(2014, 7, 2, 1),
                datetime(2014, 7, 2, 19, 30): datetime(2014, 7, 2, 4, 30),
                datetime(2014, 7, 3, 0): datetime(2014, 7, 2, 23),
                datetime(2014, 7, 3, 6): datetime(2014, 7, 3, 4),
                datetime(2014, 7, 4, 23): datetime(2014, 7, 4, 22),
                datetime(2014, 7, 5, 0): datetime(2014, 7, 4, 23),
                datetime(2014, 7, 5, 4): datetime(2014, 7, 5, 0),
                datetime(2014, 7, 7, 3, 30): datetime(2014, 7, 5, 0, 30),
                datetime(2014, 7, 7, 19, 30): datetime(2014, 7, 7, 4, 30),
                datetime(2014, 7, 7, 19, 30, 30):
                datetime(2014, 7, 7, 4, 30, 30),
            },
        ),
    ]

    # apply_cases above also covers long business hours (see gh-26381)
    # and multiple business-hour intervals per day

    @pytest.mark.parametrize("case", apply_cases)
    def test_apply(self, case):
        offset, cases = case
        for base, expected in cases.items():
            assert_offset_equal(offset, base, expected)

    apply_large_n_cases = [
        (
            # A week later
            BusinessHour(40),
            {
                datetime(2014, 7, 1, 11): datetime(2014, 7, 8, 11),
                datetime(2014, 7, 1, 13): datetime(2014, 7, 8, 13),
                datetime(2014, 7, 1, 15): datetime(2014, 7, 8, 15),
                datetime(2014, 7, 1, 16): datetime(2014, 7, 8, 16),
                datetime(2014, 7, 1, 17): datetime(2014, 7, 9, 9),
                datetime(2014, 7, 2, 11): datetime(2014, 7, 9, 11),
                datetime(2014, 7, 2, 8): datetime(2014, 7, 9, 9),
                datetime(2014, 7, 2, 19): datetime(2014, 7, 10, 9),
                datetime(2014, 7, 2, 23): datetime(2014, 7, 10, 9),
                datetime(2014, 7, 3, 0): datetime(2014, 7, 10, 9),
                datetime(2014, 7, 5, 15): datetime(2014, 7, 14, 9),
                datetime(2014, 7, 4, 18): datetime(2014, 7, 14, 9),
                datetime(2014, 7, 7, 9, 30): datetime(2014, 7, 14, 9, 30),
                datetime(2014, 7, 7, 9, 30, 30):
                datetime(2014, 7, 14, 9, 30, 30),
            },
        ),
        (
            # 3 days and 1 hour before
            BusinessHour(-25),
            {
                datetime(2014, 7, 1, 11): datetime(2014, 6, 26, 10),
                datetime(2014, 7, 1, 13): datetime(2014, 6, 26, 12),
                datetime(2014, 7, 1, 9): datetime(2014, 6, 25, 16),
                datetime(2014, 7, 1, 10): datetime(2014, 6, 25, 17),
                datetime(2014, 7, 3, 11): datetime(2014, 6, 30, 10),
                datetime(2014, 7, 3, 8): datetime(2014, 6, 27, 16),
                datetime(2014, 7, 3, 19): datetime(2014, 6, 30, 16),
                datetime(2014, 7, 3, 23): datetime(2014, 6, 30, 16),
                datetime(2014, 7, 4, 9): datetime(2014, 6, 30, 16),
                datetime(2014, 7, 5, 15): datetime(2014, 7, 1, 16),
                datetime(2014, 7, 6, 18): datetime(2014, 7, 1, 16),
                datetime(2014, 7, 7, 9, 30): datetime(2014, 7, 1, 16, 30),
                datetime(2014, 7, 7, 10, 30, 30):
                datetime(2014, 7, 2, 9, 30, 30),
            },
        ),
        (
            # 5 days and 3 hours later
            BusinessHour(28, start="21:00", end="02:00"),
            {
                datetime(2014, 7, 1, 11): datetime(2014, 7, 9, 0),
                datetime(2014, 7, 1, 22): datetime(2014, 7, 9, 1),
                datetime(2014, 7, 1, 23): datetime(2014, 7, 9, 21),
                datetime(2014, 7, 2, 2): datetime(2014, 7, 10, 0),
                datetime(2014, 7, 3, 21): datetime(2014, 7, 11, 0),
                datetime(2014, 7, 4, 1): datetime(2014, 7, 11, 23),
                datetime(2014, 7, 4, 2): datetime(2014, 7, 12, 0),
                datetime(2014, 7, 4, 3): datetime(2014, 7, 12, 0),
                datetime(2014, 7, 5, 1): datetime(2014, 7, 14, 23),
                datetime(2014, 7, 5, 15): datetime(2014, 7, 15, 0),
                datetime(2014, 7, 6, 18): datetime(2014, 7, 15, 0),
                datetime(2014, 7, 7, 1): datetime(2014, 7, 15, 0),
                datetime(2014, 7, 7, 23, 30): datetime(2014, 7, 15, 21, 30),
            },
        ),
        (
            # large n for multiple opening hours (3 days and 1 hour before)
            BusinessHour(n=-25,
                         start=["09:00", "14:00"],
                         end=["12:00", "19:00"]),
            {
                datetime(2014, 7, 1, 11): datetime(2014, 6, 26, 10),
                datetime(2014, 7, 1, 13): datetime(2014, 6, 26, 11),
                datetime(2014, 7, 1, 9): datetime(2014, 6, 25, 18),
                datetime(2014, 7, 1, 10): datetime(2014, 6, 25, 19),
                datetime(2014, 7, 3, 11): datetime(2014, 6, 30, 10),
                datetime(2014, 7, 3, 8): datetime(2014, 6, 27, 18),
                datetime(2014, 7, 3, 19): datetime(2014, 6, 30, 18),
                datetime(2014, 7, 3, 23): datetime(2014, 6, 30, 18),
                datetime(2014, 7, 4, 9): datetime(2014, 6, 30, 18),
                datetime(2014, 7, 5, 15): datetime(2014, 7, 1, 18),
                datetime(2014, 7, 6, 18): datetime(2014, 7, 1, 18),
                datetime(2014, 7, 7, 9, 30): datetime(2014, 7, 1, 18, 30),
                datetime(2014, 7, 7, 10, 30, 30):
                datetime(2014, 7, 2, 9, 30, 30),
            },
        ),
        (
            # 5 days and 3 hours later
            BusinessHour(28, start=["21:00", "03:00"], end=["01:00", "04:00"]),
            {
                datetime(2014, 7, 1, 11): datetime(2014, 7, 9, 0),
                datetime(2014, 7, 1, 22): datetime(2014, 7, 9, 3),
                datetime(2014, 7, 1, 23): datetime(2014, 7, 9, 21),
                datetime(2014, 7, 2, 2): datetime(2014, 7, 9, 23),
                datetime(2014, 7, 3, 21): datetime(2014, 7, 11, 0),
                datetime(2014, 7, 4, 1): datetime(2014, 7, 11, 23),
                datetime(2014, 7, 4, 2): datetime(2014, 7, 11, 23),
                datetime(2014, 7, 4, 3): datetime(2014, 7, 11, 23),
                datetime(2014, 7, 4, 21): datetime(2014, 7, 12, 0),
                datetime(2014, 7, 5, 0): datetime(2014, 7, 14, 22),
                datetime(2014, 7, 5, 1): datetime(2014, 7, 14, 23),
                datetime(2014, 7, 5, 15): datetime(2014, 7, 14, 23),
                datetime(2014, 7, 6, 18): datetime(2014, 7, 14, 23),
                datetime(2014, 7, 7, 1): datetime(2014, 7, 14, 23),
                datetime(2014, 7, 7, 23, 30): datetime(2014, 7, 15, 21, 30),
            },
        ),
    ]

    @pytest.mark.parametrize("case", apply_large_n_cases)
    def test_apply_large_n(self, case):
        offset, cases = case
        for base, expected in cases.items():
            assert_offset_equal(offset, base, expected)

    def test_apply_nanoseconds(self):
        tests = [
            (
                BusinessHour(),
                {
                    Timestamp("2014-07-04 15:00") + Nano(5):
                    Timestamp("2014-07-04 16:00") + Nano(5),
                    Timestamp("2014-07-04 16:00") + Nano(5):
                    Timestamp("2014-07-07 09:00") + Nano(5),
                    Timestamp("2014-07-04 16:00") - Nano(5):
                    Timestamp("2014-07-04 17:00") - Nano(5),
                },
            ),
            (
                BusinessHour(-1),
                {
                    Timestamp("2014-07-04 15:00") + Nano(5):
                    Timestamp("2014-07-04 14:00") + Nano(5),
                    Timestamp("2014-07-04 10:00") + Nano(5):
                    Timestamp("2014-07-04 09:00") + Nano(5),
                    Timestamp("2014-07-04 10:00") - Nano(5):
                    Timestamp("2014-07-03 17:00") - Nano(5),
                },
            ),
        ]

        for offset, cases in tests:
            for base, expected in cases.items():
                assert_offset_equal(offset, base, expected)

    def test_datetimeindex(self):
        idx1 = date_range(start="2014-07-04 15:00",
                          end="2014-07-08 10:00",
                          freq="BH")
        idx2 = date_range(start="2014-07-04 15:00", periods=12, freq="BH")
        idx3 = date_range(end="2014-07-08 10:00", periods=12, freq="BH")
        expected = DatetimeIndex(
            [
                "2014-07-04 15:00",
                "2014-07-04 16:00",
                "2014-07-07 09:00",
                "2014-07-07 10:00",
                "2014-07-07 11:00",
                "2014-07-07 12:00",
                "2014-07-07 13:00",
                "2014-07-07 14:00",
                "2014-07-07 15:00",
                "2014-07-07 16:00",
                "2014-07-08 09:00",
                "2014-07-08 10:00",
            ],
            freq="BH",
        )
        for idx in [idx1, idx2, idx3]:
            tm.assert_index_equal(idx, expected)

        idx1 = date_range(start="2014-07-04 15:45",
                          end="2014-07-08 10:45",
                          freq="BH")
        idx2 = date_range(start="2014-07-04 15:45", periods=12, freq="BH")
        idx3 = date_range(end="2014-07-08 10:45", periods=12, freq="BH")

        expected = idx1
        for idx in [idx1, idx2, idx3]:
            tm.assert_index_equal(idx, expected)

    def test_bday_ignores_timedeltas(self):
        idx = date_range("2010/02/01", "2010/02/10", freq="12H")
        t1 = idx + BDay(offset=Timedelta(3, unit="H"))

        expected = DatetimeIndex(
            [
                "2010-02-02 03:00:00",
                "2010-02-02 15:00:00",
                "2010-02-03 03:00:00",
                "2010-02-03 15:00:00",
                "2010-02-04 03:00:00",
                "2010-02-04 15:00:00",
                "2010-02-05 03:00:00",
                "2010-02-05 15:00:00",
                "2010-02-08 03:00:00",
                "2010-02-08 15:00:00",
                "2010-02-08 03:00:00",
                "2010-02-08 15:00:00",
                "2010-02-08 03:00:00",
                "2010-02-08 15:00:00",
                "2010-02-09 03:00:00",
                "2010-02-09 15:00:00",
                "2010-02-10 03:00:00",
                "2010-02-10 15:00:00",
                "2010-02-11 03:00:00",
            ],
            freq=None,
        )
        tm.assert_index_equal(t1, expected)
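
The class above exercises pandas' BusinessHour offset across many start/end configurations. A minimal standalone sketch of the behavior being tested, assuming a recent pandas installation (offset addition is used instead of the deprecated __call__):

from datetime import datetime

from pandas.tseries.offsets import BusinessHour

offset = BusinessHour()  # default business hours: Mon-Fri, 09:00-17:00
saturday = datetime(2014, 7, 5, 15, 0)

# rollback/rollforward snap an off-offset timestamp to the nearest business hour
assert offset.rollback(saturday) == datetime(2014, 7, 4, 17)    # previous Friday close
assert offset.rollforward(saturday) == datetime(2014, 7, 7, 9)  # next Monday open

# adding the offset advances by one business hour, skipping the weekend
assert saturday + offset == datetime(2014, 7, 7, 10)
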
Example #37
0
    def testTimeOnlyUTC(self):
        # https://github.com/dateutil/dateutil/issues/132
        # tzutc doesn't care
        tz_utc = tz.tzutc()
        self.assertEqual(
            dt_time(13, 20, tzinfo=tz_utc).utcoffset(), timedelta(0))
Example #38
0
def get_previous_week(date):
    iso_date = date.isocalendar()
    end = dt.combine(dt_date.fromisocalendar(iso_date[0], iso_date[1], 1),
                     dt_time(tzinfo=tz.utc))
    start = end - dt_delta(weeks=1)
    return start, end
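
The helper above returns the bounds of the ISO week preceding the week that contains the given date. A self-contained sketch of the same logic using only the standard library (the aliases dt, dt_date, dt_time, dt_delta and tz.utc in the original are assumed to be datetime, date, time, timedelta and a UTC tzinfo):

from datetime import date, datetime, time, timedelta, timezone

def previous_week_bounds(day):
    """Return (start, end) spanning the ISO week before the one containing day."""
    year, week, _ = day.isocalendar()
    # Monday 00:00 UTC of the current ISO week is the exclusive end of the previous week
    end = datetime.combine(date.fromisocalendar(year, week, 1),
                           time(tzinfo=timezone.utc))
    start = end - timedelta(weeks=1)
    return start, end

start, end = previous_week_bounds(datetime(2021, 1, 13))  # a Wednesday in ISO week 2
assert start == datetime(2021, 1, 4, tzinfo=timezone.utc)
assert end == datetime(2021, 1, 11, tzinfo=timezone.utc)
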
Example #39
0
    def testTimeOnlyLocal(self):
        # tzlocal returns None
        tz_local = tz.tzlocal()
        self.assertIs(dt_time(13, 20, tzinfo=tz_local).utcoffset(), None)
Example #40
0
    def _handler(self, request, response):
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'
        process_start_time = time.time()  # measure process execution time ...

        response.update_status('execution started at : %s ' % dt.now(), 5)

        start_time = time.time()  # measure init ...

        resource = archiveextract(
            resource=rename_complexinputs(request.inputs['resource']))

        refSt = request.inputs['refSt'][0].data
        refEn = request.inputs['refEn'][0].data
        dateSt = request.inputs['dateSt'][0].data
        dateEn = request.inputs['dateEn'][0].data
        regrset = request.inputs['regrset'][0].data

        # fix 31 December issue
        # refSt = dt.combine(refSt,dt_time(12,0))
        # refEn = dt.combine(refEn,dt_time(12,0))
        # dateSt = dt.combine(dateSt,dt_time(12,0))
        # dateEn = dt.combine(dateEn,dt_time(12,0))

        seasonwin = request.inputs['seasonwin'][0].data
        nanalog = request.inputs['nanalog'][0].data
        # bbox = [-80, 20, 50, 70]
        # TODO: Add checking for wrong coordinates and apply defaults if necessary
        bbox = []
        bboxStr = request.inputs['BBox'][0].data
        bboxStr = bboxStr.split(',')
        bbox.append(float(bboxStr[0]))
        bbox.append(float(bboxStr[2]))
        bbox.append(float(bboxStr[1]))
        bbox.append(float(bboxStr[3]))

        direction = request.inputs['direction'][0].data
        normalize = request.inputs['normalize'][0].data
        distance = request.inputs['dist'][0].data
        outformat = request.inputs['outformat'][0].data
        timewin = request.inputs['timewin'][0].data

        model_var = request.inputs['reanalyses'][0].data
        model, var = model_var.split('_')

        try:
            if direction == 're2mo':
                anaSt = dt.combine(dateSt, dt_time(0, 0))   # dt.strptime(dateSt[0], '%Y-%m-%d')
                anaEn = dt.combine(dateEn, dt_time(0, 0))   # dt.strptime(dateEn[0], '%Y-%m-%d')
                refSt = dt.combine(refSt, dt_time(12, 0))   # dt.strptime(refSt[0], '%Y-%m-%d')
                refEn = dt.combine(refEn, dt_time(12, 0))   # dt.strptime(refEn[0], '%Y-%m-%d')
                r_time_range = [anaSt, anaEn]
                m_time_range = [refSt, refEn]
            elif direction == 'mo2re':
                anaSt = dt.combine(dateSt, dt_time(12, 0))  # dt.strptime(refSt[0], '%Y-%m-%d')
                anaEn = dt.combine(dateEn, dt_time(12, 0))  # dt.strptime(refEn[0], '%Y-%m-%d')
                refSt = dt.combine(refSt, dt_time(0, 0))    # dt.strptime(dateSt[0], '%Y-%m-%d')
                refEn = dt.combine(refEn, dt_time(0, 0))    # dt.strptime(dateEn[0], '%Y-%m-%d')
                r_time_range = [anaSt, anaEn]
                m_time_range = [refSt, refEn]
            else:
                LOGGER.exception(
                    'failed to find time periods for comparison direction')
        except Exception:
            msg = 'failed to put simulation and reference time in order'
            LOGGER.exception(msg)
            raise Exception(msg)

        if normalize == 'None':
            seacyc = False
        else:
            seacyc = True

        if outformat == 'ascii':
            outformat = '.txt'
        elif outformat == 'netCDF':
            outformat = '.nc'
        else:
            LOGGER.exception('output format not valid')

        try:
            if model == 'NCEP':
                getlevel = True
                if 'z' in var:
                    level = var.strip('z')
                    variable = 'hgt'
                    # conform_units_to='hPa'
                else:
                    variable = 'slp'
                    level = None
                    # conform_units_to='hPa'
            elif '20CRV2' in model:
                getlevel = False
                if 'z' in var:
                    variable = 'hgt'
                    level = var.strip('z')
                    # conform_units_to=None
                else:
                    variable = 'prmsl'
                    level = None
                    # conform_units_to='hPa'
            else:
                LOGGER.exception('Reanalyses model not known')
            LOGGER.info('environment set')
        except Exception:
            msg = 'failed to set environment'
            LOGGER.exception(msg)
            raise Exception(msg)

        # LOGGER.exception("init took %s seconds.", time.time() - start_time)
        response.update_status('Read in the arguments', 6)

        #################
        # get input data
        #################
        # TODO: do not forget to select years

        start_time = time.time()  # measure get_input_data ...
        response.update_status('fetching input data', 7)
        try:
            if direction == 're2mo':
                nc_reanalyses = reanalyses(start=anaSt.year,
                                           end=anaEn.year,
                                           variable=var,
                                           dataset=model,
                                           getlevel=getlevel)
            else:
                nc_reanalyses = reanalyses(start=refSt.year,
                                           end=refEn.year,
                                           variable=var,
                                           dataset=model,
                                           getlevel=getlevel)

            if isinstance(nc_reanalyses, list):
                nc_reanalyses = sorted(
                    nc_reanalyses,
                    key=lambda i: path.splitext(path.basename(i))[0])
            else:
                nc_reanalyses = [nc_reanalyses]

            # For 20CRV2 geopotential height, the daily dataset for 100 years is about 50 GB,
            # so it makes sense to process it step by step.
            # TODO: need to create a dictionary for such datasets (for models as well)
            # TODO: benchmark the method below for NCEP z500 for 60 years, maybe use the same (!)
            # TODO: currently everything is regridded to the reanalysis grid

            if ('20CRV2' in model) and ('z' in var):
                tmp_total = []
                origvar = get_variable(nc_reanalyses)

                for z in nc_reanalyses:
                    tmp_n = 'tmp_%s' % (uuid.uuid1())
                    b0 = call(resource=z,
                              variable=origvar,
                              level_range=[int(level), int(level)],
                              geom=bbox,
                              spatial_wrapping='wrap',
                              prefix='levdom_' + path.basename(z)[0:-3])
                    tmp_total.append(b0)

                tmp_total = sorted(
                    tmp_total,
                    key=lambda i: path.splitext(path.basename(i))[0])
                inter_subset_tmp = call(resource=tmp_total,
                                        variable=origvar,
                                        time_range=r_time_range)

                # Clean
                for i in tmp_total:
                    tbr = 'rm -f %s' % (i)
                    #system(tbr)

                # Create new variable
                ds = Dataset(inter_subset_tmp, mode='a')
                z_var = ds.variables.pop(origvar)
                dims = z_var.dimensions
                new_var = ds.createVariable('z%s' % level,
                                            z_var.dtype,
                                            dimensions=(dims[0], dims[2],
                                                        dims[3]))
                new_var[:, :, :] = squeeze(z_var[:, 0, :, :])
                # new_var.setncatts({k: z_var.getncattr(k) for k in z_var.ncattrs()})
                ds.close()
                nc_subset = call(inter_subset_tmp, variable='z%s' % level)
            else:
                nc_subset = call(
                    resource=nc_reanalyses,
                    variable=var,
                    geom=bbox,
                    spatial_wrapping='wrap',
                    time_range=r_time_range,
                    # conform_units_to=conform_units_to
                )

            # nc_subset = call(resource=nc_reanalyses, variable=var, geom=bbox, spatial_wrapping='wrap') # XXXXXX wrap
            # LOGGER.exception("get_input_subset_model took %s seconds.", time.time() - start_time)
            response.update_status('**** Input reanalyses data fetched', 10)
        except Exception:
            msg = 'failed to fetch or subset input files'
            LOGGER.exception(msg)
            raise Exception(msg)

        ########################
        # input data preparation
        ########################
        response.update_status('Start preparing input data', 12)

        # Filter resource:
        if isinstance(resource, list):
            resource = sorted(resource,
                              key=lambda i: path.splitext(path.basename(i))[0])
        else:
            resource = [resource]

        tmp_resource = []

        m_start = m_time_range[0]
        m_end = m_time_range[1]

        for re in resource:
            s, e = get_timerange(re)
            tmpSt = dt.strptime(s, '%Y%m%d')
            tmpEn = dt.strptime(e, '%Y%m%d')
            if ((tmpSt <= m_end) and (tmpEn >= m_start)):
                tmp_resource.append(re)
                LOGGER.debug('Selected file: %s ' % (re))
        resource = tmp_resource

        start_time = time.time()  # measure data preparation ...
        # TODO: check the calendars for model vs reanalyses
        # TODO: check the units for model vs reanalyses
        try:
            m_total = []
            modvar = get_variable(resource)
            # resource properties
            ds = Dataset(resource[0])
            m_var = ds.variables[modvar]
            dims = list(m_var.dimensions)
            dimlen = len(dims)

            try:
                model_id = ds.getncattr('model_id')
            except AttributeError:
                model_id = 'Unknown model'

            LOGGER.debug('MODEL: %s ' % (model_id))

            lev_units = 'hPa'

            if (dimlen > 3):
                lev = ds.variables[dims[1]]
                # actually index [1] needs to be detected... assuming zg(time, plev, lat, lon)
                lev_units = lev.units

                if (lev_units == 'Pa'):
                    m_level = str(int(level) * 100)
                else:
                    m_level = level
            else:
                m_level = None

            if level is None:
                level_range = None
            else:
                level_range = [int(m_level), int(m_level)]

            for z in resource:
                tmp_n = 'tmp_%s' % (uuid.uuid1())
                # select level and regrid
                b0 = call(
                    resource=z,
                    variable=modvar,
                    level_range=level_range,
                    spatial_wrapping='wrap',  #cdover='system',
                    regrid_destination=nc_reanalyses[0],
                    regrid_options='bil',
                    prefix=tmp_n)
                # select domain
                b01 = call(resource=b0,
                           geom=bbox,
                           spatial_wrapping='wrap',
                           prefix='levregr_' + path.basename(z)[0:-3])
                tbr = 'rm -f %s' % (b0)
                #system(tbr)
                tbr = 'rm -f %s' % (tmp_n)
                #system(tbr)
                # get full resource
                m_total.append(b01)
            ds.close()
            model_subset = call(m_total, time_range=m_time_range)
            for i in m_total:
                tbr = 'rm -f %s' % (i)
                #system(tbr)

            if m_level is not None:
                # Create new variable in model set
                ds = Dataset(model_subset, mode='a')
                mod_var = ds.variables.pop(modvar)
                dims = mod_var.dimensions
                new_modvar = ds.createVariable('z%s' % level,
                                               mod_var.dtype,
                                               dimensions=(dims[0], dims[2],
                                                           dims[3]))
                new_modvar[:, :, :] = squeeze(mod_var[:, 0, :, :])
                # new_var.setncatts({k: z_var.getncattr(k) for k in z_var.ncattrs()})
                ds.close()
                mod_subset = call(model_subset, variable='z%s' % level)
            else:
                mod_subset = model_subset

#            if direction == 're2mo':
#                try:
#                    response.update_status('Preparing simulation data', 15)
#                    reanalyses_subset = call(resource=nc_subset, time_range=[anaSt, anaEn])
#                except:
#                    msg = 'failed to prepare simulation period'
#                    LOGGER.exception(msg)
#                try:
#                    response.update_status('Preparing target data', 17)
#                    var_target = get_variable(resource)
#                    # var_simulation = get_variable(simulation)

#                    model_subset_tmp = call(resource=resource, variable=var_target,
#                                            time_range=[refSt, refEn],
#                                            t_calendar='standard',
#                                            spatial_wrapping='wrap',
#                                            regrid_destination=nc_reanalyses[0],
#                                            regrid_options='bil')

#                    # model_subset = call(resource=resource, variable=var_target,
#                    #                     time_range=[refSt, refEn],
#                    #                     geom=bbox,
#                    #                     t_calendar='standard',
#                    #                     # conform_units_to=conform_units_to,
#                    #                     spatial_wrapping='wrap',
#                    #                     regrid_destination=reanalyses_subset,
#                    #                     regrid_options='bil') # XXXXXXXXXXXX ADD WRAP rem calendar

#                    model_subset = call(resource=model_subset_tmp,variable=var_target, geom=bbox, spatial_wrapping='wrap', t_calendar='standard')

#                   # ISSUE: the regrided model has white border with null! Check it.
#                   # check t_calendar!
#                except:
#                    msg = 'failed subset archive model'
#                    LOGGER.exception(msg)
#                    raise Exception(msg)
#            else:
#                try:
#                    response.update_status('Preparing target data', 15)
#                    var_target = get_variable(resource)
#                    # var_simulation = get_variable(simulation)
#                    model_subset = call(resource=resource, variable=var_target,
#                                        time_range=[refSt, refEn],
#                                        geom=bbox,
#                                        t_calendar='standard',
#                                        # conform_units_to=conform_units_to,
#                                        # spatial_wrapping='wrap',
#                                        )
#                except:
#                    msg = 'failed subset archive model'
#                    LOGGER.exception(msg)
#                    raise Exception(msg)
#                try:
#                    response.update_status('Preparing simulation data', 17)
#                    reanalyses_subset = call(resource=nc_subset,
#                                             time_range=[anaSt, anaEn],
#                                             regrid_destination=model_subset,
#                                             regrid_options='bil')
#                except:
#                    msg = 'failed to prepare simulation period'
#                    LOGGER.exception(msg)
        except Exception:
            msg = 'failed to subset simulation or reference data'
            LOGGER.exception(msg)
            raise Exception(msg)

# --------------------------------------------
        try:
            if direction == 'mo2re':
                simulation = mod_subset
                archive = nc_subset
                base_id = model
                sim_id = model_id
            elif direction == 're2mo':
                simulation = nc_subset
                archive = mod_subset
                base_id = model_id
                sim_id = model
            else:
                LOGGER.exception('direction not valid: %s ' % direction)
        except Exception:
            msg = 'failed to find comparison direction'
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            if level is not None:
                out_var = 'z%s' % level
            else:
                var_archive = get_variable(archive)
                var_simulation = get_variable(simulation)
                if var_archive != var_simulation:
                    rename_variable(archive,
                                    oldname=var_archive,
                                    newname=var_simulation)
                    out_var = var_simulation
                    LOGGER.info('varname %s in netCDF renamed to %s' %
                                (var_archive, var_simulation))
        except Exception:
            msg = 'failed to rename variable in target files'
            LOGGER.exception(msg)
            raise Exception(msg)

        try:
            if seacyc is True:
                seasoncyc_base, seasoncyc_sim = analogs.seacyc(
                    archive, simulation, method=normalize)
            else:
                seasoncyc_base = None
                seasoncyc_sim = None
        except Exception:
            msg = 'failed to prepare seasonal cycle reference files'
            LOGGER.exception(msg)
            raise Exception(msg)

        ip, output = mkstemp(dir='.', suffix='.txt')
        output_file = path.abspath(output)
        files = [path.abspath(archive), path.abspath(simulation), output_file]

        # LOGGER.exception("data preperation took %s seconds.", time.time() - start_time)

        ############################
        # generating the config file
        ############################

        response.update_status('writing config file', 18)
        start_time = time.time()  # measure write config ...

        try:
            config_file = analogs.get_configfile(
                files=files,
                seasoncyc_base=seasoncyc_base,
                seasoncyc_sim=seasoncyc_sim,
                base_id=base_id,
                sim_id=sim_id,
                timewin=timewin,
                varname=var,
                seacyc=seacyc,
                cycsmooth=91,
                nanalog=nanalog,
                seasonwin=seasonwin,
                distfun=distance,
                outformat=outformat,
                calccor=True,
                silent=False,
                period=[
                    dt.strftime(refSt, '%Y-%m-%d'),
                    dt.strftime(refEn, '%Y-%m-%d')
                ],
                bbox="%s,%s,%s,%s" % (bbox[0], bbox[2], bbox[1], bbox[3]))
        except Exception:
            msg = 'failed to generate config file'
            LOGGER.exception(msg)
            raise Exception(msg)

        # LOGGER.exception("write_config took %s seconds.", time.time() - start_time)

        #######################
        # CASTf90 call
        #######################
        import subprocess
        import shlex

        start_time = time.time()  # measure call castf90

        response.update_status('Start CASTf90 call', 20)
        try:
            # response.update_status('execution of CASTf90', 50)
            cmd = 'analogue.out %s' % path.relpath(config_file)
            # system(cmd)
            args = shlex.split(cmd)
            output, error = subprocess.Popen(
                args, stdout=subprocess.PIPE,
                stderr=subprocess.PIPE).communicate()
            LOGGER.info('analogue.out info:\n %s ' % output)
            LOGGER.exception('analogue.out errors:\n %s ' % error)
            response.update_status('**** CASTf90 succeeded', 90)
        except Exception:
            msg = 'CASTf90 failed'
            LOGGER.exception(msg)
            raise Exception(msg)

        LOGGER.debug("castf90 took %s seconds.", time.time() - start_time)

        response.update_status('preparing output', 91)

        # Stopper to keep twitcher results, for debug
        # dummy=dummy

        response.outputs['config'].file = config_file  # config_output_url
        response.outputs['analogs'].file = output_file
        response.outputs['output_netcdf'].file = simulation
        response.outputs['target_netcdf'].file = archive

        ########################
        # generate analog viewer
        ########################

        formated_analogs_file = analogs.reformat_analogs(output_file)
        # response.outputs['formated_analogs'].storage = FileStorage()
        response.outputs['formated_analogs'].file = formated_analogs_file
        LOGGER.info('analogs reformated')
        response.update_status('reformatted analog file', 95)
        viewer_html = analogs.render_viewer(
            # configfile=response.outputs['config'].get_url(),
            configfile=config_file,
            # datafile=response.outputs['formated_analogs'].get_url())
            datafile=formated_analogs_file)
        response.outputs['output'].file = viewer_html
        response.update_status('Successfully generated analogs viewer', 99)
        LOGGER.info('rendered pages: %s ', viewer_html)
        response.update_status('execution ended', 100)
        LOGGER.debug("total execution took %s seconds.",
                     time.time() - process_start_time)
        return response
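
The direction branch near the top of _handler only pairs the request dates with fixed times of day (midnight for the reanalysis period, noon for the model period) and orders them into the two time ranges used later. A condensed, hypothetical sketch of that step (names are illustrative, not the process API):

from datetime import date, datetime, time

def build_time_ranges(direction, date_start, date_end, ref_start, ref_end):
    """Return (reanalysis_time_range, model_time_range) for the comparison direction."""
    if direction == 're2mo':
        ana = [datetime.combine(date_start, time(0, 0)),
               datetime.combine(date_end, time(0, 0))]
        ref = [datetime.combine(ref_start, time(12, 0)),
               datetime.combine(ref_end, time(12, 0))]
        return ana, ref
    if direction == 'mo2re':
        ana = [datetime.combine(date_start, time(12, 0)),
               datetime.combine(date_end, time(12, 0))]
        ref = [datetime.combine(ref_start, time(0, 0)),
               datetime.combine(ref_end, time(0, 0))]
        return ref, ana
    raise ValueError('unknown comparison direction: %s' % direction)

r_range, m_range = build_time_ranges('re2mo',
                                     date(2000, 6, 1), date(2000, 8, 31),
                                     date(1971, 1, 1), date(2000, 12, 31))
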
Example #41
0
    def testTimeOnlyGettz(self):
        # gettz returns None
        tz_get = tz.gettz('Europe/Minsk')
        self.assertIs(dt_time(13, 20, tzinfo=tz_get).utcoffset(), None)
Example #42
0
import re
import locale
from datetime import time as dt_time

# matches a single strftime directive such as %Y or %p
_DIRECT_PATTERN = re.compile(r'%[A-Za-z]')

_C_KEY = locale.nl_langinfo(locale.D_T_FMT)
_XD_KEY = locale.nl_langinfo(locale.D_FMT)
_XT_KEY = locale.nl_langinfo(locale.T_FMT)
_AMPM_KEY = '({am}|{pm})'.format(am=dt_time().strftime('%p'),
                                 pm=dt_time(12).strftime('%p'))

_DIRECTIVES = {
    r'\w+': ('%a', '%A', '%b', '%B'),
    r'\d{2}': ('%d', '%H', '%I', '%j', '%m', '%M', '%S', '%U', '%W', '%y'),
    r'\d{4,}': ('%Y', ),
    r'\d': ('%w', ),

    _AMPM_KEY: ('%p', ),
    _C_KEY: ('%c', ),
    _XD_KEY: ('%x', ),
    _XT_KEY: ('%X', ),

    r'(UTC|EST|CST)?': ('%Z', ),
    r'(\+\d{4})?': ('%z', ),
}


def convert(dt_format):
    _special = ('%c', '%x', '%X', )
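
The `convert` function above is truncated; as a rough, hypothetical illustration of how the `_DIRECTIVES` table can be applied (the helper `format_to_regex` is not part of the original module), a strftime format can be turned into a matching regex like this:

def format_to_regex(dt_format):
    # invert _DIRECTIVES into {directive: regex fragment}
    directive_to_re = {d: rx for rx, ds in _DIRECTIVES.items() for d in ds}
    parts, last = [], 0
    # substitute every %-directive, escape the literal text in between
    for m in _DIRECT_PATTERN.finditer(dt_format):
        parts.append(re.escape(dt_format[last:m.start()]))
        parts.append(directive_to_re.get(m.group(0), re.escape(m.group(0))))
        last = m.end()
    parts.append(re.escape(dt_format[last:]))
    return '^' + ''.join(parts) + '$'

# format_to_regex('%H:%M') -> r'^\d{2}:\d{2}$'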
Example #43
0
def get_day_end(day):
    tzinfo = None
    if isinstance(day, datetime):
        tzinfo = day.tzinfo
        day = day.date()
    return datetime.combine(day, dt_time(23, 59, tzinfo=tzinfo))
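
A quick usage sketch (the function assumes the `datetime`/`dt_time` imports used throughout these snippets):

from datetime import datetime, timezone

# keeps the tzinfo of the input and moves the clock to 23:59
get_day_end(datetime(2021, 5, 3, 10, 30, tzinfo=timezone.utc))
# -> datetime(2021, 5, 3, 23, 59, tzinfo=timezone.utc)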
Example #44
0
def extract_datetime(s):
    '''Converts a datetime string into a datetime object'''
    now = datetime.now()
    args = s.upper().split()

    if len(args) == 1:
        # This is a good way to check if a single parameter is time,
        # because standalone time always has one of these substrings.
        # Careful though! If time is preceded by date, it's possible
        # to omit the colon for 24-hour time strings (e.g. "pdc 3/2 15").
        if ':' in args[0] or 'PM' in args[0] or 'AM' in args[0]:
            time = args[0]
            date = None
        else:
            time = None
            date = args[0]
    elif len(args) == 2:
        date, time = args
    else:
        print('Too many arguments. See "pdc --help" for reference.')
        return None

    day, month, year = (None, ) * 3
    hour, minute, second = (None, ) * 3
    pm_am = None

    if date is not None:

        # Extract date
        match = re.match(r'^(?P<day>\d+)\.(?P<month>\d+)(\.(?P<year>\d+))?$',
                         date)
        if match is None:
            match = re.match(r'^(?P<month>\d+)/(?P<day>\d+)(/(?P<year>\d+))?$',
                             date)
            if match is None:
                match = re.match(
                    r'^(?P<year>\d+)-(?P<month>\d+)(-(?P<day>\d+))?$', date)
                if match is None:
                    print(
                        'Unrecognized date format. Available: "dd.mm.yyyy", "mm/dd/yyyy", "yyyy-mm-dd"'
                    )
                    return None
        # Notice that in the first two formats year can be omitted,
        # while in the third format the day can be omitted.
        # If a day is omitted, we simply assume it to be 1.
        # If a year is omitted however, we have to pick the lowest possible
        # year for the rest of the datetime to make sense,
        # i.e. if it is currently August and the user requests a date
        # in November, the year is assumed to be the current one,
        # but if they requested a date in March, the year would have to be
        # the subsequent one. The check will be performed later.

        # Store matches
        day = match.group('day')
        month = match.group('month')
        year = match.group('year')

        # Convert known values
        day = int(day if day is not None else 1)
        month = int(month)
        year = int(year) if year is not None else None

    # If date is omitted completely and there's only time,
    # then depending on the current time the date will either be
    # today or tomorrow. This check is also performed later.

    if time is not None:
        # Extract time
        # Some match strings contain unmatchable groups, e.g. '...$(?P<unmatchable>_)?'
        # This is a hacky way to enforce that these named groups always exist in the match object,
        # to avoid ugly checks later on. Basically, everything that doesn't exist always has None value.
        match = re.match(
            r'^(?P<hour>\d+):(?P<minute>\d+)(:(?P<second>\d+))?$(?P<pm_am>_)?',
            time)
        if match is None:
            match = re.match(
                r'^(?P<hour>\d+)(:(?P<minute>\d+)(:(?P<second>\d+))?)?(?P<pm_am>PM|AM)',
                time)
            if match is None and date is not None:
                match = re.match(
                    r'^(?P<hour>\d+)$(?P<minute>_)?(?P<second>_)?(?P<pm_am>_)?',
                    time)
                if match is None:
                    print(
                        'Unrecognized time format. See "pdc --help" for valid examples.'
                    )
                    return None

        # Store matches
        hour = match.group('hour')
        minute = match.group('minute')
        second = match.group('second')
        pm_am = match.group('pm_am')

    # Convert to int and if any time values were omitted, assume 0
    hour = int(hour if hour is not None else 0)
    minute = int(minute if minute is not None else 0)
    second = int(second if second is not None else 0)

    # Verify time validity
    try:
        dt = dt_time(hour, minute, second)
    except Exception as e:
        print(f'{type(e).__name__}: {e}')
        return None

    # Convert 12-hour time to 24-hour time
    if pm_am is not None:
        if hour == 12 and pm_am == 'AM':
            hour = 0
        elif hour != 12 and pm_am == 'PM':
            hour += 12

    # If date was omitted, find the closest suitable one
    if date is None:
        d = datetime(now.year, now.month, now.day, hour, minute, second)
        if d < now:
            d += timedelta(1)
        day = d.day
        month = d.month
        year = d.year

    # If year was omitted, find the closest suitable one
    elif year is None:
        # Verify month and day validity
        # Use 2016 as year, because it was a leap year (Feb 29 is a valid date)
        try:
            dt = dt_date(2016, month, day)
        except Exception as e:
            print(f'{type(e).__name__}: {e}')
            return None

        # Increment year till the date is valid and in the future
        year = now.year
        dt = datetime(year, month, day, hour, minute, second)
        while dt < now:
            year += 1
            try:
                dt = datetime(year, month, day, hour, minute, second)
            except ValueError:
                continue

    # Verify full timedate validity
    try:
        dt = datetime(year, month, day, hour, minute, second)
    except Exception as e:
        print(f'{type(e).__name__}: {e}')
        return None

    return dt
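
A few hedged usage sketches, assuming the `datetime`, `timedelta`, `dt_date` and `dt_time` imports used by the function:

extract_datetime('3/2 15:00')      # 2 March, 15:00, in the closest suitable year
extract_datetime('7:30PM')         # today or tomorrow at 19:30, whichever is still ahead
extract_datetime('2024-12-24 18')  # 24 December 2024, 18:00 (a bare hour is allowed when a date is given)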
Example #45
0
    transition_functions = [SimpleTransition, dissolve_changes_only]
else:
    raise ValueError("Unsupported platform - must be MacOS or Linux")

# set up list of transit messages - since this is static, it is done outside the loop
lstTransitMessages = []
# lstTransitMessages.append(TransitMessageURL(
#     "http://www.norta.com/Mobile/whers-my-busdetail.aspx?stopcode=235&routecode=10123&direction=0", "Street Car"))
# lstTransitMessages.append(TransitMessageURL(
#     "http://www.norta.com/Mobile/whers-my-busdetail.aspx?stopcode=145&routecode=10122&direction=0", "Magazine Bus"))
# lstTransitMessages.append(TransitMessageURL(
#     "http://www.norta.com/Mobile/whers-my-busdetail.aspx?stopcode=58&routecode=10121&direction=0", "Tchoup Bus"))

q = datetime(1990, 1, 1, 1, 1)

start_time = dt_time(6, 45)
end_time = dt_time(23, 00)

while True:
    q = datetime(1990, 1, 1, 1, 1)
    now_time_fix = q.now().time()
    if start_time < now_time_fix < end_time:
        # Reset list of calendars and messages to display
        lstCalendars = []
        lstMessagestoDisplay = []
        lstTemporaryMessages = []
        try:
            # attempt to get new temporary messages and calendars from the google spreadsheet
            # the "check" list is used so that the temporary messages list is only replaced if the internet is up
            check = []
            GetGoogleSheetData(google_sheet_id, get_credentials(),
Example #46
0
 def testTimeOnlyUTC(self):
     # https://github.com/dateutil/dateutil/issues/132
     # tzutc doesn't care
     tz_utc = tz.tzutc()
     self.assertEqual(dt_time(13, 20, tzinfo=tz_utc).utcoffset(), timedelta(0))
Example #47
0
        re.sub(ur'<\!--.+?-->', ur'',
               unicode(infoboks.parameters[commonargs['week']])).strip())
    if infoboks.has_param(commonargs['week2']):
        endweek = re.sub(
            ur'<\!--.+?-->', ur'',
            unicode(infoboks.parameters[commonargs['week2']])).strip()
        if endweek == '':
            endweek = startweek
    else:
        endweek = startweek
    endweek = int(endweek)

    startweek = Week(year, startweek)
    endweek = Week(year, endweek)
    start = wiki_tz.localize(
        datetime.combine(startweek.monday(), dt_time(0, 0, 0)))
    end = wiki_tz.localize(
        datetime.combine(endweek.sunday(), dt_time(23, 59, 59)))
elif infoboks.has_param(ibcfg['start']) and infoboks.has_param(ibcfg['end']):
    startdt = infoboks.parameters[ibcfg['start']].value
    enddt = infoboks.parameters[ibcfg['end']].value
    start = wiki_tz.localize(
        datetime.strptime(startdt + ' 00 00 00', '%Y-%m-%d %H %M %S'))
    end = wiki_tz.localize(
        datetime.strptime(enddt + ' 23 59 59', '%Y-%m-%d %H %M %S'))
else:
    log('!! fant ikke datoer')  # Norwegian: "did not find dates"
    sys.exit(0)

year = start.isocalendar()[0]
startweek = start.isocalendar()[1]
Example #48
0
 def testTimeOnlyOffset(self):
     # tzoffset doesn't care
     tz_offset = tz.tzoffset("+3", 3600)
     self.assertEqual(dt_time(13, 20, tzinfo=tz_offset).utcoffset(), timedelta(seconds=3600))
Example #49
0
def parse(datetime_string, localize=True):
    _utc_to_local = utc_to_local if localize else lambda x: x

    def _to_int(value):
        if value is None:
            return 0
        return int(value)

    # match time only '00:45:10'
    time_only_match = __RE_MATCH_TIME_ONLY__.match(datetime_string)
    if time_only_match:
        return _utc_to_local(
            datetime.combine(
                date.today(),
                dt_time(hour=_to_int(time_only_match.group('hour')),
                        minute=_to_int(time_only_match.group('minute')),
                        second=_to_int(
                            time_only_match.group('second'))))).time()

    # match date only '2014-11-08'
    date_only_match = __RE_MATCH_DATE_ONLY__.match(datetime_string)
    if date_only_match:
        return _utc_to_local(
            date(_to_int(date_only_match.group('year')),
                 _to_int(date_only_match.group('month')),
                 _to_int(date_only_match.group('day'))))

    # full date time
    date_time_match = __RE_MATCH_DATETIME__.match(datetime_string)
    if date_time_match:
        return _utc_to_local(
            datetime(_to_int(date_time_match.group('year')),
                     _to_int(date_time_match.group('month')),
                     _to_int(date_time_match.group('day')),
                     _to_int(date_time_match.group('hour')),
                     _to_int(date_time_match.group('minute')),
                     _to_int(date_time_match.group('second'))))

    # period - at the moment we support only hours, minutes and seconds (e.g. videos and audio)
    period_match = __RE_MATCH_PERIOD__.match(datetime_string)
    if period_match:
        return timedelta(hours=_to_int(period_match.group('hours')),
                         minutes=_to_int(period_match.group('minutes')),
                         seconds=_to_int(period_match.group('seconds')))

    # abbreviated match
    abbreviated_match = __RE_MATCH_ABBREVIATED__.match(datetime_string)
    if abbreviated_match:
        month = {
            'Jan': 1,
            'Feb': 2,
            'Mar': 3,
            'Apr': 4,
            'May': 5,
            'June': 6,
            'Jun': 6,
            'July': 7,
            'Jul': 7,
            'Aug': 8,
            'Sept': 9,
            'Sep': 9,
            'Oct': 10,
            'Nov': 11,
            'Dec': 12
        }
        return _utc_to_local(
            datetime(year=_to_int(abbreviated_match.group('year')),
                     month=month[abbreviated_match.group('month')],
                     day=_to_int(abbreviated_match.group('day')),
                     hour=_to_int(abbreviated_match.group('hour')),
                     minute=_to_int(abbreviated_match.group('minute')),
                     second=_to_int(abbreviated_match.group('second'))))

    raise KodionException("Could not parse iso 8601 timestamp '%s'" %
                          datetime_string)
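
A hedged usage sketch based on the formats named in the comments above; the exact accepted separators depend on the elided `__RE_MATCH_*__` patterns:

parse('00:45:10', localize=False)    # -> datetime.time(0, 45, 10)
parse('2014-11-08', localize=False)  # -> datetime.date(2014, 11, 8)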
Example #50
0
 def testTimeOnlyLocal(self):
     # tzlocal returns None
     tz_local = tz.tzlocal()
     self.assertIs(dt_time(13, 20, tzinfo=tz_local).utcoffset(), None)
Example #51
0
def check_time_in_range(time_range, check_time=None):
    """
    Check if the given time is contained in the time_range string.
    The time_range can be something like

     <DOW>-<DOW>: <hh:mm>-<hh:mm>,  <DOW>-<DOW>: <hh:mm>-<hh:mm>
     <DOW>-<DOW>: <h:mm>-<hh:mm>,  <DOW>: <h:mm>-<hh:mm>
     <DOW>: <h>-<hh>

    DOW beeing the day of the week: Mon, Tue, Wed, Thu, Fri, Sat, Sun
    hh: 00-23
    mm: 00-59

    If time is omitted the current time is used: time.localtime()

    :param time_range: The timerange
    :type time_range: basestring
    :param time: The time to check
    :type time: datetime
    :return: True, if time is within time_range.
    """
    time_match = False
    dow_index = {"mon": 1,
                 "tue": 2,
                 "wed": 3,
                 "thu": 4,
                 "fri": 5,
                 "sat": 6,
                 "sun": 7}

    check_time = check_time or datetime.now()
    check_day = check_time.isoweekday()
    check_hour = dt_time(check_time.hour, check_time.minute)
    # remove whitespaces
    time_range = ''.join(time_range.split())
    # split into list of time ranges
    time_ranges = time_range.split(",")
    try:
        for tr in time_ranges:
            # tr is something like: Mon-Tue:09:30-17:30
            dow, t = [x.lower() for x in tr.split(":", 1)]
            if "-" in dow:
                dow_start, dow_end = dow.split("-")
            else:
                dow_start = dow_end = dow
            t_start, t_end = t.split("-")
            # determine if we have times like 9:00-15:00 or 9-15
            ts = [int(x) for x in t_start.split(":")]
            te = [int(x) for x in t_end.split(":")]
            if len(ts) == 2:
                time_start = dt_time(ts[0], ts[1])
            else:
                time_start = dt_time(ts[0])
            if len(te) == 2:
                time_end = dt_time(te[0], te[1])
            else:
                time_end = dt_time(te[0])

            # check the day and the time
            if (dow_index.get(dow_start) <= check_day <= dow_index.get(dow_end)
                    and
                    time_start <= check_hour <= time_end):
                time_match = True
    except ValueError:
        log.error("Wrong time range format: <dow>-<dow>:<hh:mm>-<hh:mm>")
        log.debug("{0!s}".format(traceback.format_exc()))

    return time_match
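
A quick usage sketch:

from datetime import datetime

# Monday, 2 Jan 2023, 10:00 lies inside the Mon-Fri 09:00-17:30 window
check_time_in_range("Mon-Fri: 09:00-17:30", datetime(2023, 1, 2, 10, 0))   # True
# Sunday, 8 Jan 2023 is outside Mon-Fri
check_time_in_range("Mon-Fri: 09:00-17:30", datetime(2023, 1, 8, 10, 0))   # False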
Example #52
0
 def testTimeOnlyRange(self):
     # tzrange returns None
     tz_range = tz.tzrange("dflt")
     self.assertIs(dt_time(13, 20, tzinfo=tz_range).utcoffset(), None)
Example #53
0
    def parseEvent(self, event, fg, bg):
        """Method to turn google event into a format that we can use more
           easily.
        """
        # Set up a reference to UTC (all events need a timezone so we can
        # sort them)
        utc = pytz.UTC

        # Have we got a "date" or "dateTime" event?
        # Parse the start and end times as appropriate
        if event["start"].get("date", False):
            start = dateutil.parser.parse(event["start"]["date"])
            end = dateutil.parser.parse(event["end"]["date"])
        else:
            start = dateutil.parser.parse(event["start"]["dateTime"])
            end = dateutil.parser.parse(event["end"]["dateTime"])

        # Change the end time to one second earlier (useful to check number
        # of days of event)
        false_end = end - timedelta(0, 1)
        duration = false_end - start

        # Empty list for our events
        ev_list = []

        # Split long events into daily events
        for i in range(duration.days + 1):

            # Create a new start time if the daily event start time isn't the
            # same as the overall start time
            new_date = start + timedelta(i)
            if new_date.date() != start.date():
                st = datetime.combine(new_date.date(),
                                      dt_time(0, 0, tzinfo=start.tzinfo))
            else:
                st = start

            # Create a new end time if the daily event end time isn't the same
            # as the overall end time
            if new_date.date() != false_end.date():
                add_day = new_date.date() + timedelta(1)
                en = datetime.combine(add_day,
                                      dt_time(0, 0, tzinfo=start.tzinfo))
            else:
                en = end

            # If there's no timezone set, then let's set one
            if st.tzinfo is None:
                st = utc.localize(st)

            if en.tzinfo is None:
                en = utc.localize(en)

            # Create a dict of the info we need
            ev = {"fg": fg,
                  "bg": bg,
                  "summary": event.get("summary", ""),
                  "location": event.get("location", ""),
                  "start": st,
                  "end": en,
                  "stdate": st.date()}

            # Add to our list
            ev_list.append(ev)

        return ev_list
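
A hedged sketch of the kind of Google Calendar event dict this method expects (the field values are made up):

event = {
    "summary": "Conference",
    "location": "Main hall",
    "start": {"date": "2023-05-01"},
    "end": {"date": "2023-05-03"},  # all-day events use an exclusive end date
}
# parseEvent(event, fg="white", bg="blue") would split this into two daily
# entries: 1 May 00:00-2 May 00:00 and 2 May 00:00-3 May 00:00, localized to UTC.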
Example #54
0
 def testTimeOnlyGettz(self):
     # gettz returns None
     tz_get = tz.gettz("Europe/Minsk")
     self.assertIs(dt_time(13, 20, tzinfo=tz_get).utcoffset(), None)
Example #55
0
DICT_TYPE = type(dict())
STR_TYPE = type(str())
UNICODE_TYPE = type(unicode())
FLOAT_TYPE = type(float())
INT_TYPE = type(int())
LONG_TYPE = type(long())
BOOL_TYPE = type(bool())

LIST_TYPES = (LIST_TYPE, TUPLE_TYPE)
STR_TYPES = (STR_TYPE, UNICODE_TYPE)
NOMINAL_TYPES = (INT_TYPE, FLOAT_TYPE, LONG_TYPE)

NONE_VALUES = (None, '---', '', 'None')

# 00:00:00; used to combine with date() to form datetime() at midnight
NULL_TIME = dt_time()

# Convenience constants; these are expiration times in seconds
# console escapes for colored output
WHITE = '\033[97m'
BLUE = '\033[96m'
YELLOW = '\033[93m'
GREEN = '\033[92m'
RED = '\033[91m'
ENDC = '\033[0m'

# date only
RE_DATE = re.compile('(\d\d\d\d)-(\d\d)-(\d\d)')
# date only range syntax
RE_DATE_RANGE = re.compile('(\d\d\d\d-\d\d-\d\d):?(\d\d\d\d-\d\d-\d\d)?')
# datetime only
Example #56
0
class FakeDataAccessor(base.BaseDataAccessor):
    """
    obtain fake data
    
    `FakeDataAccessor` should be used for test purposes when one wants to
    emulate the expected backend response
    
    :Example:
    
    >>> fake_accessor = FakeDataAccessor()
    >>> requester = Requester(fake_accessor)
    >>> requester.login(str, 12312, 'sdfee23e2')
    >>> def printer (*args, **kwargs):
                    pprint(args)
    ...
    >>> requester.clans.get_account_applications_count_since(printer, 123)
    (
            {'total': 17},
            200,
            0
    )
    
    Use `requests_before_logout` to emulate session expiration.
    The session is considered expired once `requests_before_logout` requests
    have been made; use -1 for an endless session (default behavior)
    
    :Example:
    
    >>> fake_accessor = FakeDataAccessor()
    >>> fake_accessor.requests_before_logout = 2
    >>> requester = Requester(fake_accessor)
    >>> requester.login(str, 12312, 'sdfee23e2')
    >>> def printer (*args, **kwargs):
                    print (args)
    ...
    >>> requester.clans.get_account_applications_count_since(printer, 123)
    ({'total': 17}, 200, 0)
    >>> requester.clans.get_account_applications_count_since(printer, 123)
    ({'total': 17}, 200, 0)
    >>> requester.clans.get_account_applications_count_since(printer, 123)
    ('User is not authentificated', 403, 2)
    
    To set expected result for method use `set_data` method
    
    :Example:
    
    >>> fake_accessor = FakeDataAccessor()
    >>> requester = Requester(fake_accessor)
    >>> requester.login(str, 12312, 'sdfee23e2')
    >>> def printer (*args, **kwargs):
                    print (args)
    >>> fake_accessor.set_data('account_applications_count_since', 14, {'total': 11})
    >>> requester.clans.get_account_applications_count_since(printer, 14)
    ({'total': 11}, 200, 0)
    >>> requester.clans.get_account_applications_count_since(printer, 123)
    ({'total': 17}, 200, 0)
    
    To emulate error in response set data to error instance
    
    :Example:
    
    >>> fake_accessor = FakeDataAccessor()
    >>> requester = Requester(fake_accessor)
    >>> requester.login(str, 12312, 'sdfee23e2')
    >>> def printer (*args, **kwargs):
                    print (args)
    >>> fake_accessor.set_data('account_applications_count_since', 14, exceptions.PermissionDenied())
    >>> requester.clans.get_account_applications_count_since(printer, 14)
    ('Forbidden', 403, 3)
    >>> requester.clans.get_account_applications_count_since(printer, 123)
    ({'total': 17}, 200, 0)
    
    """
    requests_before_logout = -1

    def __init__(self,
                 url_fetcher=None,
                 config=None,
                 client_lang=None,
                 user_agent=None):
        super(FakeDataAccessor, self).__init__()
        self.client_lang = client_lang
        self._account = None
        self._storage = {}
        self.account = None
        self.user_agent = user_agent
        return

    def login(self, callback, account_id, spa_token):
        self.account = account_id
        self._account = self.requests_before_logout
        result, status_code = ('ok', 200)
        response_code = exceptions.ResponseCodes.NO_ERRORS
        _doResponse(callback, result, status_code, response_code)

    def get_alive_status(self, callback):
        result, status_code = {'status': 'I am alive!'}, 200
        response_code = exceptions.ResponseCodes.NO_ERRORS
        _doResponse(callback, result, status_code, response_code)

    def logout(self, callback):
        self.account = None
        self._account = None
        result, status_code = ('ok', 200)
        response_code = exceptions.ResponseCodes.NO_ERRORS
        _doResponse(callback, result, status_code, response_code)
        return

    def _filter_data(self, data, fields):
        if isinstance(data, list):
            return [self._filter_data(i, fields) for i in data]
        return {k: v for k, v in data.iteritems() if k in fields}

    def _request_data(self, section, entity_id, fields=None):
        if not self._account:
            raise exceptions.AuthentificationError()
        self._account -= 1
        try:
            result = self._storage[section][entity_id]
        except KeyError:
            result = EXAMPLES[section]
            if callable(result):
                result = result(entity_id)
                self._storage.setdefault(section, {})[entity_id] = result

        if isinstance(result, exceptions.BaseRequestError):
            raise result
        if fields:
            result = self._filter_data(result, fields)
        return result

    def _compare_keys(self, example, data):
        if isinstance(example, list):
            for i in data:
                self._compare_keys(example[0], i)

        if isinstance(example, dict):
            if set(example) ^ set(data):
                missed = set(example) - set(data)
                extra = set(data) - set(example)
                message = []
                if missed:
                    message.append('(%s) keys are missed' % ', '.join(missed))
                if extra:
                    message.append('(%s) keys are not needed' %
                                   ', '.join(extra))
                raise ValueError(' and '.join(message))

    def set_data(self, section, entity_id, data):
        """
        set fake data for different sections, compare keys while setting
        
        the possible sections are the following:
        
                - account_applications_count_since
                - account_invites
                - accounts_clans
                - accounts_info
                - accounts_names
                - clan_applications
                - clan_globalmap_stats
                - clan_invites_count_since
                - clan_invites
                - clan_members
                - clan_provinces
                - clans_info
                - clans_ratings
                - fronts_info
                - search_clans
                - stronghold_info
                - strongholds_state
                - strongholds_statistics
        
        """
        if section not in EXAMPLES:
            raise AssertionError
        example = EXAMPLES[section]
        if not isinstance(data, exceptions.BaseRequestError):
            self._compare_keys(example, data)
        self._storage.setdefault(section, {})[entity_id] = data

    @fake_method(
        example=lambda clan_id: {
            'clan_id': clan_id,
            'xp_avg': random.randrange(1, 1000) / 10.0,
            'efficiency': random.randrange(1, 10000),
            'battles_count_avg': random.randrange(1, 10000),
            'wins_ratio_avg': random.randrange(1, 100),
            'gm_elo_rating_6': random.randrange(1, 1000),
            'gm_elo_rating_8': random.randrange(1, 1000),
            'gm_elo_rating_10': random.randrange(1, 1000),
            'gm_elo_rating_6_rank': random.randrange(1, 1000),
            'gm_elo_rating_8_rank': random.randrange(1, 1000),
            'gm_elo_rating_10_rank': random.randrange(1, 1000),
            'fb_elo_rating_8': random.randrange(1, 1000),
            'fb_elo_rating_10': random.randrange(1, 1000),
            'fb_battles_count_10_28d': random.randrange(1, 100),
            'fs_battles_count_10_28d': random.randrange(1, 100),
            'gm_battles_count_28d': random.randrange(1, 100),
            'fs_battles_count_28d': random.randrange(1, 100),
            'fb_battles_count_28d': random.randrange(1, 100)
        })
    def get_clans_ratings(self, clan_ids, fields=None):
        """
        return fake data from `clans_ratings` section
        """
        return [
            self._request_data('clans_ratings', i, fields=fields)
            for i in clan_ids
        ]

    @fake_method(
        example=lambda clan_id: {
            'name': 'xxx',
            'tag': 'ff',
            'motto': 'yyyy',
            'leader_id': 666,
            'members_count': 13,
            'clan_id': clan_id,
            'created_at': datetime.now(),
            'accepts_join_requests': True,
            'treasury': 2423
        })
    def get_clans_info(self, clan_ids, fields=None):
        """
        return fake data from `clans_info` section
        """
        return [
            self._request_data('clans_info', clan_id, fields=fields)
            for clan_id in clan_ids
        ]

    @fake_method(example=lambda acc_id: {'id': acc_id, 'name': 'name'})
    def get_accounts_names(self, account_ids, fields=None):
        """
        return fake data from `accounts_names` section
        """
        return [
            self._request_data('accounts_names', account_id, fields=fields)
            for account_id in account_ids
        ]

    @fake_method(example=lambda clan_id: [{
        'account_id': 2324 + i,
        'role_name': 'officer',
        'role_bw_flag': 1 << i,
        'clan_id': clan_id,
        'joined_at': datetime.now()
    } for i in range(11)])
    def get_clan_members(self, clan_id, fields=None):
        """
        return fake data from `clan_members` section
        """
        return self._request_data('clan_members', clan_id, fields=fields)

    @fake_method(
        example={
            'clan_id': 2790,
            'favorite_arena_6': 1,
            'favorite_arena_8': 3,
            'favorite_arena_10': 65549,
            'favorite_primetime': dt_time(19, 0)
        })
    def get_clan_favorite_attributes(self, clan_id, fields=None):
        """
        return fake data from `clan_favorite_attributes` section
        """
        return self._request_data('clan_favorite_attributes',
                                  clan_id,
                                  fields=fields)

    @fake_method(example={'total': 17})
    def get_account_applications_count_since(self, account_id, since=None):
        """
        return fake data from `account_applications_count_since` section
        """
        return self._request_data('account_applications_count_since',
                                  account_id)

    @fake_method(example={'total': 14})
    def get_clan_invites_count_since(self, clan_id, since=None):
        """
        return fake data from `clan_invites_count_since` section
        """
        return self._request_data('clan_invites_count_since', clan_id)

    @fake_method(
        example={
            'account_id': 234,
            'joined_at': datetime.now(),
            'clan_id': 343,
            'role_bw_flag': 13,
            'role_name': 'commander',
            'in_clan_cooldown_till': datetime.now(),
            'clan_tag': 'fake',
            'clan_color': 123
        })
    def get_accounts_clans(self, account_ids, fields):
        """
        return fake data from `accounts_clans` section
        """
        return [
            self._request_data('accounts_clans', i, fields=fields)
            for i in account_ids
        ]

    @fake_method(example=lambda (account_id, statuses): [{
        'status':
        random.choice(statuses or ('active', 'declined', 'cancelled',
                                   'accepted', 'expired', 'error', 'deleted')),
        'created_at':
        datetime.now(),
        'updated_at':
        datetime.now(),
        'sender_id':
        random.randrange(1, 10000),
        'id':
        random.randrange(1, 1000000),
        'account_id':
        account_id,
        'clan_id':
        random.randrange(1, 10000),
        'status_changer_id':
        random.randrange(1, 10000),
        'comment':
        'Welcome {}!'.format(random.randrange(1, 10000))
        if random.choice((1, 0)) else ''
    } for i in range(random.randrange(0, 1000))])
    @paginated_method
    def get_account_applications(self, fields=None, statuses=None):
        """
        return fake data from `account_applications` section
        """
        return self._request_data('account_applications',
                                  (self.account, tuple(statuses or [])),
                                  fields=fields)

    @fake_method(example=lambda (clan_id, statuses): [{
        'status':
        random.choice(statuses or ('active', 'declined', 'cancelled',
                                   'accepted', 'expired', 'error', 'deleted')),
        'created_at':
        datetime.now(),
        'updated_at':
        datetime.now(),
        'sender_id':
        random.randrange(1, 10000),
        'id':
        random.randrange(1, 1000000),
        'account_id':
        random.randrange(1, 10000),
        'clan_id':
        clan_id,
        'status_changer_id':
        random.randrange(1, 10000),
        'comment':
        'Welcome {}!'.format(random.randrange(1, 10000))
        if random.choice((1, 0)) else ''
    } for i in range(random.randrange(0, 1000))])
    @paginated_method
    def get_clan_applications(self, clan_id, fields=None, statuses=None):
        """
        return fake data from `clan_applications` section
        """
        return self._request_data('clan_applications',
                                  (clan_id, tuple(statuses or [])),
                                  fields=fields)

    @fake_method(example=lambda search: ([] if len(
        search) % 2 else [{
            'name': 'Clan Name %d' % random.randrange(1, 1000),
            'tag': 'TCLAN',
            'motto': 'Clan Motto',
            'leader_id': random.randrange(1, 10000),
            'clan_id': random.randrange(1, 100),
            'members_count': random.randrange(1, 50),
            'created_at': datetime.now(),
            'accepts_join_requests': random.choice((True, False))
        } for i in range(random.randrange(1, 36))]))
    @paginated_method
    def search_clans(self, search, fields=None):
        """
        return fake data from `clans_info` section
        """
        return self._request_data('search_clans', search)

    @fake_method(example=lambda account:
                 [{
                     'name': 'Clan Name %d' % random.randrange(1, 1000),
                     'tag': 'TCLAN',
                     'motto': 'Clan Motto',
                     'leader_id': random.randrange(1, 10000),
                     'clan_id': random.randrange(1, 100),
                     'members_count': random.randrange(1, 50),
                     'created_at': datetime.now(),
                     'accepts_join_requests': random.choice((True, False))
                 } for i in range(random.randrange(1, 36))])
    @paginated_method
    def get_recommended_clans(self, fields=None):
        """
        return fake data from `clans_info` section
        """
        return self._request_data('recommended_clans', self.account)

    @fake_method(example=lambda (clan_id, statuses): [{
        'status':
        random.choice(statuses or ('active', 'declined', 'cancelled',
                                   'accepted', 'expired', 'error', 'deleted')),
        'created_at':
        datetime.now(),
        'updated_at':
        datetime.now(),
        'sender_id':
        random.randrange(1, 10000),
        'id':
        random.randrange(1, 1000000),
        'account_id':
        random.randrange(1, 10000),
        'clan_id':
        clan_id,
        'comment':
        'Welcome {}!'.format(random.randrange(1, 10000))
        if random.choice((1, 0)) else '',
        'status_changer_id':
        2132
    } for i in range(random.randrange(0, 1000))])
    @paginated_method
    def get_clan_invites(self, clan_id, fields=None, statuses=None):
        """
        return fake data from `clan_invites` section
        """
        return self._request_data('clan_invites',
                                  (clan_id, tuple(statuses or [])),
                                  fields=fields)

    @fake_method(example=lambda (account_id, statuses): [{
        'status':
        random.choice(statuses or ('active', 'declined', 'cancelled',
                                   'accepted', 'expired', 'error', 'deleted')),
        'created_at':
        datetime.now(),
        'updated_at':
        datetime.now(),
        'sender_id':
        random.randrange(1, 10000),
        'id':
        random.randrange(1, 1000000),
        'account_id':
        account_id,
        'clan_id':
        random.randrange(1, 10000),
        'status_changer_id':
        2132,
        'comment':
        'Welcome {}!'.format(random.randrange(1, 10000))
        if random.choice((1, 0)) else ''
    } for i in range(random.randrange(0, 1000))])
    @paginated_method
    def get_account_invites(self, fields=None, statuses=None):
        """
        return fake data from `account_invites` section
        """
        return self._request_data('account_invites',
                                  (self.account, tuple(statuses or [])),
                                  fields=fields)

    @fake_method(
        example=lambda account_id: {
            'global_rating': random.randrange(100, 10000),
            'battle_avg_xp': random.randrange(100, 10000),
            'battles_count': random.randrange(1, 1000),
            'battle_avg_performance': random.uniform(0, 1),
            'xp_amount': random.randrange(100, 1000),
            'account_id': account_id
        })
    def get_accounts_info(self, account_ids, fields=None):
        """
        return fake data from `accounts_info` section
        """
        return [
            self._request_data('accounts_info', acc_id, fields=fields)
            for acc_id in account_ids
        ]

    @fake_method(example=[{
        'front_name':
        'some_front',
        'province_id':
        'some_province',
        'front_name_localized':
        'some_front_localized',
        'province_id_localized':
        'some_province_localized',
        'revenue':
        324,
        'hq_connected':
        True,
        'prime_time':
        dt_time(18, 0, 0),
        'periphery':
        333,
        'game_map':
        'some_map',
        'pillage_cooldown':
        1,
        'pillage_end_datetime':
        datetime.now() + timedelta(hours=3),
        'turns_owned':
        12
    }, {
        'front_name': 'some_front2',
        'province_id': 'some_province2',
        'front_name_localized': 'some_front_localized2',
        'province_id_localized': 'some_province_localized2',
        'revenue': 333,
        'hq_connected': True,
        'prime_time': dt_time(19, 0, 0),
        'periphery': 444,
        'game_map': 'some_map2',
        'pillage_cooldown': None,
        'pillage_end_datetime': None,
        'turns_owned': 12,
        'arena_id': 5
    }])
    def get_clan_provinces(self, clan_id, fields=None):
        """
        return fake data from `clan_provinces` section
        """
        return self._request_data('clan_provinces', clan_id, fields=fields)

    @fake_method(
        example={
            'battles_lost': 12,
            'influence_points': 121,
            'provinces_captured': 23,
            'provinces_count': 234,
            'battles_played': 332,
            'battles_won': 232,
            'battles_played_on_6_level': 21,
            'battles_won_on_6_level': 12,
            'battles_played_on_8_level': 32,
            'battles_won_on_8_level': 21,
            'battles_played_on_10_level': 43,
            'battles_won_on_10_level': 23
        })
    def get_clan_globalmap_stats(self, clan_id, fields=None):
        """
        return fake data from `clan_globalmap_stats` section
        """
        return self._request_data('clan_globalmap_stats',
                                  clan_id,
                                  fields=fields)

    @fake_method(example=[{
        'front_name': 'front_name',
        'front_name_localized': 'front_name_localized',
        'min_vehicle_level': 2,
        'max_vehicle_level': 4
    }])
    def get_fronts_info(self, front_names=None, fields=None):
        """
        return fake data from `fronts_info` section
        """
        return self._request_data('fronts_info', front_names, fields=fields)

    @fake_method(
        example={
            'defence_mode_is_activated':
            True,
            'defence_hour':
            dt_time(10, 0),
            'sortie_battles_count':
            23,
            'sortie_wins':
            12,
            'sortie_losses':
            19,
            'sortie_fort_resource_in_absolute':
            100,
            'sortie_fort_resource_in_champion':
            71,
            'sortie_fort_resource_in_middle':
            60,
            'defence_battles_count':
            234,
            'defence_combat_wins':
            21,
            'sortie_middle_battles_count':
            12,
            'sortie_champion_battles_count':
            32,
            'sortie_absolute_battles_count':
            23,
            'defence_enemy_base_capture_count':
            43,
            'defence_capture_enemy_building_total_count':
            55,
            'defence_loss_own_building_total_count':
            65,
            'defence_attack_efficiency':
            23.2,
            'defence_success_attack_count':
            122,
            'defence_attack_count':
            13,
            'defence_defence_efficiency':
            32.2,
            'defence_defence_count':
            24,
            'defence_success_defence_count':
            5,
            'total_resource_amount':
            321,
            'defence_resource_loss_count':
            112,
            'defence_resource_capture_count':
            322,
            'fb_battles_count_8':
            23,
            'fb_battles_count_10':
            12,
            'level':
            2,
            'buildings': [{
                'type': 1,
                'direction': 0,
                'level': 2,
                'position': 2
            }, {
                'type': 2,
                'direction': 1,
                'level': 3,
                'position': 2
            }]
        })
    def get_stronghold_info(self, clan_id, fields=None):
        """
        return fake data from `stronghold_info` section
        """
        return self._request_data('stronghold_info', clan_id, fields=fields)

    @fake_method(
        example={
            'buildings_count':
            4,
            'directions_count':
            3,
            'buildings': [{
                'type': 1,
                'hp': 32,
                'storage': 123,
                'level': 4,
                'position': 7,
                'direction': 1
            }],
            'directions': [1, 2],
            'off_day':
            3,
            'vacation_start':
            datetime.utcnow() + timedelta(days=1),
            'vacation_finish':
            datetime.utcnow() + timedelta(days=4),
            'periphery_id':
            333,
            'clan_tag':
            'tag',
            'clan_name':
            'some_name',
            'clan_id':
            21,
            'level':
            2,
            'sortie_wins_period':
            7,
            'sortie_battles_wins_percentage_period':
            20.0,
            'sortie_battles_count_period':
            122,
            'defence_battles_count_period':
            21
        })
    def get_strongholds_statistics(self, clan_id, fields=None):
        """
        return fake data from `strongholds_statistics` section
        """
        return self._request_data('strongholds_statistics',
                                  clan_id,
                                  fields=fields)

    @fake_method(example={'clan_id': 234, 'defence_hour': dt_time(10, 0)})
    def get_strongholds_state(self, clan_id, fields=None):
        """
        return fake data from `strongholds_state` section
        """
        return self._request_data('strongholds_state', clan_id, fields=fields)

    @fake_method(example=[{'clan_id': 234, 'account_id': 3, 'id': 23}])
    def create_invites(self, clan_id, account_ids, comment, fields=None):
        """
        return fake data from `create_invites` section
        """
        return self._request_data('create_invites', (clan_id, account_ids),
                                  fields=fields)

    @fake_method(example=[{'clan_id': 224, 'account_id': 3, 'id': 123}])
    def create_applications(self, clan_ids, comment, fields=None):
        """
        return fake data from `create_applications` section
        """
        return self._request_data('create_applications',
                                  clan_ids,
                                  fields=fields)

    @fake_method(example=lambda obj_id: {
        'transaction_id': 213,
        'id': obj_id,
        'account_id': 343,
        'clan_id': 17
    })
    def accept_application(self, application_id, fields=None):
        """
        return fake data from `accept_application` section
        """
        return self._request_data('accept_application',
                                  application_id,
                                  fields=fields)

    @fake_method(example=lambda obj_id: {
        'id': obj_id,
        'account_id': 343,
        'clan_id': 17
    })
    def decline_application(self, application_id, fields=None):
        """
        return fake data from `decline_application` section
        """
        return self._request_data('decline_application',
                                  application_id,
                                  fields=fields)

    @fake_method(example=lambda obj_id: {
        'transaction_id': 213,
        'id': obj_id,
        'account_id': 343,
        'clan_id': 17
    })
    def accept_invite(self, invite_id, fields=None):
        """
        return fake data from `accept_invite` section
        """
        return self._request_data('accept_invite', invite_id, fields=fields)

    @fake_method(example=lambda obj_id: {
        'id': obj_id,
        'account_id': 343,
        'clan_id': 17
    })
    def decline_invite(self, invite_id, fields=None):
        """
        return fake data from `decline_invite` section
        """
        return self._request_data('decline_invite', invite_id, fields=fields)

    @fake_method(example=[{
        'id': 991,
        'account_id': 1001,
        'clan_id': 19
    }, {
        'id': 992,
        'account_id': 1001,
        'clan_id': 19
    }, {
        'id': 993,
        'account_id': 1001,
        'clan_id': 19
    }])
    def bulk_decline_invites(self, invite_ids):
        """
        return fake data from `bulk_decline_invites` section
        """
        return self._request_data('bulk_decline_invites', invite_ids)

    @fake_method(
        example={
            'permissions': {
                'manage_reserves': [
                    'commander', 'combat_officer', 'executive_officer',
                    'personnel_officer'
                ]
            },
            'time_to_ready':
            900,
            'max_level':
            10,
            'battle_series_duration':
            3600,
            'enemy_clan':
            None,
            'industrial_resource_multiplier':
            1,
            'max_players_count':
            15,
            'type':
            'FORT_BATTLE',
            'max_legionaries_count':
            0,
            'available_reserves': {
                'ARTILLERY_STRIKE': [],
                'HIGH_CAPACITY_TRANSPORT': [],
                'REQUISITION': [],
                'AIRSTRIKE': []
            },
            'direction':
            'A',
            'min_players_count':
            1,
            'matchmaker_next_tick':
            1475578800,
            'battle_series_status': [{
                'battle_reward': 0,
                'gameplay_id': 0,
                'geometry_id': 6,
                'first_resp_clan_id': None,
                'second_resp_clan_id': None,
                'attacker': None,
                'clan_owner_id': 14000012972L,
                'current_battle': False,
                'map_id': 6
            }, {
                'battle_reward': 0,
                'gameplay_id': 0,
                'geometry_id': 14,
                'first_resp_clan_id': None,
                'second_resp_clan_id': None,
                'attacker': None,
                'clan_owner_id': 14000012972L,
                'current_battle': False,
                'map_id': 14
            }, {
                'battle_reward': 0,
                'gameplay_id': 0,
                'geometry_id': 20,
                'first_resp_clan_id': None,
                'second_resp_clan_id': None,
                'attacker': None,
                'clan_owner_id': 14000012972L,
                'current_battle': False,
                'map_id': 20
            }],
            'battle_duration':
            600,
            'requisition_bonus_percent':
            None,
            'public':
            False,
            'selected_reserves': [None, None, None],
            'min_level':
            1
        })
Example #57
0
            )
        elif after_entity.attributes.get(
                ATTR_DEVICE_CLASS
        ) == SensorDeviceClass.TIMESTAMP and after_entity.state not in (
                STATE_UNAVAILABLE,
                STATE_UNKNOWN,
        ):
            after_datetime = dt_util.parse_datetime(after_entity.state)
            if after_datetime is None:
                return False
            after = dt_util.as_local(after_datetime).time()
        else:
            return False

    if before is None:
        before = dt_time(23, 59, 59, 999999)
    elif isinstance(before, str):
        if not (before_entity := hass.states.get(before)):
            raise ConditionErrorMessage("time",
                                        f"unknown 'before' entity {before}")
        if before_entity.domain == "input_datetime":
            before = dt_time(
                before_entity.attributes.get("hour", 23),
                before_entity.attributes.get("minute", 59),
                before_entity.attributes.get("second", 59),
            )
        elif before_entity.attributes.get(
                ATTR_DEVICE_CLASS
        ) == SensorDeviceClass.TIMESTAMP and before_entity.state not in (
                STATE_UNAVAILABLE,
                STATE_UNKNOWN,
Example #58
0
    def _handler(self, request, response):
        init_process_logger('log.txt')
        response.outputs['output_log'].file = 'log.txt'

        LOGGER.info('Start process')
        response.update_status('execution started at : {}'.format(dt.now()), 5)

        process_start_time = time.time()  # measure process execution time ...
        start_time = time.time()  # measure init ...

        ################################
        # reading in the input arguments
        ################################

        try:
            response.update_status('read input parameter : %s ' % dt.now(), 10)
            resource = archiveextract(resource=rename_complexinputs(request.inputs['resource']))
            refSt = request.inputs['refSt'][0].data
            refEn = request.inputs['refEn'][0].data
            dateSt = request.inputs['dateSt'][0].data
            dateEn = request.inputs['dateEn'][0].data
            seasonwin = request.inputs['seasonwin'][0].data
            nanalog = request.inputs['nanalog'][0].data

            bboxDef = '-20,40,30,70'  # in general format
            # level = 500

            level = request.inputs['level'][0].data
            if (level == 500):
                dummylevel = 1000  # dummy workaround for cdo sellevel
            else:
                dummylevel = 500
            LOGGER.debug('LEVEL selected: %s hPa' % (level))

            bbox = []
            bboxStr = request.inputs['BBox'][0].data
            LOGGER.debug('BBOX selected by user: %s ' % (bboxStr))
            bboxStr = bboxStr.split(',')

            # Check for wrong coordinates and apply the default if necessary
            if (abs(float(bboxStr[0])) > 180 or
                    abs(float(bboxStr[1])) > 180 or
                    abs(float(bboxStr[2])) > 90 or
                    abs(float(bboxStr[3])) > 90):
                bboxStr = bboxDef  # request.inputs['BBox'].default  # .default doesn't work anymore!!!
                LOGGER.debug('BBOX is out of the range, using default instead: %s ' % (bboxStr))
                bboxStr = bboxStr.split(',')

            # for i in bboxStr: bbox.append(int(i))
            bbox.append(float(bboxStr[0]))
            bbox.append(float(bboxStr[2]))
            bbox.append(float(bboxStr[1]))
            bbox.append(float(bboxStr[3]))
            LOGGER.debug('BBOX for ocgis: %s ' % (bbox))
            LOGGER.debug('BBOX original: %s ' % (bboxStr))
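            # illustrative note (not in the original): the user string is
            # (lonmin, lonmax, latmin, latmax), e.g. '-20,40,30,70', while the
            # list built above is reordered to (lonmin, latmin, lonmax, latmax),
            # i.e. [-20.0, 30.0, 40.0, 70.0], which is the order ocgis expects.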

            normalize = request.inputs['normalize'][0].data
            plot = request.inputs['plot'][0].data
            distance = request.inputs['dist'][0].data
            outformat = request.inputs['outformat'][0].data
            timewin = request.inputs['timewin'][0].data
            detrend = request.inputs['detrend'][0].data

            LOGGER.info('input parameters set')
            response.update_status('Read in and convert the arguments', 20)
        except Exception as e:
            msg = 'failed to read input parameter %s ' % e
            LOGGER.error(msg)
            raise Exception(msg)

        ######################################
        # convert types and set environment
        ######################################
        try:

            # not necessary if ocgis_module.py is fixed
            refSt = dt.combine(refSt, dt_time(12, 0))
            refEn = dt.combine(refEn, dt_time(12, 0))
            dateSt = dt.combine(dateSt, dt_time(12, 0))
            dateEn = dt.combine(dateEn, dt_time(12, 0))

            # Check if 360_day calendar:
            try:
                if type(resource) is not list:
                    resource = [resource]

                modcal, calunits = get_calendar(resource[0])
                if '360_day' in modcal:
                    if refSt.day == 31:
                        refSt = refSt.replace(day=30)
                        LOGGER.debug('Date has been changed for: %s' % (refSt))
                    if refEn.day == 31:
                        refEn = refEn.replace(day=30)
                        LOGGER.debug('Date has been changed for: %s' % (refEn))
                    if dateSt.day == 31:
                        dateSt = dateSt.replace(day=30)
                        LOGGER.debug('Date has been changed for: %s' % (dateSt))
                    if dateEn.day == 31:
                        dateEn = dateEn.replace(day=30)
                        LOGGER.debug('Date has been changed for: %s' % (dateEn))
            except Exception:
                LOGGER.debug('Could not detect calendar')

            if normalize == 'None':
                seacyc = False
            else:
                seacyc = True

            if outformat == 'ascii':
                outformat = '.txt'
            elif outformat == 'netCDF':
                outformat = '.nc'
            else:
                LOGGER.error('output format not valid')

            start = min(refSt, dateSt)
            end = max(refEn, dateEn)

            LOGGER.info('environment set')
        except Exception as e:
            msg = 'failed to set environment %s ' % e
            LOGGER.error(msg)
            raise Exception(msg)

        LOGGER.debug("init took %s seconds.", time.time() - start_time)
        response.update_status('Read in and convert the arguments', 30)

        ########################
        # input data preparation
        ########################

        # TODO: Check if files contain more than one dataset

        response.update_status('Start preparing input data', 40)
        start_time = time.time()  # measure data preparation ...
        try:
            # TODO: Add selection of the level. maybe below in call(..., level_range=[...,...])

            if type(resource) == list:
                # resource.sort()
                resource = sorted(resource, key=lambda i: path.splitext(path.basename(i))[0])
            else:
                resource = [resource]

            # ===============================================================
            # REMOVE resources which are out of interest from the list
            # (years > and < than requested for calculation)

            tmp_resource = []

            for re in resource:
                s, e = get_timerange(re)
                tmpSt = dt.strptime(s, '%Y%m%d')
                tmpEn = dt.strptime(e, '%Y%m%d')
                if ((tmpSt <= end) and (tmpEn >= start)):
                    tmp_resource.append(re)
                    LOGGER.debug('Selected file: %s ' % (re))
            resource = tmp_resource

            # Try to fix memory issue... (ocgis call for files like 20-30 gb... )
            # IF 4D - select pressure level before domain cut
            #
            # resource properties
            ds = Dataset(resource[0])
            variable = get_variable(resource[0])
            var = ds.variables[variable]
            dims = list(var.dimensions)
            dimlen = len(dims)

            try:
                model_id = ds.getncattr('model_id')
            except AttributeError:
                model_id = 'Unknown model'

            LOGGER.debug('MODEL: %s ' % (model_id))

            lev_units = 'hPa'

            if (dimlen > 3):
                lev = ds.variables[dims[1]]
                # actually index [1] needs to be detected... assuming zg(time, plev, lat, lon)
                lev_units = lev.units

                if (lev_units == 'Pa'):
                    level = level * 100
                    dummylevel = dummylevel * 100
                    # TODO: Better to check the name and units of the vertical level
                    # and locate 200, 300 or 500 mbar in it, not just level = level * 100.

            # Get Levels
            from cdo import Cdo
            cdo = Cdo(env=environ)
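            # CDO is used as a fallback whenever the ocgis-based call() fails
            # (e.g. on very large files) for level and domain subsetting.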

            lev_res = []
            if (dimlen > 3):
                for res_fn in resource:
                    tmp_f = 'lev_' + path.basename(res_fn)
                    try:
                        tmp_f = call(resource=res_fn, variable=variable, spatial_wrapping='wrap',
                                     level_range=[int(level), int(level)], prefix=tmp_f[0:-3])
                    except:
                        comcdo = '%s,%s' % (level, dummylevel)
                        cdo.sellevel(comcdo, input=res_fn, output=tmp_f)
                    lev_res.append(tmp_f)
            else:
                lev_res = resource

            # ===============================================================
            # TODO: Before the domain cut, regrid to the selected grid (?) if the data are not a reanalysis
            # ================================================================

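            # Domain cut: call() uses geom=bbox directly, while the CDO fallback
            # (sellonlatbox) expects lon1,lon2,lat1,lat2 - hence the reordering of
            # bbox indices below (assuming bbox = [lonmin, latmin, lonmax, latmax]).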
            # Get domain
            regr_res = []
            for res_fn in lev_res:
                tmp_f = 'dom_' + path.basename(res_fn)
                comcdo = '%s,%s,%s,%s' % (bbox[0], bbox[2], bbox[1], bbox[3])
                try:
                    tmp_f = call(resource=res_fn, geom=bbox, spatial_wrapping='wrap', prefix=tmp_f[0:-3])
                except:
                    cdo.sellonlatbox(comcdo, input=res_fn, output=tmp_f)
                regr_res.append(tmp_f)

            # ============================
            # Block to Detrend data
            # TODO 1: Keep the trend as a separate file
            # TODO 2: Think about how to add options to plot anomalies AND the original data...
            #         Maybe compute archive and simulation = call(...) over the NOT-detrended data and keep it as well
            if (dimlen > 3):
                res_tmp = get_level(regr_res, level=level)
                variable = 'z%s' % level
            else:
                res_tmp = call(resource=regr_res, spatial_wrapping='wrap')

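            # Optional detrending: remove_mean_trend() is expected to strip the
            # mean trend from the subset so the analogue search works on
            # anomalies; with detrend == 'None' the subset is used as is.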
            if detrend == 'None':
                orig_model_subset = res_tmp
            else:
                orig_model_subset = remove_mean_trend(res_tmp, varname=variable)

            # ============================

#            archive_tmp = call(resource=regr_res, time_range=[refSt, refEn], spatial_wrapping='wrap')
#            simulation_tmp = call(resource=regr_res, time_range=[dateSt, dateEn], spatial_wrapping='wrap')

            ################################
            # Prepare names for config.txt #
            ################################

            # refDatesString = dt.strftime(refSt, '%Y-%m-%d') + "_" + dt.strftime(refEn, '%Y-%m-%d')
            # simDatesString = dt.strftime(dateSt, '%Y-%m-%d') + "_" + dt.strftime(dateEn, '%Y-%m-%d')

            # Build the date strings via isoformat() because datetime.strftime()
            # cannot handle years before 1900 on older Python versions.
            refDatesString = refSt.isoformat().strip().split("T")[0] + "_" + refEn.isoformat().strip().split("T")[0]
            simDatesString = dateSt.isoformat().strip().split("T")[0] + "_" + dateEn.isoformat().strip().split("T")[0]

            archiveNameString = "base_" + variable + "_" + refDatesString + '_%.1f_%.1f_%.1f_%.1f' \
                                % (bbox[0], bbox[2], bbox[1], bbox[3])
            simNameString = "sim_" + variable + "_" + simDatesString + '_%.1f_%.1f_%.1f_%.1f' \
                            % (bbox[0], bbox[2], bbox[1], bbox[3])

            archive = call(resource=res_tmp, time_range=[refSt, refEn], spatial_wrapping='wrap', prefix=archiveNameString)
            simulation = call(resource=res_tmp, time_range=[dateSt, dateEn], spatial_wrapping='wrap', prefix=simNameString)

            #######################################################################################

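            # If seasonal-cycle normalisation is enabled, precompute the seasonal
            # cycle files for the base (archive) and simulation periods.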
            if seacyc is True:
                seasoncyc_base, seasoncyc_sim = analogs.seacyc(archive, simulation, method=normalize)
            else:
                seasoncyc_base = None
                seasoncyc_sim = None
        except Exception as e:
            msg = 'failed to prepare archive and simulation files %s ' % e
            LOGGER.error(msg)
            raise Exception(msg)
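        # Temporary text file that will receive the CASTf90 analogue output.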
        ip, output = mkstemp(dir='.', suffix='.txt')
        output_file = path.abspath(output)
        files = [path.abspath(archive), path.abspath(simulation), output_file]

        LOGGER.debug("data preperation took %s seconds.", time.time() - start_time)

        ############################
        # generating the config file
        ############################

        # TODO: add MODEL name as argument

        response.update_status('writing config file', 50)
        start_time = time.time()  # measure write config ...

        try:
            config_file = analogs.get_configfile(
                files=files,
                seasoncyc_base=seasoncyc_base,
                seasoncyc_sim=seasoncyc_sim,
                base_id=model_id,
                sim_id=model_id,
                timewin=timewin,
                varname=variable,
                seacyc=seacyc,
                cycsmooth=91,
                nanalog=nanalog,
                seasonwin=seasonwin,
                distfun=distance,
                outformat=outformat,
                calccor=True,
                silent=False,
                # period=[dt.strftime(refSt, '%Y-%m-%d'), dt.strftime(refEn, '%Y-%m-%d')],
                period=[refSt.isoformat().strip().split("T")[0], refEn.isoformat().strip().split("T")[0]],
                bbox="%s,%s,%s,%s" % (bbox[0], bbox[2], bbox[1], bbox[3]))
        except Exception as e:
            msg = 'failed to generate config file %s ' % e
            LOGGER.error(msg)
            raise Exception(msg)

        LOGGER.debug("write_config took %s seconds.", time.time() - start_time)

        ##############
        # CASTf90 call
        ##############
        import subprocess
        import shlex

        start_time = time.time()  # measure call castf90
        response.update_status('Start CASTf90 call', 60)

        # -----------------------
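        # Best-effort attempt to raise the MKL/OpenMP thread limits for the
        # Fortran analogue code; any failure here is logged and ignored.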
        try:
            import ctypes
            # TODO: this library name is Linux-specific
            mkl_rt = ctypes.CDLL('libmkl_rt.so')
            nth = mkl_rt.mkl_get_max_threads()
            LOGGER.debug('Current number of threads: %s' % (nth))
            mkl_rt.mkl_set_num_threads(ctypes.byref(ctypes.c_int(64)))
            nth = mkl_rt.mkl_get_max_threads()
            LOGGER.debug('NEW number of threads: %s' % (nth))
            # TODO: check whether this works with the default shell=False in subprocess
            environ['MKL_NUM_THREADS'] = str(nth)
            environ['OMP_NUM_THREADS'] = str(nth)
        except Exception as e:
            msg = 'Failed to set THREADS %s ' % e
            LOGGER.debug(msg)
        # -----------------------

        # ##### TEMPORARY WORKAROUND! With hdf5-1.8.18 installed in anaconda ############
        # ##### MUST be removed once castf90 is recompiled against the latest hdf5 version
        # ##### NOT safe
        environ['HDF5_DISABLE_VERSION_CHECK'] = '1'
        # hdflib = os.path.expanduser("~") + '/anaconda/lib'
        # hdflib = os.getenv("HOME") + '/anaconda/lib'
        import pwd
        hdflib = pwd.getpwuid(getuid()).pw_dir + '/anaconda/lib'
        environ['LD_LIBRARY_PATH'] = hdflib
        # ################################################################################

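        # Run the compiled CASTf90 executable (analogue.out) on the generated
        # config file and capture its stdout/stderr.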
        try:
            # response.update_status('execution of CASTf90', 50)
            cmd = 'analogue.out %s' % path.relpath(config_file)
            # system(cmd)
            args = shlex.split(cmd)
            output, error = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
            LOGGER.info('analogue.out info:\n %s ' % output)
            LOGGER.debug('analogue.out errors:\n %s ' % error)
            response.update_status('**** CASTf90 succeeded', 70)
        except Exception as e:
            msg = 'CASTf90 failed %s ' % e
            LOGGER.error(msg)
            raise Exception(msg)

        LOGGER.debug("castf90 took %s seconds.", time.time() - start_time)

        # TODO: Add try/except around the PDF plotting
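        # When plotting is disabled, an empty dummy PDF is touched so that the
        # 'analog_pdf' output slot can still be filled.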
        if plot == 'Yes':
            analogs_pdf = analogs.plot_analogs(configfile=config_file)
        else:
            analogs_pdf = 'dummy_plot.pdf'
            with open(analogs_pdf, 'a'):
                utime(analogs_pdf, None)

        response.update_status('preparing output', 80)

        response.outputs['analog_pdf'].file = analogs_pdf
        response.outputs['config'].file = config_file
        response.outputs['analogs'].file = output_file
        response.outputs['output_netcdf'].file = simulation
        response.outputs['target_netcdf'].file = archive

        if seacyc is True:
            response.outputs['base_netcdf'].file = seasoncyc_base
            response.outputs['sim_netcdf'].file = seasoncyc_sim
        else:
            # TODO: Still unclear how to handle an unknown number of outputs
            dummy_base = 'dummy_base.nc'
            dummy_sim = 'dummy_sim.nc'
            with open(dummy_base, 'a'):
                utime(dummy_base, None)
            with open(dummy_sim, 'a'):
                utime(dummy_sim, None)
            response.outputs['base_netcdf'].file = dummy_base
            response.outputs['sim_netcdf'].file = dummy_sim

        ########################
        # generate analog viewer
        ########################

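        # reformat_analogs() converts the raw CASTf90 output into the table format
        # expected by the HTML analogue viewer rendered below.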
        formated_analogs_file = analogs.reformat_analogs(output_file)
        # response.outputs['formated_analogs'].storage = FileStorage()
        response.outputs['formated_analogs'].file = formated_analogs_file
        LOGGER.info('analogs reformatted')
        response.update_status('reformatted analog file', 90)

        viewer_html = analogs.render_viewer(
            # configfile=response.outputs['config'].get_url(),
            configfile=config_file,
            # datafile=response.outputs['formated_analogs'].get_url())
            datafile=formated_analogs_file)
        response.outputs['output'].file = viewer_html
        response.update_status('Successfully generated analogs viewer', 95)
        LOGGER.info('rendered pages: %s', viewer_html)

        response.update_status('execution ended', 100)
        LOGGER.debug("total execution took %s seconds.",
                     time.time() - process_start_time)
        return response