def test_can_format_negative_offsets(self):
    """Negative UTC offsets render in '-HH:MM' form."""
    cases = [
        ('-00:30', TimeDelta(minutes=-30)),
        ('-01:30', TimeDelta(minutes=-90)),
        ('-14:00', TimeDelta(hours=-14)),
    ]
    for expected, offset in cases:
        assert_equals(expected, timezone_offset_to_string(offset))
def schedule_resource(resource):
    """Create the next ResourceSchedule for *resource*.

    When the most recent schedule has finished, the new schedule is placed
    one interval (per resource.schedule_type) after it and saved. When no
    previous schedule exists, the resource's own schedule_date_time is used
    and the new schedule is saved. When the latest schedule exists but is
    not finished, the new schedule is returned unsaved and undated
    (original behaviour — NOTE(review): confirm that is intended).

    :param resource: the Resource to schedule.
    :return: the new ResourceSchedule instance.
    :raises ValueError: for an unknown resource.schedule_type.
    """
    try:
        latest_schedule = ResourceSchedule.objects.filter(
            resource=resource).latest('id')
    except ResourceSchedule.DoesNotExist:
        latest_schedule = None

    new_schedule = ResourceSchedule()
    new_schedule.resource = resource
    new_schedule.execution_status = ResourceSchedule.STATUS_SCHEDULED

    if latest_schedule:
        if latest_schedule.execution_status == ResourceSchedule.STATUS_FINISHED:
            # Interval per schedule type. NOTE(review): month/year are
            # approximated as 4/48 weeks — confirm this is intentional.
            intervals = {
                Resource.TYPE_DAY: TimeDelta(days=1),
                Resource.TYPE_WEEK: TimeDelta(weeks=1),
                Resource.TYPE_MONTH: TimeDelta(weeks=4),
                Resource.TYPE_YEAR: TimeDelta(weeks=48),
            }
            try:
                step = intervals[resource.schedule_type]
            except KeyError:
                # The original left `date_time` unbound here and crashed
                # with UnboundLocalError; raise a descriptive error instead.
                raise ValueError('unknown schedule_type: {!r}'.format(
                    resource.schedule_type))
            new_schedule.schedule_date_time = (
                latest_schedule.schedule_date_time + step)
            new_schedule.save()
    else:
        new_schedule.schedule_date_time = resource.schedule_date_time
        new_schedule.save()
    return new_schedule
def process_trip(proxy, scraped_csv_file, params):
    """Scrape one flight trip and append the result to *scraped_csv_file*.

    :param proxy: proxy to route the browser through.
    :param scraped_csv_file: path of the CSV file results are saved to.
    :param params: (flight_from, flight_to, day_from, day_to) where the day
        values are offsets in days from today.
    """
    formated_params = {
        'flight_from': params[0],
        'flight_to': params[1],
        'day_from': params[2],
        'day_to': params[3],
        'date_from': (DateTime.today() + TimeDelta(days=params[2])).strftime('%d/%m/%Y'),
        'date_to': (DateTime.today() + TimeDelta(days=params[3])).strftime('%d/%m/%Y'),
    }
    browser = None
    try:
        browser = get_browser(proxy)
        browser = search(browser, formated_params)
        check_result(browser)
        scraped_data = parse_result(browser, formated_params)
        save_result(scraped_data, scraped_csv_file)
        close_browser(browser)
        print(["SUCCESS", proxy, scraped_csv_file, formated_params])
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt.
        print("process_trip except")
        # The original called close_browser(browser) unconditionally and
        # raised NameError when get_browser() itself failed.
        if browser is not None:
            close_browser(browser)
        print(["FAILLED", proxy, scraped_csv_file, formated_params])
def get_day_trade_minute_line(product, date):
    """Return every minute timestamp within a day's trading sessions.

    The last minute of each session has no trade and is excluded.

    :param product: product identifier passed to
        get_trade_timetable_template().
    :param date: DateTime (or date-like with year/month/day) — the
        calendar day to expand.
    :return: list of DateTime objects, one per trading minute.
    """
    entries = get_trade_timetable_template(product)
    result = []
    for w in range(len(entries)):
        # Each entry carries at least (session_start, session_end).
        s, e = entries[w][:2]
        dts = DateTime(date.year, date.month, date.day, s.hour, s.minute, s.second)
        dte = DateTime(date.year, date.month, date.day, e.hour, e.minute, e.second)
        # - TimeDelta(minutes=1)
        if len(entries[w]) == 3 and entries[w][-1].count('-'):
            # Session explicitly marked (third element contains '-') as
            # belonging to the next day: shift both ends forward.
            dts += TimeDelta(days=1)
            dte += TimeDelta(days=1)
        else:
            if dte < dts:
                # End before start: the session wraps past midnight.
                dte = dte + TimeDelta(days=1)
        dte -= TimeDelta(minutes=1)  # Exclude the closing minute.
        mins = list(rrule(MINUTELY, interval=1, dtstart=dts, until=dte))
        result += mins
    return result
def test_fails_if_date_is_older_than_max_delta(self):
    """Dates older than max_delta are rejected; ones inside it pass."""
    recent = DateTime.now() - TimeDelta(seconds=30)
    self.assert_fail(recent)
    window = TimeDelta(minutes=1)
    # 30 seconds back fits inside a one-minute window.
    assert_almost_now(recent, max_delta=window)
    day_old = DateTime.now() - TimeDelta(days=1)
    self.assert_fail(day_old, max_delta=window)
def test_current_community_has_highest_priority(self):
    """The least-recently-checked community is selected as current."""
    shared = dict(deactivated=False, ctype=Community.TYPE_PUBLIC_PAGE)
    dt = timezone.now()
    specs = {1: (0, dt + TimeDelta(hours=1)),
             2: (20, dt),
             3: (10, dt + TimeDelta(hours=2))}
    communities = [
        Community.objects.create(vkid=vkid, followers=followers,
                                 wall_checked_at=checked_at, **shared)
        for vkid, (followers, checked_at) in specs.items()
    ]
    wu = WallUpdater(None)
    wu._load_accessible_communities(len(communities))
    self.assertEqual(wu._current_community().vkid, 2)
def test_update_time_is_more_important_than_followers_num(self):
    """Priority sorting orders by wall_checked_at, not by followers."""
    shared = dict(deactivated=False, ctype=Community.TYPE_PUBLIC_PAGE)
    dt = timezone.now()
    communities = [
        Community(vkid=1, followers=0,
                  wall_checked_at=dt + TimeDelta(hours=1), **shared),
        Community(vkid=2, followers=20, wall_checked_at=dt, **shared),
        Community(vkid=3, followers=10,
                  wall_checked_at=dt + TimeDelta(hours=2), **shared),
    ]
    ordered = sorted(communities, key=WallUpdater._priority_of_community)
    self.assertEqual([c.vkid for c in ordered], [3, 1, 2])
def test_write_log_new_day_set_log_name(self):
    """set_log_name() with a new day changes log_name, while writes still
    open the previously current log file name in append mode."""
    m_open = mock_open()
    with patch('builtins.open', m_open):
        sut = logger.FileLogger('')
        current_log_name = sut.log_name
        # Removed the unused local `d` the original computed and never used;
        # assertNotEquals is a deprecated alias of assertNotEqual.
        sut.set_log_name(Date.today() - TimeDelta(days=1))
        self.assertNotEqual(current_log_name, sut.log_name)
        sensor = logger.Sensor('key1', 'Door', 'somewhere', 12, 'R', 0,
                               '06:00:00', '17:00:00', ['closed', 'open'],
                               None, sut, None)
        sut.write_log(sensor.log(1))
        m_open.assert_called_with(current_log_name, 'a')
def dateTimesEqualish(a: DateTime, b: DateTime) -> bool:
    """
    Compare two :class:`DateTimes`.  Because floating point math, apply some
    "close enough" logic to deal with the fact that floats stored in SQLite
    may be slightly off when retrieved.
    """
    # BUG FIX: `a - b < delta` was True for ANY b later than a, because the
    # difference is then negative; compare the magnitude instead.
    return abs(a - b) < TimeDelta(microseconds=20)
def data_clear_days(symbol, start, end='', readonly=False):
    """Flag (or just report) ticks outside trading hours in a date range.

    Scans ticks of *symbol* in [start, end + 1 day) and, for each record
    whose datetime falls outside the product's trade timetable, sets
    flag=1 (unless *readonly*) and writes it to a report file.

    :param symbol: collection name in the CTP tick database.
    :param start: start date, str (parsed) or datetime.
    :param end: optional end date; defaults to *start*.
    :param readonly: when True only report, do not modify records.
    """
    import logging
    logger = logging.getLogger()
    if isinstance(start, str):
        start = parse(start)
    if not end:
        end = start
    else:
        end = parse(end)
    end = end + TimeDelta(days=1)  # make the range inclusive of end's day
    conn = mongodb_conn
    coll = conn[CTP_TICK_DB][symbol]
    spaces = get_trade_timetable_template(symbol)
    if readonly:
        rs = coll.find({'datetime': {'$gte': start, '$lt': end}})
    else:
        rs = coll.find({'datetime': {'$gte': start, '$lt': end}, 'flag': 0})
    # `with` guarantees the report file is closed even if the scan raises;
    # the original leaked the handle on any exception.
    with open('data_clear_{}.txt'.format(str(start.date())), 'w') as f:
        for r in rs:
            if not time_work_right_short(spaces, r['datetime']):
                if not readonly:
                    coll.update_one(filter={'_id': r['_id']},
                                    update={'$set': {'flag': 1}})
                logger.debug('Removed Record:' + r['symbol'] + ' ' + str(r['datetime']))
                f.write("{} {} {}\n".format(r['symbol'], str(r['datetime']), str(r)))
def whenNightClose(self):
    """Run after the night session closes (scheduled around 02:35).

    Builds the previous day's minute K-lines for all contracts
    (daily bars are not built here).
    """
    self.log_print("Start K-line-Min Making..")
    # NOTE(review): subtracting TimeDelta(days=-1) moves the date *forward*
    # one day, while the surrounding comments say "previous day" — confirm
    # the sign is intended for the 02:35 trading-day convention.
    date = datetime.now() - TimeDelta(days=-1)
    self.make_kline_min(str(date.date()))
    self.log_print("End K-line-Min Made.")
def _get_new_posts(self):
    """Fetch the current community's wall and parse it into post objects.

    Retries the API call (sleeping 1s) while it raises TryAgain, records
    the check time, warns when the check happens later than the planned
    WALL_UPDATE_PERIOD, and skips (with an error log) any post that fails
    to parse.

    :return: list of parsed posts; empty when the wall is unavailable.
    """
    comm = self._current_community()
    while True:
        try:
            self._check_time = timezone.now()
            wall_data = self._vkapi.get_community_wall(comm.vkid)
            break
        except TryAgain:
            self._sleep(1)
    if comm.wall_checked_at is not None:
        # Warn when this check is behind the planned schedule.
        planned_check_time = comm.wall_checked_at + TimeDelta(
            seconds=WALL_UPDATE_PERIOD)
        if self._check_time > planned_check_time:
            logger.warning(
                'updating the community(id=%s) is %.2f seconds late',
                comm.vkid,
                (self._check_time - planned_check_time).total_seconds())
    posts = []
    if wall_data is None:
        logger.warning('cannot get the wall of the community(id=%s)',
                       comm.vkid)
    else:
        logger.info('got %s posts for the community(id=%s)',
                    len(wall_data), comm.vkid)
        for post_data in wall_data:
            try:
                post = self._parse_post(post_data)
                posts.append(post)
            except VkApiParsingError as err:
                # One malformed post must not abort the whole batch.
                logger.error('community(id=%s) post(id=%s): %s',
                             comm.vkid, post_data.get('id'), repr(err))
    return posts
def get_communities(self, ids):
    """Fetch community metadata for the given ids from the VK API.

    Picks the least-recently-used access token and throttles so each
    token is used at most once per REQUEST_DELAY_PER_TOKEN seconds.

    :param ids: iterable of community ids; at most COMMUNITIES_PER_REQUEST.
    :return: list of community dicts from the API response.
    :raises ValueError: when more than COMMUNITIES_PER_REQUEST ids given.
    :raises TryAgain: when the API answered with an error payload.
    """
    if len(ids) > COMMUNITIES_PER_REQUEST:
        raise ValueError('too many ids = {0} (max=500)'.format(len(ids)))
    with self._lock:
        # Choose the token idle the longest and reserve it by bumping its
        # last_used into the future by the delay we are about to sleep.
        token = min(self._tokens, key=lambda t: t.last_used)
        elapsed = (timezone.now() - token.last_used).total_seconds()
        delay = max(0, REQUEST_DELAY_PER_TOKEN - elapsed)
        token.last_used = timezone.now() + TimeDelta(seconds=delay)
    time.sleep(delay)
    response = self._request(
        'groups.getById',
        group_ids=','.join(str(id_) for id_ in ids),
        fields=
        'type,is_closed,verified,age_limits,name,description,members_count,status',
        access_token=token.key,
        v='5.74')
    communities = response.get('response')
    if communities is None:
        err = VkApiResponseError.from_response(response)
        logger.warning('%s, token=%s', repr(err), token.key)
        raise TryAgain()
    return communities
def fromSignHoursMinutes(cls, sign, hours, minutes):
    """
    Construct a L{FixedOffsetTimeZone} from an offset described by sign
    ('+' or '-'), hours, and minutes.

    @note: For protocol compatibility with AMP, this method never uses 'Z'

    @param sign: A string describing the positive or negative-ness of the
        offset.
    @param hours: The number of hours in the offset.
    @type hours: L{int}
    @param minutes: The number of minutes in the offset
    @type minutes: L{int}

    @return: A time zone with the given offset, and a name describing the
        offset.
    @rtype: L{FixedOffsetTimeZone}
    """
    name = "%s%02i:%02i" % (sign, hours, minutes)
    if sign == "+":
        factor = 1
    elif sign == "-":
        factor = -1
    else:
        raise ValueError("Invalid sign for timezone %r" % (sign, ))
    offset = TimeDelta(hours=factor * hours, minutes=factor * minutes)
    return cls(offset, name)
def __init__(self, frequency=None, initial_state=False, count=None):
    """Initialise the oscillator.

    :param frequency: toggle frequency in Hz, or None/0 for no clock.
    :param initial_state: initial boolean output state.
    :param count: optional number of toggles before stopping.
    """
    self.hz = frequency
    # The declared default frequency=None used to crash with TypeError in
    # `1.0 / frequency`; treat a falsy frequency as "no period".
    self.frequency = TimeDelta(seconds=1.0 / frequency) if frequency else None
    self.state = initial_state  # Output(initial_state)
    self.count = count
    super(Oscillator, self).__init__()
def get_timespace_of_trade_day(date, product='M'):
    """Return the quote-data time span [start, end) for a trade day.

    The span runs from the previous trade day's night-session open (21:00)
    to the given day's afternoon close (15:30), e.g.
    (2019-5-10 21:00, 2019-5-11 15:30).  Ticks outside normal trading
    hours must be cleaned beforehand.

    :param date: trade day, str (parsed) or datetime.
    :param product: product code used to look up its trade-day calendar.
    :return: (start, end) DateTime pair, or () for an invalid trade day.
    """
    if isinstance(date, str):
        date = parse(date)
        date = date.date()
    days = product_trade_days.get(product, {})
    # BUG FIX: dict views have no .index() in Python 3 — the original
    # crashed with AttributeError; materialise the keys in sorted order.
    sorted_days = sorted(days)
    try:
        idx = sorted_days.index(date)
    except ValueError:
        # .index() raises instead of returning -1, so the original
        # `idx == -1` check could never fire.
        getLogger().error('date: {} is not defined in trade_days.txt'.format(
            str(date)))
        return ()
    start = date - TimeDelta(days=1)  # default: previous calendar day
    if idx != 0:
        # Use the actual previous trade day when one exists.
        start = sorted_days[idx - 1]
    # The day span starts at the previous trade day's night session and
    # ends at this trade day's afternoon close.
    start = DateTime.combine(start, Time(21, 0, 0, 0))
    date = DateTime.combine(date, Time(15, 30, 0, 0))
    return (start, date)
def _get_tokens(grant_type, auth_token):
    """Exchange an auth code or refresh token for Spotify credentials.

    Updates the module-level access_token, refresh_token (when returned)
    and credential_expiration, then persists them via sync_authfile().

    :param grant_type: "authorization_code" or "refresh_token".
    :param auth_token: the code or refresh token matching grant_type.
    """
    global access_token
    global refresh_token
    global credential_expiration
    headers = {
        "Authorization": "Basic {}".format(encoded_id_secret),
        "Content-Type": "application/x-www-form-urlencoded",
    }
    body = {"grant_type": grant_type}
    if grant_type == "authorization_code":
        body["code"] = auth_token
        body["redirect_uri"] = config.SPOTIFY_REDIRECT_URI
        body["client_id"] = config.SPOTIFY_CLIENT_ID
    elif grant_type == "refresh_token":
        body["refresh_token"] = auth_token
    response = requests.post("https://accounts.spotify.com/api/token",
                             data=body, headers=headers)
    payload = json.loads(response.text)
    access_token = payload["access_token"]
    if "refresh_token" in payload.keys():
        refresh_token = payload["refresh_token"]
    credential_expiration = DateTime.utcnow() + TimeDelta(
        seconds=payload["expires_in"])
    sync_authfile()
def _calc_day_night_alt_intervals():
    """Build the (label, interval-count) sequence for the alternate
    schedule: extra, first day, extra, first night, extra, second day.
    """
    twenty_four_hours = TimeDelta(hours=24)
    frame = PatientDataFrame
    second_day_start = frame._ALT_FIRST_DAY_START_TIME + twenty_four_hours
    # (label key, span begin, span end) in chronological order.
    spans = [
        (PatientDataFrameKey.EXTRA_TIME,
         frame._START_TIME, frame._ALT_FIRST_DAY_START_TIME),
        (PatientDataFrameKey.DAY_TIME,
         frame._ALT_FIRST_DAY_START_TIME, frame._ALT_FIRST_DAY_FINISH_TIME),
        (PatientDataFrameKey.EXTRA_TIME,
         frame._ALT_FIRST_DAY_FINISH_TIME, frame._ALT_FIRST_NIGHT_START_TIME),
        (PatientDataFrameKey.NIGHT_TIME,
         frame._ALT_FIRST_NIGHT_START_TIME, frame._ALT_FIRST_NIGHT_END_TIME),
        (PatientDataFrameKey.EXTRA_TIME,
         frame._ALT_FIRST_NIGHT_END_TIME, second_day_start),
        (PatientDataFrameKey.DAY_TIME,
         second_day_start, frame._FINISH_TIME),
    ]
    return [(key.as_string(), int((end - begin) / frame._TIME_INTERVAL))
            for key, begin, end in spans]
def rewind_time(a: Time, b: Union[Time, TimeDelta]) -> Time:
    """Return clock time *a* moved backwards by *b*.

    :param a: the time of day to rewind.
    :param b: how far to rewind — either a TimeDelta, or a Time whose
        hour/minute/second components are interpreted as a duration.
    :return: the rewound time of day (wraps around midnight).
    """
    anchored = DateTime.combine(Date.today(), a)
    if isinstance(b, Time):
        # BUG FIX: the original converted only b.minute, silently dropping
        # the hour and second components of the duration.
        b = TimeDelta(hours=b.hour, minutes=b.minute, seconds=b.second)
    return (anchored - b).time()
def dateTimes(beforeNow: bool = False, fromNow: bool = False) -> SearchStrategy:  # DateTime
    """Strategy for naive DateTime values storable as UTC timestamps.

    At most one of beforeNow/fromNow may be set; they clamp the range to
    strictly before (resp. after) the current time.

    A day of fuzz keeps every generated value after the UTC epoch while
    still using naive datetimes (min_value doesn't allow non-naive
    values), which is acceptable for the model date-times in this module.
    """
    assert not (beforeNow and fromNow)
    fuzz = TimeDelta(days=1)
    lower = DateTime(1970, 1, 1) + fuzz
    upper = DateTime(9999, 12, 31, 23, 59, 59, 999999)
    if beforeNow:
        upper = DateTime.now() - fuzz
    if fromNow:
        lower = DateTime.now() + fuzz
    return _datetimes(min_value=lower, max_value=upper, timezones=timeZones())
def timeZone():
    """Returns a TimeZone object expressing the user's time-zone preference.

    The result is cached in the module globals _gotTZ/_tz; the TZ
    environment variable, when set, wins over the configured offset.
    """
    global _gotTZ, _tz
    # If we already got it, just return it.
    if _gotTZ:
        return _tz
    # If the TZ environment variable is set, just use that.
    if envTZ:
        _logger.debug(
            f"time.timeZone(): Using environment variable TZ={envTZ}.")
        tz = gettz(envTZ)
    else:
        # Get the time zone preference from the system config.
        tzOff = tzOffset()  # User's time zone preference as hours vs. UTC.
        td = TimeDelta(hours=tzOff)  # Convert to a timedelta object.
        tz = TimeZone(td)  # Create the fixed-offset timezone object.
    # Cache for subsequent calls.
    _gotTZ = True
    _tz = tz
    return tz
def test_format_datetime_rfc2822(self, d):
    """The RFC 2822 rendering carries the tzinfo offset as the last field."""
    offset_str, offset = d
    tz = ConstantTZInfo(offset=TimeDelta(minutes=offset))
    moment = DateTime(2020, 7, 21, hour=23, minute=2, second=59, tzinfo=tz)
    assert_equals('Tue, 21 Jul 2020 23:02:59 ' + offset_str,
                  format_datetime_rfc2822(moment))
def get_prev_weekday(x: Optional[Date] = None) -> Date:
    """
    Returns the previous week day as of given (optional) date.

    :param x: Optional date in time.
    :return: Previous business day.

    >>> get_prev_weekday(Date(2020, 1, 1))
    datetime.date(2019, 12, 31)
    >>> get_prev_weekday(Date(2020, 1, 2))
    datetime.date(2020, 1, 1)
    >>> get_prev_weekday(Date(2020, 1, 3))
    datetime.date(2020, 1, 2)
    >>> get_prev_weekday(Date(2020, 1, 4))
    datetime.date(2020, 1, 3)
    >>> get_prev_weekday(Date(2020, 1, 5))
    datetime.date(2020, 1, 3)
    >>> get_prev_weekday(Date(2020, 1, 6))
    datetime.date(2020, 1, 3)
    >>> get_prev_weekday(Date(2020, 1, 7))
    datetime.date(2020, 1, 6)
    """
    asof = x or get_today()
    # Tue-Sat step back one day; Sun/Mon (and Sat itself lands on Fri)
    # step back far enough to reach the preceding Friday.
    days_back = max(1, (asof.weekday() + 6) % 7 - 3)
    return asof - TimeDelta(days=days_back)
def get_period_starts(x: Optional[Date] = None) -> PeriodStarts:
    """
    Returns important dates as of the given date.

    >>> get_period_starts(Date(2018, 8, 19))["year_start"]
    datetime.date(2018, 1, 1)
    >>> get_period_starts(Date(2018, 8, 19))["half_start"]
    datetime.date(2018, 7, 1)
    >>> get_period_starts(Date(2018, 8, 19))["quarter_start"]
    datetime.date(2018, 7, 1)
    >>> get_period_starts(Date(2018, 8, 19))["month_start"]
    datetime.date(2018, 8, 1)
    >>> get_period_starts(Date(2018, 8, 19))["week_start"]
    datetime.date(2018, 8, 13)
    >>> get_period_starts(Date(2018, 8, 19))["yesterday"]
    datetime.date(2018, 8, 18)
    """
    asof = x or get_today()
    starts = OrderedDict()
    starts["year_start"] = get_year_start(asof)
    starts["half_start"] = get_year_half_start(asof)
    starts["quarter_start"] = get_quarter_start(asof)
    starts["month_start"] = get_month_start(asof)
    starts["week_start"] = get_week_start(asof)
    starts["yesterday"] = asof - TimeDelta(days=1)
    return starts
def __iter__(self) -> Iterator[Date]:
    """
    Returns an iterator for dates within the date-range in ascending order.

    :return: An :py:class:`typing.Iterator` of :py:class:`datetime.date` instances.
    """
    # A generator is already an iterator, so no iter() wrapper is needed.
    span_days = (self.until - self.since).days
    return (self.since + TimeDelta(days=offset) for offset in range(span_days + 1))
def _drange(start: Date, end: Date) -> Iterator[Date]:
    """
    Yields every date from *start* through *end*, inclusive, in order.
    """
    one_day = TimeDelta(days=1)
    current = start
    while current <= end:
        yield current
        current = current + one_day
def cleanup_posts():
    """Delete posts that are old and were checked recently enough.

    :return: number of deleted rows.
    """
    now = django.utils.timezone.now()
    stale = Post.objects.filter(
        published_at__lt=now - POST_MAX_AGE,
        checked_at__lt=now - TimeDelta(hours=25),  # to avoid blocks in db
    )
    num, _ = stale.delete()
    return num
def __init__(self, time=5):
    """Initialise the schedule delay.

    :param time: delay in seconds (int), a TimeDelta, or an absolute
        DateTime to wait until.
    :raises TypeError: for any other type.
    """
    if isinstance(time, int):
        self.delay = TimeDelta(seconds=time)
    elif isinstance(time, (DateTime, TimeDelta)):
        self.delay = time
    else:
        raise TypeError
    # BUG FIX: `.seconds` is only the seconds *component* (0..86399) and
    # silently drops whole days; use total_seconds() for the full length.
    if isinstance(self.delay, TimeDelta):
        self.seconds = int(self.delay.total_seconds())
    else:
        self.seconds = int((self.delay - DateTime.now()).total_seconds())
    super(ScheduledExample, self).__init__()
def test_load_communities(self):
    """Communities load never-checked first, then by oldest checked_at,
    limited by COMMUNITIES_BUFFER_MAX_LENGTH."""
    shared = dict(deactivated=False, ctype=Community.TYPE_PUBLIC_PAGE)
    dt = timezone.now()
    Community.objects.create(vkid=1, checked_at=dt + TimeDelta(hours=2), **shared)
    Community.objects.create(vkid=2, checked_at=dt, **shared)
    Community.objects.create(vkid=3, **shared)
    Community.objects.create(vkid=4, checked_at=dt + TimeDelta(hours=1), **shared)
    cu = CommunitiesUpdater(None)
    with patch('datacollector.commupdater.COMMUNITIES_BUFFER_MAX_LENGTH', new=3):
        cu._load_communities()
        self.assertEqual([c.vkid for c in cu._communities_buffer], [3, 2, 4])
def dateTimesEqual(self, a: DateTime, b: DateTime) -> bool:
    """
    Compare two :class:`DateTime` objects.
    Apply some "close enough" logic to deal with the possibility that
    date-times stored in a database may be slightly off when retrieved.
    """
    # BUG FIX: `a - b < delta` was True for ANY b later than a, because the
    # difference is then negative; compare the magnitude instead.
    # Floats stored may be slightly off when round-tripped.
    return abs(a - b) < TimeDelta(microseconds=20)