def parse_stamps(self, expr=STAMP_RE, fmt='%H:%M, %d %B %Y (%Z)'):
    """Scan each thread for signature timestamps and mark whether it is
    old enough to archive.

    :param expr: compiled regex whose matches in the thread content are
        candidate timestamps
    :param fmt: strptime format used to parse each matched timestamp
    :raises ArchiveError: when the configured algorithm string does not
        match the ``old(<age>)`` pattern
    """
    stamps = []
    algo = self.archiver.config['algo']
    try:
        # The algorithm must look like e.g. "old(30d)"; str2time converts
        # the captured age into a comparable duration.
        maxage = str2time(re.search(r"^old\((\w+)\)$", algo).group(1))
    except AttributeError as e:  # re.search returned None -> .group failed
        e.args = ("Malformed archive algorithm",)
        raise ArchiveError(e)
    for thread in self.threads:
        if mwp_parse(thread['header']).get(0).level != 2:
            # the header is not level 2
            stamps = []
            continue
        for stamp in expr.finditer(thread['content']):
            # This for loop can probably be optimised, but ain't nobody
            # got time fo' dat
            #if stamp.group(1) in MONTHS:
            try:
                stamps.append(Arrow.strptime(stamp.group(0), fmt))
            except ValueError:
                # Invalid stamps should not be parsed, ever
                continue
        if stamps:
            # The most recent stamp should be used to see if we should archive
            most_recent = max(stamps)
            thread['stamp'] = most_recent
            thread['oldenough'] = Arrow.utcnow() - most_recent > maxage
        # NOTE(review): source formatting was lost; this `pass` likely
        # belonged to an `else:` branch of the `if stamps:` above -- confirm.
        pass  # No stamps were found, abandon thread
        stamps = []
def friendly_time(jinja_ctx, context, **kw):
    """Format timestamp in human readable format.

    * Context must be a datetime object

    * Takes optional keyword argument ``source_timezone`` which is a timezone
      name as a string. Assume the source datetime is in this timezone
      (defaults to UTC).
    """
    now = context
    if not now:
        # Empty/None timestamps render as an empty string.
        return ""
    tz = kw.get("source_timezone", None)
    if tz:
        tz = timezone(tz)
    else:
        tz = datetime.timezone.utc
    # Make relative time between two timestamps
    now = now.astimezone(tz)
    arrow = Arrow.fromdatetime(now)
    # NOTE(review): datetime.utcnow() is naive while `now` is aware after
    # astimezone(); confirm Arrow.fromdatetime applies the intended zone here.
    other = Arrow.fromdatetime(datetime.datetime.utcnow())
    return arrow.humanize(other)
def fire_arrow(self, person):
    """Spawn and launch an arrow from *person* if their cooldown allows it."""
    if person.cooldown >= 0:
        # Still cooling down -- nothing to fire.
        return
    person.cooldown = 0.6
    self.arrow_sound.play()
    spawn_at = (person.position.x, person.position.z)
    projectile = Arrow(self.arrowTex, self.program, 1, 1, spawn_at,
                       person.collisionMap, self.normal_mesh)
    projectile.point(person.angle)
    self.arrows.append(projectile)
def test_local_to_utc(self):
    # A local-zoned Arrow converted to UTC should match the current UTC time.
    # NOTE(review): compares against a *second* call to utcnow()/time.time(),
    # so this relies on assert_dt_equal/assert_ts_equal tolerating sub-second
    # skew -- potentially flaky.
    arr = Arrow(datetime.now(), tz='local')
    result = arr.to('UTC')
    self.assert_dt_equal(result.datetime, datetime.utcnow())
    self.assert_ts_equal(result.timestamp, time.time())
def test_eq_utc_converstions(self):
    # utc() and to('UTC') must be equivalent conversions.
    # NOTE(review): method name has a typo ("converstions"); left unchanged
    # because test names are the discovery interface.
    arr = Arrow(datetime(11, 1, 1))  # year 11 -- an arbitrary fixed datetime
    utc_1 = arr.utc()
    utc_2 = arr.to('UTC')
    self.assertEqual(utc_1.datetime, utc_2.datetime)
    self.assertEqual(utc_1, utc_2)
def get_for(date):
    """Return a VisitResult for *date*, searching week-by-week.

    Falls back to an empty VisitResult when no matching day is found.
    """
    date = date.replace(tzinfo='utc')  # normalise for comparison with day entries
    for week in get_dates(access_token):
        for day, visits in week:
            if day == date:
                return VisitResult(
                    visits,  # visits for day
                    Arrow.now(AU_PERTH)  # when data was retrieved
                )
    return VisitResult([], Arrow.now(AU_PERTH))
def cached_get_for(date):
    """Memoising wrapper around get_for(); cached entries expire after one hour."""
    cache = getattr(cached_get_for, '_cache', None)
    if cache is None:
        # Lazily attach the cache dict to the function object itself.
        cache = cached_get_for._cache = {}
    entry = cache.get(date)
    if entry is not None:
        data, fetched_at = entry
        if (Arrow.now() - fetched_at) < timedelta(hours=1):
            return data
    # Miss or stale: fetch fresh data and record the retrieval time.
    fresh = (get_for(date), Arrow.now())
    cache[date] = fresh
    return fresh[0]
def user_action_stat(interval,user_out_name,song_out_name):
    # Aggregate per-user and per-song action counts into fixed-size day buckets.
    ## step 1: load the data
    base_time = 1426348800  # unix timestamp for 2015-03-15 00:00:00
    base_time_stamp = Arrow.fromtimestamp(base_time)
    interval_seconds = interval * 24 * 3600  # bucket width in seconds
    parts = load_csv_as_dict('%s/data_source/%s' %(PROJECT_PATH,'mars_tianchi_user_actions.csv'))
    # Nested defaultdicts: id -> bucket index -> action type -> count.
    user_dict = defaultdict( lambda: defaultdict(lambda: defaultdict(lambda: 0.0)))  # a handy defaultdict trick!
    song_dict = defaultdict(lambda:defaultdict(lambda:defaultdict(lambda:0.0)))
    count = 0
    ## step 2: tally the records
    for part in parts:
        user_id = part['user_id']
        song_id = part['song_id']
        action_type = part['action_type']
        gmt_create = eval(part['gmt_create'])  # NOTE(review): eval on CSV data -- prefer int()/float()
        # Which bucket this record falls in (integer division, Python 2).
        date_interval_belong = int((Arrow.fromtimestamp(gmt_create) - base_time_stamp).total_seconds())/interval_seconds
        user_dict[user_id][date_interval_belong][action_type] += 1
        song_dict[song_id][date_interval_belong][action_type] += 1
        count += 1
        if count % 1000 == 0:
            print 'statistical %s records' % count
    print 'total users: %s' % len(user_dict)
    print 'total songs: %s' % len(song_dict)
    ## step 3: write out the feature files
    fs = open('%s/feature/%s.csv' % (PROJECT_PATH,user_out_name),'w')
    fs.write('user_id,date_interval_%s ,plays,downloads,favors\n' % interval)
    count = 0
    for user in user_dict:
        date_dict = user_dict[user]
        for date in date_dict:
            action = date_dict[date]
            # Action types: '1' = play, '2' = download, '3' = favor.
            fs.write('%s,%s,%s,%s,%s\n' % (user,date,action['1'],action['2'],action['3']))
            count = count + 1
            if count % 1000 == 0:
                print 'write %s length' % count
    fs.close()
    print 'user_dict is write done'
    fs = open('%s/feature/%s.csv' % (PROJECT_PATH,song_out_name),'w')
    fs.write('song_id,date_interval_%s,plays,downloads,favors\n' % interval)
    count = 0
    for song in song_dict:
        date_dict = song_dict[song]
        for date in date_dict:
            action = date_dict[date]
            fs.write('%s,%s,%s,%s,%s\n' % (song,date,action['1'],action['2'],action['3']))
            count += 1
            if count % 1000 == 0:
                print 'write %s length' % count
    fs.close()
    print 'song_dict is write done'
def test_zone_to_zone(self):
    # Two arrows two hours either side of UTC should convert into each other.
    dt_1 = datetime.utcnow() + timedelta(hours=-2)
    dt_2 = datetime.utcnow() + timedelta(hours=2)
    arr_1 = Arrow(dt_1, timedelta(hours=-2))
    arr_2 = Arrow(dt_2, timedelta(hours=2))
    result_1 = arr_1.to(timedelta(hours=2))
    result_2 = arr_2.to(timedelta(hours=-2))
    self.assert_dt_equal(result_1.datetime, arr_2.datetime)
    self.assert_dt_equal(result_2.datetime, arr_1.datetime)
def menu(window, clock):
    """Run the menu loop: draw two arrows and flag one as "big" when the
    matching left/right key is pressed. Exits on window close or ESC."""
    arrow_right = Arrow("right", window.get_size())
    arrow_left = Arrow("left", window.get_size())
    loop = True
    while loop:
        for event in pygame.event.get():
            if event.type == QUIT:
                loop = False
            if event.type == KEYDOWN:
                if event.key == K_ESCAPE:
                    loop = False
                if event.key == K_RIGHT:
                    print "right"
                    arrow_right.big = True
                if event.key == K_LEFT:
                    print "left"
                    arrow_left.big = True
        window.fill((30,130,184))  # background colour
        arrow_right.draw(window)
        arrow_left.draw(window)
        pygame.display.flip()
        clock.tick(30)  # cap at 30 FPS
def _process_dates(self): """internal method to parse the gcal_url for start and end date info and set the _start_date_arrow and _end_date_arrow to instances of arrow objs """ #dont rerun if _start_date_arrow or _end_date_arrow is set or if gcal_url not found if (self._start_date_arrow or self._end_date_arrow) or not self.gcal_url: return gcal_url = self.gcal_url gcal_url_date_time_match = self.gcal_url_date_time_pattern.search(gcal_url) if not gcal_url_date_time_match: return (gcal_url_start_date_str, gcal_url_end_date_str) = gcal_url_date_time_match.groups() # add time to date if no time spesified if 'T' not in gcal_url_start_date_str: gcal_url_start_date_str += 'T000000' if 'T' not in gcal_url_end_date_str: gcal_url_end_date_str += 'T000000' self._start_date_arrow = Arrow.strptime(gcal_url_start_date_str, self.gcal_url_date_time_format, tzinfo=self.event_timezone) self._end_date_arrow = Arrow.strptime(gcal_url_end_date_str, self.gcal_url_date_time_format, tzinfo=self.event_timezone)
def view(party_id):
    """List orga presence and task time slots for that party."""
    party = Party.query.get_or_404(party_id)
    presences = Presence.query \
        .for_party(party) \
        .options(db.joinedload('orga')) \
        .all()
    tasks = Task.query.for_party(party).all()
    # The party itself counts as one big time slot alongside its tasks.
    time_slots = [party] + tasks
    min_starts_at = find_earliest_time_slot_start(time_slots)
    max_ends_at = find_latest_time_slot_end(time_slots)
    hour_starts_arrow = Arrow.range('hour', min_starts_at, max_ends_at)
    # Strip tzinfo so the template compares naive datetimes.
    hour_starts = [hour_start.datetime.replace(tzinfo=None)
                   for hour_start in hour_starts_arrow]
    # Consecutive hour starts -> half-open hour ranges.
    hour_ranges = list(map(DateTimeRange._make, pairwise(hour_starts)))
    # (calendar day, number of hours shown for that day) pairs.
    days = [(day, len(list(hour_starts)))
            for day, hour_starts in groupby(hour_starts, key=lambda hour: hour.date())]
    return {
        'party': party,
        'days': days,
        'hour_ranges': hour_ranges,
        'presences': presences,
        'tasks': tasks,
    }
def _get_hour_starts(dt_ranges):
    """Return naive datetimes for every hour start spanning *dt_ranges*."""
    earliest = _find_earliest_start(dt_ranges)
    latest = _find_latest_end(dt_ranges)
    # One Arrow per hour over the whole span, converted back to naive datetimes.
    return _to_datetimes_without_tzinfo(Arrow.range('hour', earliest, latest))
def get_xml(item, items):
    """Serialise a feed to an RSS 2.0 XML string.

    :param item: dict with channel metadata (title/author/link/desc/source_name)
    :param items: iterable of entry dicts (title/post_time/source_url/category)
    :return: the rendered XML document as a string
    """
    root = ET.Element('rss')
    root.attrib = {'version': "2.0"}
    c = ET.SubElement(root, 'channel')
    ET.SubElement(c, 'copyright').text = 'Copyright 2016, Liu Vaayne'
    # WeChat ("wx") sources use the account author as the feed title.
    if item.get('source_name') == 'wx':
        ET.SubElement(c, 'title').text = item.get('author')
    else:
        ET.SubElement(c, 'title').text = item.get('title')
    ET.SubElement(c, 'link').text = item.get('link')
    ET.SubElement(c, 'description').text = item.get('desc')
    ET.SubElement(c, 'lastBuildDate').text = Arrow.now().format('YYYY-MM-DD HH:mm:ss')
    # BUG FIX: the loop variable used to shadow the `item` parameter.
    for entry in items:
        i = ET.SubElement(c, 'item')
        ET.SubElement(i, 'title').text = entry.get('title')
        ET.SubElement(i, 'pubDate').text = entry.get('post_time').strftime('%Y-%m-%d %H:%M:%S')
        ET.SubElement(i, 'link').text = entry.get('source_url')
        ET.SubElement(i, 'category').text = entry.get('category')
    tree = ET.ElementTree(root)
    f = StringIO()
    # BUG FIX: previously wrote to sys.stdout and returned the untouched
    # (empty) buffer. Write into the StringIO; encoding='unicode' makes
    # ElementTree emit str rather than bytes.
    tree.write(f, encoding='unicode')
    return f.getvalue()
def now(tz=None):
    '''Returns an :class:`Arrow <arrow.Arrow>` object, representing "now".

    :param tz: (optional) An expression representing a timezone. Defaults
        to local time. Accepted forms: a tzinfo struct; a string description
        such as "US/Pacific" or "Europe/Berlin"; an ISO-8601 offset such as
        "+07:00"; or one of the special strings "local", "utc", "UTC".
    '''
    if tz is None:
        # No timezone given: fall back to the local zone.
        return Arrow.now(dateutil_tz.tzlocal())
    if not isinstance(tz, tzinfo):
        # Anything that is not already a tzinfo is parsed as an expression.
        tz = parser.TzinfoParser.parse(tz)
    return Arrow.now(tz)
def warn(page):
    """Append the current exception's traceback to "errlog", tagged with a
    timestamp and the page title."""
    stamp = Arrow.utcnow().strftime("%x %X")
    trace_lines = traceback.format_exception(*sys.exc_info())
    with open("errlog", "a") as fh:
        # Header line: timestamp + page title, then the indented traceback.
        builtins.print(stamp, repr(page.title), ":", file=fh)
        for trace_line in trace_lines:
            builtins.print("\t" + trace_line.rstrip(), file=fh)
def parse(self, kind, aid, summary):
    """Fetch one smzdm article by *kind*/*aid* and queue it as a result.

    URLs already seen in the bloom filter are skipped; all errors are
    logged rather than raised.
    """
    url = 'http://api.smzdm.com/v1/%s/articles/%s' % (kind, aid)
    if self.blf.exist(url):
        return
    self.blf.add(url)
    try:
        r = self.req_get(url)
        data = r.json().get('data')
        title = data.get('article_title')
        author = data.get('article_referrals')
        post_time = data.get('article_date')
        # Site timestamps are Beijing time.
        post_time = Arrow.strptime(post_time, '%Y-%m-%d %H:%M:%S',
                                   tzinfo='Asia/Shanghai').timestamp
        source_url = data.get('article_url')
        # summary = data.get('summary')
        content = data.get('article_filter_content')
        try:
            # Rewrite/normalise image links inside the article body.
            content = self.get_img(BeautifulSoup('<div>%s</div>' % content, 'lxml'), 'src')
        except Exception as e:
            self.log.exception(e)
        image = data.get('article_pic')
        # self.add_result(title=title, author=author, post_time=post_time, source_name=self.spider_name,
        #                 source_url=source_url, summary=summary,
        #                 content=content, image=image, category=self.category, aid=kind)
        self.add_result(title=title, author=author, post_time=post_time, source_name='什么值得买',
                        source_url=source_url, summary=summary, spider_name=self.spider_name,
                        content=content, image=image, category=self.category, aid=kind)
    except Exception as e:
        self.log.error(e)
def now(tz=None):
    '''Returns an :class:`Arrow <arrow.Arrow>` object, representing "now".

    :param tz: (optional) An expression representing a timezone. Defaults
        to local time. Recognized forms: a **tzinfo** object; a **str**
        naming a timezone ("US/Pacific", "Europe/Berlin"); an ISO-8601
        style offset ("+07:00"); or one of *local*, *utc*, *UTC*.
    '''
    if tz is None:
        # Default: the machine's local timezone.
        return Arrow.now(dateutil_tz.tzlocal())
    if not isinstance(tz, tzinfo):
        # Parse any non-tzinfo value as a timezone expression.
        tz = parser.TzinfoParser.parse(tz)
    return Arrow.now(tz)
def manual_watering(self, watering_request):
    """Schedule manual watering for each station in *watering_request*
    ({station: minutes}), pausing the normal schedule first and resuming
    it afterwards.

    :return: True when the expected number of jobs ended up scheduled.
    """
    # pause normal schedule
    jobs_paused = self.pause_schedule()
    start, last_duration_seconds = Arrow.utcnow(), 5
    start_buffer_seconds = 5
    # for every station, set a scheduling for the duration specified
    # stations are ran serially
    for station, duration in watering_request.items():
        station_id = int(station)
        # NOTE(review): replace(seconds=...) is the old-arrow shift API, and
        # last_duration_seconds is NOT cumulative -- every station after the
        # second starts relative to `start` plus only the previous station's
        # duration, so stations may overlap. Confirm intended behaviour.
        job_start = start.replace(seconds=last_duration_seconds)
        dt = job_start.format("YYYY-MM-DDTHH:mm:ssZZ").replace("-00:00", "+00:00")
        args = {"datetime": dt, "station": station_id, "fixed_duration": duration, "manual": 1}
        self.bg_scheduler.add_job(self.water, "date", run_date=job_start.datetime, args=[args])
        last_duration_seconds = duration * 60  # minutes -> seconds
    # reschedule the original schedule after all stations have watered
    job_start = start.replace(seconds=last_duration_seconds + start_buffer_seconds)
    self.bg_scheduler.add_job(self.resume_schedule, "date", run_date=job_start.datetime)
    # check if schedule contains: paused jobs, manual watering jobs, and extra job to resume paused jobs
    if len(self.bg_scheduler.get_jobs()) == (jobs_paused + len(watering_request) + 1):
        return True
    return False
def blacklist_project(self, project: db.Project, reset_time: arrow.Arrow):
    """
    Add specified project to `self.ratelimit_queue`, add backend
    to `self.blacklist_dict` and increment `self.ratelimit_counter`.

    Args:
        project: Project to blacklist
        reset_time: Time when the ratelimit will be reset
    """
    with self.blacklist_dict_lock:
        # Only the first project per backend records the reset time.
        if project.backend not in self.blacklist_dict:
            _log.debug(
                "Rate limit threshold reached. Adding {} to blacklist.".format(
                    project.backend
                )
            )
            # Normalise to UTC before storing.
            self.blacklist_dict[project.backend] = reset_time.to("utc").datetime
    with self.ratelimit_queue_lock:
        # Queue the project id so it can be retried once the limit resets.
        if project.backend not in self.ratelimit_queue:
            self.ratelimit_queue[project.backend] = []
        self.ratelimit_queue[project.backend].append(project.id)
    with self.ratelimit_counter_lock:
        self.ratelimit_counter += 1
def get_uptime(message):
    """ Get uptime, code reload time and crash counts this session

    Usage: uptime
    """
    now = Arrow.now()
    # NOTE(review): message layout reconstructed from collapsed source --
    # confirm the original line breaks of this template.
    reply('''{}
Way status: [ ] Lost [X] Not lost
Holding on since: {}
Uptime: {}
Last code reload: {}
Code uptime: {}
Total crashes this session: {}
'''.format(
        VERSION_STRING,
        uptime.humanize(),       # e.g. "2 days ago"
        now - uptime,            # timedelta since process start
        code_uptime.humanize(),
        now - code_uptime,       # timedelta since last code reload
        crash_count
    ), message)
def run(self, symbol):
    """Crawl the latest articles for the WeChat account identified by *symbol*."""
    uid = self.get_uuid(symbol)
    if uid is None:
        # Unknown account -- nothing to crawl.
        return
    url = 'http://www.newrank.cn/xdnphb/detail/getAccountArticle'
    params = {
        'uuid': uid,
    }
    r = self.req_post(url, data=params)
    datas = r.json()
    try:
        infos = datas['value']['lastestArticle']
        for info in infos:
            source_url = self.parse_url(info.get('url'))
            if self.repeat_check(source_url):
                # Already crawled -- skip.
                continue
            title = info.get('title')
            wx_id = info.get('account')
            author = info.get('author')
            post_time = info.get('publicTime')
            # Site timestamps are Beijing time.
            post_time = Arrow.strptime(post_time, '%Y-%m-%d %H:%M:%S',
                                       tzinfo='Asia/Shanghai').timestamp
            summary = info.get('summary')
            content, img = self.get_content(source_url)
            if info.get('imageUrl') is None:
                # Fall back to an image scraped from the article body.
                image = img
            else:
                image = info.get('imageUrl')
            self.add_result(title=title, author=author, post_time=post_time, source_name=author,
                            source_url=source_url, summary=summary, spider_name=self.spider_name,
                            content=content, image=image, category=self.category, aid=wx_id)
    except Exception as e:
        self.log.error(e)
def filter_datetime(jinja_ctx, context, **kw):
    """Format datetime in a certain timezone.

    Keyword options: ``timezone`` (source tz name, default UTC),
    ``target_timezone``, ``locale`` (default "en_US"),
    ``format`` (default "YYYY-MM-DD HH:mm"), ``show_timezone``.
    """
    now = context
    if not now:
        # Empty/None values render as empty string.
        return ""
    tz = kw.get("timezone", None)
    if tz:
        tz = timezone(tz)
    else:
        tz = datetime.timezone.utc
    locale = kw.get("locale", "en_US")
    arrow = Arrow.fromdatetime(now, tzinfo=tz)
    # Convert to target timezone
    tz = kw.get("target_timezone")
    if tz:
        arrow = arrow.to(tz)
    else:
        tz = arrow.tzinfo
    format = kw.get("format", "YYYY-MM-DD HH:mm")
    text = arrow.format(format, locale=locale)
    if kw.get("show_timezone"):
        text = text + " ({})".format(tz)
    return text
def get_course_list(self):
    """Fetch the learner's course list and progress from the e-learning site.

    Stores the parsed list on ``self.__course``, keeps the raw response on
    ``self.last_response`` and prints a human-readable summary.

    :raises AttributeError: when no valid session has been established.
    """
    if not self.__session:
        raise AttributeError('未获取有效的session对象')
    # TODO: this session validity check is not rigorous and should be improved
    result = []
    now = int(Arrow.now().float_timestamp * 1000)  # generate javascript style timestamp
    _response = self.__session.get(
        'http://www.elearning.clic/ilearn/en/cst_learner2/jsp/home_my_course.jsp?_={time}'.format(
            time=now))
    # Parse course information out of the returned HTML.
    root = etree.HTML(_response.text)
    td_list = root.xpath(u'//span[@class="td_td_style"]')  # data cells, 4 per course
    a_list = root.xpath(u'//a[@title="Click to study"]')   # course links
    td_len = len(td_list)
    a_len = len(a_list)
    # BUG FIX: use != instead of `is not` for integer comparison.
    if td_len != 0 and a_len != 0:
        # Each course occupies four consecutive cells; build one dict per course.
        for n in range(int(td_len / 4)):
            # BUG FIX: was `sub = 0 * n`, which re-read the first course's
            # cells on every iteration.
            sub = 4 * n
            result.append({
                'course_name': td_list[sub].text.strip(),
                'course_section_rate': float(td_list[sub + 1].text.strip().partition('\n')[0]),
                'course_time_rate': float(td_list[sub + 2].text.strip().partition('\n')[0]),
                'course_finished': td_list[sub + 3].text.strip() == '已完成',
                'course_url': a_list[n].attrib['href']
            })
    self.last_response = _response
    self.__course = result
    for k, v in enumerate(result):
        print('序号:{0} \t课名: {1}\t课程进度: {2}\t课时进度: {3}\t完成: {4}\n'.format(k, v['course_name'],
                                                                         v['course_section_rate'],
                                                                         v['course_time_rate'],
                                                                         v['course_finished'] is True))
def __init__(self, source, n, str_type, count_type, start_date=Arrow(1970,1,1).datetime, stop_date=None):
    """Build a Vector over the comments of *source*.

    :param source: collection name inside the Comment database
    :param n: passed through to the parent constructor
    :param str_type: a StringLike subclass selecting the string representation
    :param count_type: counting strategy stored on the instance
    :param start_date: inclusive lower bound (datetime), default epoch
    :param stop_date: inclusive upper bound (datetime), default "now"
    :raises ValueError: unknown source or invalid str_type
    :raises TypeError: non-datetime bounds
    """
    super(Vector, self).__init__(n=n, str_type=str_type)
    # BUG FIX: the default used to be `stop_date=utcnow().datetime`, which is
    # evaluated once at import time -- long-lived processes silently used a
    # stale cutoff. Resolve "now" at call time instead.
    if stop_date is None:
        stop_date = utcnow().datetime
    if source not in c['Comment'].collection_names():
        raise ValueError("{} is not a collection in the Comment database".format(source))
    if str_type not in StringLike.__subclasses__():
        raise ValueError("{} is not a valid string type class".format(str_type))
    for date in [start_date, stop_date]:
        if not isinstance(date, datetime):
            raise TypeError("{} is not a datetime.datetime object".format(date))
    self.count_type = count_type
    # Round-trip through Arrow to normalise both bounds.
    self.start_date = Arrow.fromdatetime(start_date).datetime
    self.stop_date = Arrow.fromdatetime(stop_date).datetime
    self.body = c['Body'][source]
    self.cache = c['BodyCache'][source]
    self.comment = c['Comment'][source]
    self.__fromdb__()
def arrow(date=None, tz=None):
    """Factory: build an Arrow from an optional *date* and/or *tz*.

    - no arguments: UTC now
    - only *tz*: now in that timezone
    - only *date*: the single argument may itself be a timezone expression
      (then: now in that zone) or a datetime (then: wrapped as-is)
    - both: *date* localised to *tz*
    """
    if date is None:
        return utcnow() if tz is None else now(tz)
    if tz is None:
        try:
            # A lone positional argument may be a timezone expression.
            tz = parser.TzinfoParser.parse(date)
            return now(tz)
        # BUG FIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt.
        except Exception:
            return Arrow.fromdatetime(date)
    tz = parser.TzinfoParser.parse(tz)
    return Arrow.fromdatetime(date, tz)
def utcnow():
    '''Returns an :class:`Arrow <arrow.Arrow>` object, representing "now"
    in UTC time.

    Usage::

        >>> import arrow
        >>> arrow.utcnow()
        <Arrow [2013-05-08T05:19:07.018993+00:00]>
    '''
    # Thin module-level convenience wrapper around the classmethod.
    return Arrow.utcnow()
def clean_stop_datetime(self):
    """Validate the stop time against the start time and the targeted day.

    :raises forms.ValidationError: stop before/equal to start, or more than
        one day after ``self.daydate``
    :return: the validated stop datetime
    """
    start = self.cleaned_data.get('start_datetime')
    stop = self.cleaned_data['stop_datetime']
    # Day entry can't stop before the start
    if start and stop and stop <= start:
        raise forms.ValidationError(_("Stop time can't be less or equal to start time"))
    # Day entry can't stop in more than one future day from the targeted day date
    # NOTE(review): replace(days=1) is the old-arrow shift API -- confirm version.
    if stop and stop.date() > Arrow.fromdate(self.daydate).replace(days=1).date():
        raise forms.ValidationError(_("Stop time can't be more than the next day"))
    return stop
def get_dates(_): days = parse_days(get_content()) # we used to be able to support previous weeks; each item is a week return [ ( ( Arrow.fromdatetime(parse(day)), parse_locations(locations) ) for day, locations in days ) ]
def get_time_humanized(self):
    """Return a human-friendly relative description of ``self.created``.

    NOTE(review): several suspect spots are flagged inline; behaviour is
    documented as-is, not fixed.
    """
    now = self.created
    otherdate = datetime.now()
    if otherdate:
        dt = otherdate - now
        offset = dt.seconds + (dt.days * 60*60*24)  # total elapsed seconds
        if offset:
            # Successively strip seconds/minutes/hours, leaving days.
            # NOTE(review): on Python 3, `/=` yields floats here -- confirm intended.
            delta_s = offset % 60
            offset /= 60
            delta_m = offset % 60
            offset /= 60
            delta_h = offset % 24
            offset /= 24
            delta_d = offset
        else:
            return "just now"
        if delta_h > 1:
            return Arrow.now().replace(hours=delta_h * -1, minutes=delta_m * -1).humanize()
        if delta_m > 1:
            # NOTE(review): mixes seconds with minutes, unlike the hour branch above.
            return Arrow.now().replace(seconds=delta_s * -1, minutes=delta_m * -1).humanize()
        else:
            # NOTE(review): day-sized offsets only reach here when hours/minutes
            # are <= 1 -- the branch ordering looks like a bug; confirm.
            return Arrow.now().replace(days=delta_d * -1).humanize()
class AnalogClock(BaseClock):
    """Round analog clock with hour, minute and second hands."""

    def __init__(self, *args):
        # Capture the start time to compensate for time lost during initialisation.
        start_time = current_time()
        super().__init__(*args)
        self.time = Time(self.time, limiter=(12, 60, 60, 1000))
        self._radius = min(self.size) // 2 * 0.95
        self._center = (self.location[0] + self.size[0] / 2,
                        self.location[1] + self.size[1] / 2)
        self._clock_face = ClockFace(self._radius, self._center)
        # Hands are chained: minute hand references the hour hand, second
        # hand references the minute hand.
        self._hour_arrow = Arrow(self._center, self._radius // 3,
                                 self._radius / 20, self.time.hour, 12)
        self._minute_arrow = Arrow(self._center, self._radius * 0.65,
                                   self._radius / 30, self.time.minute, 60,
                                   self._hour_arrow)
        self._second_arrow = Arrow(self._center, self._radius * 0.75,
                                   self._radius / 40, round(self.time.second),
                                   60, self._minute_arrow)
        # Compensate for the time spent in initialisation.
        self.time += current_time() - start_time
        self.update()

    def set_time(self, time):
        super().set_time(time)
        self.update()

    def set_hour(self, hour):
        super().set_hour(hour)
        self.update()

    def set_minute(self, minute):
        super().set_minute(minute)
        self.update()

    def set_second(self, second):
        super().set_second(second)
        self.update()

    def update(self):
        # Re-point all three hands to the current time.
        self._second_arrow.set_position(round(self.time.second))
        self._minute_arrow.set_position(self.time.minute)
        self._hour_arrow.set_position(self.time.hour)
def initialize_room(self, room: Room, now: Arrow) -> Room:
    """
    Reset the room's state. Run when starting the estimate for the next
    backlog item.

    :param now: current time, used to compute the stored item's TTL
    :param room: room to reset
    :return: the freshly stored room
    """
    item = {
        "room_id": room.room_id,
        "opened": False,
        # Expire the stored item one day from now.
        "ttl": now.shift(days=1).int_timestamp,
    }
    for member in room.members:
        # Clear every member's point while keeping their nickname.
        item[f"mem_{member.member_id}"] = {
            "nickname": member.nickname,
            "point": None
        }
    self.table.put_item(Item=item)
    new_room = self.query_room(room.room_id)
    if new_room:
        return new_room
    raise Exception
def blacklist_project(self, project: db.Project, reset_time: arrow.Arrow):
    """
    Add specified project to `self.ratelimit_queue`, add backend
    to `self.blacklist_dict` and increment `self.ratelimit_counter`.

    Args:
        project: Project to blacklist
        reset_time: Time when the ratelimit will be reset
    """
    with self.blacklist_dict_lock:
        # Only the first project per backend records the reset time.
        if project.backend not in self.blacklist_dict:
            _log.debug(
                "Rate limit threshold reached. Adding {} to blacklist.".
                format(project.backend))
            # Normalise to UTC before storing.
            self.blacklist_dict[project.backend] = reset_time.to(
                "utc").datetime
    with self.ratelimit_queue_lock:
        # Queue the project id so it can be retried once the limit resets.
        if project.backend not in self.ratelimit_queue:
            self.ratelimit_queue[project.backend] = []
        self.ratelimit_queue[project.backend].append(project.id)
    with self.ratelimit_counter_lock:
        self.ratelimit_counter += 1
def run():
    """Run the Connect 4 game loop."""
    screen_width = 700   # window width
    screen_height = 800  # window height
    screen_size = (screen_width, screen_height)
    background_color = (255, 87, 51)  # orange
    # https://htmlcolorcodes.com/color-chart/ lists decimal colour codes
    # Create the screen object
    pygame.init()
    screen = pygame.display.set_mode(screen_size)
    pygame.display.set_caption("Connect 4")
    # NOTE(review): board is 7x7 while standard Connect 4 is 6x7 -- confirm intended.
    board = [[0, 0, 0, 0, 0, 0, 0],
             [0, 0, 0, 0, 0, 0, 0],
             [0, 0, 0, 0, 0, 0, 0],
             [0, 0, 0, 0, 0, 0, 0],
             [0, 0, 0, 0, 0, 0, 0],
             [0, 0, 0, 0, 0, 0, 0],
             [0, 0, 0, 0, 0, 0, 0]]
    arrow = Arrow(screen)  # arrow marking the drop column
    count = [0]  # number of pieces dropped; a list so it is passed by reference
    statu = ['NONE']  # current game status
    # Start the game
    while True:
        # The current player is derived from the loop count
        checkEvents(screen, arrow, board, count, statu)
        updateScreen(background_color, screen, arrow, board)
        # Check progress: has the game produced a winner?
        if statu[0] != 'NONE':
            if statu[0] == "no winner":
                print("Game END!! no winner")
            else:
                time.sleep(1)
                print("player " + str(statu[0]) + " wins!!!")
                func.pout(board)
            displayWinner(background_color, screen, statu[0])
            break
def __init__(self, *args): start_time = current_time( ) # Нужно для компенсации потерь времени на инициализацию super().__init__(*args) self.time = Time(self.time, limiter=(12, 60, 60, 1000)) self._radius = min(self.size) // 2 * 0.95 self._center = (self.location[0] + self.size[0] / 2, self.location[1] + self.size[1] / 2) self._clock_face = ClockFace(self._radius, self._center) self._hour_arrow = Arrow(self._center, self._radius // 3, self._radius / 20, self.time.hour, 12) self._minute_arrow = Arrow(self._center, self._radius * 0.65, self._radius / 30, self.time.minute, 60, self._hour_arrow) self._second_arrow = Arrow(self._center, self._radius * 0.75, self._radius / 40, round(self.time.second), 60, self._minute_arrow) # Компенсация потерь времени на инициализацию self.time += current_time() - start_time self.update()
def today():
    """Return today's UTC date formatted as ``YYYY-MM-DD``."""
    current = Arrow.utcnow()
    return current.format('YYYY-MM-DD')
def __init__(self, cardN, cardPos):
    # An Arrow anchored at the current mouse position, pointed at the card.
    self.arrow = Arrow(Vector(*pygame.mouse.get_pos()))
    self.cardN = cardN      # card number/index -- TODO confirm semantics
    self.cardPos = cardPos  # target card position (Vector)
    self.arrow.set_destination(cardPos)
def test_generate_optimizer(mocker, hyperopt_conf) -> None:
    """End-to-end check of Hyperopt.generate_optimizer with a mocked backtest."""
    hyperopt_conf.update({
        'spaces': 'all',
        'hyperopt_min_trades': 1,
    })
    # One fake profitable trade drives every expected metric below.
    trades = [('TRX/BTC', 0.023117, 0.000233, 100)]
    labels = ['currency', 'profit_percent', 'profit_abs', 'trade_duration']
    backtest_result = pd.DataFrame.from_records(trades, columns=labels)
    mocker.patch('freqtrade.optimize.hyperopt.Backtesting.backtest',
                 MagicMock(return_value=backtest_result))
    mocker.patch(
        'freqtrade.optimize.hyperopt.get_timerange',
        MagicMock(return_value=(Arrow(2017, 12, 10), Arrow(2017, 12, 13))))
    patch_exchange(mocker)
    mocker.patch('freqtrade.optimize.hyperopt.load', MagicMock())
    # One full parameter set covering buy, sell, roi, stoploss and trailing spaces.
    optimizer_param = {
        'adx-value': 0,
        'fastd-value': 35,
        'mfi-value': 0,
        'rsi-value': 0,
        'adx-enabled': False,
        'fastd-enabled': True,
        'mfi-enabled': False,
        'rsi-enabled': False,
        'trigger': 'macd_cross_signal',
        'sell-adx-value': 0,
        'sell-fastd-value': 75,
        'sell-mfi-value': 0,
        'sell-rsi-value': 0,
        'sell-adx-enabled': False,
        'sell-fastd-enabled': True,
        'sell-mfi-enabled': False,
        'sell-rsi-enabled': False,
        'sell-trigger': 'macd_cross_signal',
        'roi_t1': 60.0,
        'roi_t2': 30.0,
        'roi_t3': 20.0,
        'roi_p1': 0.01,
        'roi_p2': 0.01,
        'roi_p3': 0.1,
        'stoploss': -0.4,
        'trailing_stop': True,
        'trailing_stop_positive': 0.02,
        'trailing_stop_positive_offset_p1': 0.05,
        'trailing_only_offset_is_reached': False,
    }
    # NOTE(review): string spacing below reconstructed from collapsed source --
    # confirm exact whitespace against the original test.
    response_expected = {
        'loss': 1.9840569076926293,
        'results_explanation': (' 1 trades. 1/0/0 Wins/Draws/Losses. '
                                'Avg profit 2.31%. Median profit 2.31%. Total profit '
                                '0.00023300 BTC ( 2.31\N{GREEK CAPITAL LETTER SIGMA}%). '
                                'Avg duration 100.0 min.'
                                ).encode(locale.getpreferredencoding(), 'replace').decode('utf-8'),
        'params_details': {
            'buy': {'adx-enabled': False,
                    'adx-value': 0,
                    'fastd-enabled': True,
                    'fastd-value': 35,
                    'mfi-enabled': False,
                    'mfi-value': 0,
                    'rsi-enabled': False,
                    'rsi-value': 0,
                    'trigger': 'macd_cross_signal'},
            'roi': {0: 0.12000000000000001,
                    20.0: 0.02,
                    50.0: 0.01,
                    110.0: 0},
            'sell': {'sell-adx-enabled': False,
                     'sell-adx-value': 0,
                     'sell-fastd-enabled': True,
                     'sell-fastd-value': 75,
                     'sell-mfi-enabled': False,
                     'sell-mfi-value': 0,
                     'sell-rsi-enabled': False,
                     'sell-rsi-value': 0,
                     'sell-trigger': 'macd_cross_signal'},
            'stoploss': {'stoploss': -0.4},
            'trailing': {'trailing_only_offset_is_reached': False,
                         'trailing_stop': True,
                         'trailing_stop_positive': 0.02,
                         'trailing_stop_positive_offset': 0.07}
        },
        'params_dict': optimizer_param,
        'results_metrics': {
            'avg_profit': 2.3117,
            'draws': 0,
            'duration': 100.0,
            'losses': 0,
            'winsdrawslosses': ' 1 0 0',
            'median_profit': 2.3117,
            'profit': 2.3117,
            'total_profit': 0.000233,
            'trade_count': 1,
            'wins': 1
        },
        'total_profit': 0.00023300
    }
    hyperopt = Hyperopt(hyperopt_conf)
    hyperopt.dimensions = hyperopt.hyperopt_space()
    generate_optimizer_value = hyperopt.generate_optimizer(
        list(optimizer_param.values()))
    assert generate_optimizer_value == response_expected
def day():
    """Return the current UTC day-of-month as an integer."""
    day_text = Arrow.utcnow().format('DD')
    return int(day_text)
def sql2py(self, val):
    """Convert a database value to an Arrow: strings are parsed directly,
    anything else is converted by the parent and wrapped."""
    if not isinstance(val, str):
        return Arrow.fromdatetime(super().sql2py(val))
    return arrow.get(val)
def due_on(t: Task, date: arrow.Arrow) -> bool:
    """True when task *t* is due in the same ISO (year, week, weekday) as *date*."""
    due_calendar = arrow.get(t.due).isocalendar()
    return due_calendar == date.isocalendar()
def test_extract_trades_of_period(testdatadir): pair = "UNITTEST/BTC" # 2018-11-14 06:07:00 timerange = TimeRange('date', None, 1510639620, 0) data = load_pair_history(pair=pair, timeframe='1m', datadir=testdatadir, timerange=timerange) trades = DataFrame({ 'pair': [pair, pair, pair, pair], 'profit_ratio': [0.0, 0.1, -0.2, -0.5], 'profit_abs': [0.0, 1, -2, -5], 'open_date': to_datetime([ Arrow(2017, 11, 13, 15, 40, 0).datetime, Arrow(2017, 11, 14, 9, 41, 0).datetime, Arrow(2017, 11, 14, 14, 20, 0).datetime, Arrow(2017, 11, 15, 3, 40, 0).datetime, ], utc=True), 'close_date': to_datetime([ Arrow(2017, 11, 13, 16, 40, 0).datetime, Arrow(2017, 11, 14, 10, 41, 0).datetime, Arrow(2017, 11, 14, 15, 25, 0).datetime, Arrow(2017, 11, 15, 3, 55, 0).datetime, ], utc=True) }) trades1 = extract_trades_of_period(data, trades) # First and last trade are dropped as they are out of range assert len(trades1) == 2 assert trades1.iloc[0].open_date == Arrow(2017, 11, 14, 9, 41, 0).datetime assert trades1.iloc[0].close_date == Arrow(2017, 11, 14, 10, 41, 0).datetime assert trades1.iloc[-1].open_date == Arrow(2017, 11, 14, 14, 20, 0).datetime assert trades1.iloc[-1].close_date == Arrow(2017, 11, 14, 15, 25, 0).datetime
def test_hyperopt_format_results(hyperopt):
    """_format_explanation_string must render the headline metrics."""
    # Fake backtest output: three closed trades plus one still open.
    bt_result = {
        'results': pd.DataFrame({
            "pair": ["UNITTEST/BTC", "UNITTEST/BTC", "UNITTEST/BTC", "UNITTEST/BTC"],
            "profit_ratio": [0.003312, 0.010801, 0.013803, 0.002780],
            "profit_abs": [0.000003, 0.000011, 0.000014, 0.000003],
            "open_date": [
                Arrow(2017, 11, 14, 19, 32, 00).datetime,
                Arrow(2017, 11, 14, 21, 36, 00).datetime,
                Arrow(2017, 11, 14, 22, 12, 00).datetime,
                Arrow(2017, 11, 14, 22, 44, 00).datetime
            ],
            "close_date": [
                Arrow(2017, 11, 14, 21, 35, 00).datetime,
                Arrow(2017, 11, 14, 22, 10, 00).datetime,
                Arrow(2017, 11, 14, 22, 43, 00).datetime,
                Arrow(2017, 11, 14, 22, 58, 00).datetime
            ],
            "open_rate": [0.002543, 0.003003, 0.003089, 0.003214],
            "close_rate": [0.002546, 0.003014, 0.003103, 0.003217],
            "trade_duration": [123, 34, 31, 14],
            "is_open": [False, False, False, True],
            "stake_amount": [0.01, 0.01, 0.01, 0.01],
            "sell_reason": [
                SellType.ROI,
                SellType.STOP_LOSS,
                SellType.ROI,
                SellType.FORCE_SELL
            ]
        }),
        'config': hyperopt.config,
        'locks': [],
        'final_balance': 0.02,
        'backtest_start_time': 1619718665,
        'backtest_end_time': 1619718665,
    }
    results_metrics = generate_strategy_stats({'XRP/BTC': None}, '', bt_result,
                                              Arrow(2017, 11, 14, 19, 32, 00),
                                              Arrow(2017, 12, 14, 19, 32, 00),
                                              market_change=0)
    results_explanation = HyperoptTools.format_results_explanation_string(
        results_metrics, 'BTC')
    total_profit = results_metrics['profit_total_abs']
    results = {
        'loss': 0.0,
        'params_dict': None,
        'params_details': None,
        'results_metrics': results_metrics,
        'results_explanation': results_explanation,
        'total_profit': total_profit,
        'current_epoch': 1,
        'is_initial_point': True,
    }
    result = HyperoptTools._format_explanation_string(results, 1)
    # Average profit, total profit and average duration must appear verbatim.
    assert ' 0.71%' in result
    assert 'Total profit 0.00003100 BTC' in result
    assert '0:50:00 min' in result
def get_timeframe(input1):
    """Return a fixed (start, end) Arrow pair; *input1* is ignored (mock helper)."""
    start = Arrow(2017, 11, 14, 21, 17)
    end = Arrow(2017, 11, 14, 22, 59)
    return start, end
def test_backtest(default_conf, fee, mocker, testdatadir) -> None:
    """Full backtest over the UNITTEST/BTC fixture must produce exactly the
    two expected ROI trades."""
    default_conf['ask_strategy']['use_sell_signal'] = False
    mocker.patch('freqtrade.exchange.Exchange.get_fee', fee)
    patch_exchange(mocker)
    backtesting = Backtesting(default_conf)
    pair = 'UNITTEST/BTC'
    timerange = TimeRange('date', None, 1517227800, 0)
    data = history.load_data(datadir=testdatadir, timeframe='5m',
                             pairs=['UNITTEST/BTC'], timerange=timerange)
    processed = backtesting.strategy.ohlcvdata_to_dataframe(data)
    min_date, max_date = get_timerange(processed)
    results = backtesting.backtest(
        processed=processed,
        stake_amount=default_conf['stake_amount'],
        start_date=min_date,
        end_date=max_date,
        max_open_trades=10,
        position_stacking=False,
    )
    assert not results.empty
    assert len(results) == 2
    # The exact trades the fixture data is known to produce.
    expected = pd.DataFrame({
        'pair': [pair, pair],
        'profit_percent': [0.0, 0.0],
        'profit_abs': [0.0, 0.0],
        'open_date': pd.to_datetime([
            Arrow(2018, 1, 29, 18, 40, 0).datetime,
            Arrow(2018, 1, 30, 3, 30, 0).datetime
        ], utc=True),
        'open_rate': [0.104445, 0.10302485],
        'open_fee': [0.0025, 0.0025],
        'close_date': pd.to_datetime([
            Arrow(2018, 1, 29, 22, 35, 0).datetime,
            Arrow(2018, 1, 30, 4, 10, 0).datetime
        ], utc=True),
        'close_rate': [0.104969, 0.103541],
        'close_fee': [0.0025, 0.0025],
        'amount': [0.00957442, 0.0097064],
        'trade_duration': [235, 40],
        'open_at_end': [False, False],
        'sell_reason': [SellType.ROI, SellType.ROI]
    })
    pd.testing.assert_frame_equal(results, expected)
    data_pair = processed[pair]
    for _, t in results.iterrows():
        ln = data_pair.loc[data_pair["date"] == t["open_date"]]
        # Check open trade rate alignes to open rate
        assert ln is not None
        assert round(ln.iloc[0]["open"], 6) == round(t["open_rate"], 6)
        # check close trade rate alignes to close rate or is between high and low
        ln = data_pair.loc[data_pair["date"] == t["close_date"]]
        assert (round(ln.iloc[0]["open"], 6) == round(t["close_rate"], 6) or
                round(ln.iloc[0]["low"], 6) < round(
                    t["close_rate"], 6) < round(ln.iloc[0]["high"], 6))
def _format_date(date):
    """Format the given UTC time tuple as an ISO-8601 date string."""
    # timegm treats the tuple as UTC and converts it to an epoch timestamp.
    epoch = calendar.timegm(date)
    stamp = Arrow.fromtimestamp(epoch, tzinfo=utc)
    return stamp.date().isoformat()
def setUp(self):
    # Run the parent fixture first, then anchor the object under test:
    # an Arrow wrapping the current UTC wall-clock time in the UTC zone.
    super(ArrowTests, self).setUp()
    self.arrow = Arrow(datetime.utcnow(), tz='UTC')
class ArrowTests(BaseArrowTests):
    """Tests for the Arrow wrapper: string rendering, timezone conversion,
    timestamp properties, and the ``_get_datetime`` input-coercion helper.
    """

    def setUp(self):
        super(ArrowTests, self).setUp()
        # Fixture: a UTC-anchored Arrow around the current UTC time.
        self.arrow = Arrow(datetime.utcnow(), tz='UTC')

    def test_str(self):
        # str() renders "<locale date time>.<microsecond> +00:00 (UTC)".
        expected = '{0}.{1} +00:00 (UTC)'.format(
            time.strftime('%x %X', self.arrow.datetime.timetuple()),
            self.arrow.datetime.microsecond)
        self.assertEqual(self.arrow.__str__(), expected)

    def test_repr(self):
        # repr() is the str() form wrapped in "Arrow(...)".
        expected = 'Arrow({0}.{1} +00:00 (UTC))'.format(
            time.strftime('%x %X', self.arrow.datetime.timetuple()),
            self.arrow.datetime.microsecond)
        self.assertEqual(self.arrow.__repr__(), expected)

    def test_tz(self):
        self.arrow._timezone = self.utc
        self.assertEqual(self.arrow.tz, self.utc)

    def test_to(self):
        self.arrow._datetime = datetime.utcnow().replace(
            tzinfo=self.utc.tzinfo)
        self.arrow._timezone = self.utc
        result = self.arrow.to('local')
        self.assert_dt_equal(
            result.datetime,
            self.arrow._datetime.astimezone(self.local.tzinfo))

    def test_utc_utc(self):
        # NOTE(review): this starts from a *local*-zone datetime despite the
        # name; the names of test_utc_utc / test_utc_local appear swapped —
        # confirm intent before renaming.
        self.arrow._datetime = datetime.now().replace(tzinfo=self.local.tzinfo)
        self.arrow._timezone = self.local
        result = self.arrow.utc()
        self.assert_dt_equal(result.datetime,
                             self.arrow._datetime.astimezone(self.utc.tzinfo))

    def test_utc_local(self):
        self.arrow._datetime = datetime.utcnow().replace(
            tzinfo=self.utc.tzinfo)
        self.arrow._timezone = self.utc
        result = self.arrow.utc()
        self.assert_dt_equal(result.datetime,
                             self.arrow._datetime.astimezone(self.utc.tzinfo))

    def test_datetime(self):
        # The datetime property returns the stored datetime unchanged.
        dt = datetime.utcnow().replace(tzinfo=tz.tzutc())
        self.arrow._datetime = dt
        self.arrow._timezone = self.utc
        result = self.arrow.datetime
        self.assertEqual(result, dt)

    def test_timestamp_utc(self):
        dt = datetime.utcnow()
        self.arrow._datetime = dt
        self.arrow._timezone = self.utc
        result = self.arrow.timestamp
        # timegm interprets the naive datetime as UTC.
        self.assertEqual(result, calendar.timegm(dt.timetuple()))

    def test_timestamp_local(self):
        dt = datetime.now()
        self.arrow._datetime = dt
        self.arrow._timezone = self.local
        result = self.arrow.timestamp
        # mktime interprets the naive datetime as local time.
        self.assertEqual(result, time.mktime(dt.timetuple()))

    # --- _get_datetime coercion: ints, floats, numeric strings, datetimes ---

    def test_get_datetime_int(self):
        result = self.arrow._get_datetime(int(time.time()), self.utc)
        self.assert_dt_equal(result, datetime.utcnow())

    def test_get_datetime_float_utc(self):
        result = self.arrow._get_datetime(time.time(), self.utc)
        self.assert_dt_equal(result, datetime.utcnow())

    def test_get_datetime_float_local(self):
        result = self.arrow._get_datetime(time.time(), self.local)
        self.assert_dt_equal(result, datetime.now())

    def test_get_datetime_str_float_utc(self):
        result = self.arrow._get_datetime(str(time.time()), self.utc)
        self.assert_dt_equal(result, datetime.utcnow())

    def test_get_datetime_str_int_utc(self):
        result = self.arrow._get_datetime(str(int(time.time())), self.utc)
        self.assert_dt_equal(result, datetime.utcnow())

    def test_get_datetime_str_float_local(self):
        result = self.arrow._get_datetime(str(time.time()), self.local)
        self.assert_dt_equal(result, datetime.now())

    def test_get_datetime_str_int_local(self):
        result = self.arrow._get_datetime(str(int(time.time())), self.local)
        self.assert_dt_equal(result, datetime.now())

    def test_get_datetime_datetime(self):
        # A datetime passes through unchanged.
        dt = datetime.utcnow()
        result = self.arrow._get_datetime(dt, self.utc)
        self.assert_dt_equal(result, dt)

    def test_get_datetime_parse_str(self):
        # Unparseable strings are rejected with RuntimeError.
        with self.assertRaises(RuntimeError):
            self.arrow._get_datetime('abcdefg', self.utc)

    def test_get_datetime_unrecognized(self):
        # Unsupported input types are rejected with RuntimeError.
        with self.assertRaises(RuntimeError):
            self.arrow._get_datetime(object, self.utc)
def quickembed(name: str, desc: str, begin: Arrow, end: Arrow) -> Embed:
    """Build an Embed titled *name* with local-time Start/End fields.

    The description is converted from HTML to markdown; both timestamps
    are rendered in the local timezone using the cookie date format.
    """
    embed = Embed(title=name, description=html2md(desc))
    embed = embed.add_field(
        name="Start", value=begin.to('local').format(arrow.FORMAT_COOKIE))
    embed = embed.add_field(
        name="End", value=end.to('local').format(arrow.FORMAT_COOKIE))
    return embed
def test_generate_backtest_stats(default_conf, testdatadir):
    """Exercise generate_backtest_stats on a canned result set and verify
    the stats structure, drawdown fields, and on-disk storage round-trip.
    """
    default_conf.update({'strategy': 'DefaultStrategy'})
    StrategyResolver.load_strategy(default_conf)
    # Four all-winning trades: drawdown must come out as zero below.
    results = {'DefStrat': {
        'results': pd.DataFrame(
            {"pair": ["UNITTEST/BTC", "UNITTEST/BTC",
                      "UNITTEST/BTC", "UNITTEST/BTC"],
             "profit_ratio": [0.003312, 0.010801, 0.013803, 0.002780],
             "profit_abs": [0.000003, 0.000011, 0.000014, 0.000003],
             "open_date": [Arrow(2017, 11, 14, 19, 32, 00).datetime,
                           Arrow(2017, 11, 14, 21, 36, 00).datetime,
                           Arrow(2017, 11, 14, 22, 12, 00).datetime,
                           Arrow(2017, 11, 14, 22, 44, 00).datetime],
             "close_date": [Arrow(2017, 11, 14, 21, 35, 00).datetime,
                            Arrow(2017, 11, 14, 22, 10, 00).datetime,
                            Arrow(2017, 11, 14, 22, 43, 00).datetime,
                            Arrow(2017, 11, 14, 22, 58, 00).datetime],
             "open_rate": [0.002543, 0.003003, 0.003089, 0.003214],
             "close_rate": [0.002546, 0.003014, 0.003103, 0.003217],
             "trade_duration": [123, 34, 31, 14],
             "is_open": [False, False, False, True],
             "sell_reason": [SellType.ROI, SellType.STOP_LOSS,
                             SellType.ROI, SellType.FORCE_SELL]
             }),
        'config': default_conf,
        'locks': [],
        'backtest_start_time': Arrow.utcnow().int_timestamp,
        'backtest_end_time': Arrow.utcnow().int_timestamp,
        }}
    timerange = TimeRange.parse_timerange('1510688220-1510700340')
    min_date = Arrow.fromtimestamp(1510688220)
    max_date = Arrow.fromtimestamp(1510700340)
    btdata = history.load_data(testdatadir, '1m', ['UNITTEST/BTC'],
                               timerange=timerange, fill_up_missing=True)
    stats = generate_backtest_stats(btdata, results, min_date, max_date)
    assert isinstance(stats, dict)
    assert 'strategy' in stats
    assert 'DefStrat' in stats['strategy']
    assert 'strategy_comparison' in stats
    strat_stats = stats['strategy']['DefStrat']
    assert strat_stats['backtest_start'] == min_date.datetime
    assert strat_stats['backtest_end'] == max_date.datetime
    assert strat_stats['total_trades'] == len(results['DefStrat']['results'])
    # Above sample had no losing trade
    assert strat_stats['max_drawdown'] == 0.0
    # Second sample includes one losing trade.
    # NOTE(review): this second ``results`` dict is built but
    # generate_backtest_stats is never re-run, so the assertions below
    # still inspect the first run's strat_stats — confirm whether a second
    # stats generation was intended here.
    results = {'DefStrat': {
        'results': pd.DataFrame(
            {"pair": ["UNITTEST/BTC", "UNITTEST/BTC",
                      "UNITTEST/BTC", "UNITTEST/BTC"],
             "profit_ratio": [0.003312, 0.010801, -0.013803, 0.002780],
             "profit_abs": [0.000003, 0.000011, -0.000014, 0.000003],
             "open_date": [Arrow(2017, 11, 14, 19, 32, 00).datetime,
                           Arrow(2017, 11, 14, 21, 36, 00).datetime,
                           Arrow(2017, 11, 14, 22, 12, 00).datetime,
                           Arrow(2017, 11, 14, 22, 44, 00).datetime],
             "close_date": [Arrow(2017, 11, 14, 21, 35, 00).datetime,
                            Arrow(2017, 11, 14, 22, 10, 00).datetime,
                            Arrow(2017, 11, 14, 22, 43, 00).datetime,
                            Arrow(2017, 11, 14, 22, 58, 00).datetime],
             "open_rate": [0.002543, 0.003003, 0.003089, 0.003214],
             "close_rate": [0.002546, 0.003014, 0.0032903, 0.003217],
             "trade_duration": [123, 34, 31, 14],
             "open_at_end": [False, False, False, True],
             "sell_reason": [SellType.ROI, SellType.STOP_LOSS,
                             SellType.ROI, SellType.FORCE_SELL]
             }),
        'config': default_conf
        }}
    assert strat_stats['max_drawdown'] == 0.0
    assert strat_stats['drawdown_start'] == datetime(1970, 1, 1,
                                                     tzinfo=timezone.utc)
    assert strat_stats['drawdown_end'] == datetime(1970, 1, 1,
                                                   tzinfo=timezone.utc)
    assert strat_stats['drawdown_end_ts'] == 0
    assert strat_stats['drawdown_start_ts'] == 0
    assert strat_stats['pairlist'] == ['UNITTEST/BTC']
    # Test storing stats
    filename = Path(testdatadir / 'btresult.json')
    filename_last = Path(testdatadir / LAST_BT_RESULT_FN)
    _backup_file(filename_last, copy_file=True)
    assert not filename.is_file()
    store_backtest_stats(filename, stats)
    # get real Filename (it's btresult-<date>.json)
    last_fn = get_latest_backtest_filename(filename_last.parent)
    assert re.match(r"btresult-.*\.json", last_fn)
    filename1 = (testdatadir / last_fn)
    assert filename1.is_file()
    content = filename1.read_text()
    assert 'max_drawdown' in content
    assert 'strategy' in content
    assert 'pairlist' in content
    assert filename_last.is_file()
    # Clean up the on-disk artifacts created by store_backtest_stats.
    _clean_test_file(filename_last)
    filename1.unlink()
def test_extract_trades_of_period():
    """Trades outside the loaded candle period must be dropped."""
    pair = "UNITTEST/BTC"
    timerange = TimeRange(None, 'line', 0, -1000)
    data = load_pair_history(pair=pair, ticker_interval='1m',
                             datadir=None, timerange=timerange)
    # timerange = 2017-11-14 06:07 - 2017-11-14 22:58:00
    # Four trades: the first opens before the period, the last after it.
    trades = DataFrame(
        {'pair': [pair, pair, pair, pair],
         'profit_percent': [0.0, 0.1, -0.2, -0.5],
         'profit_abs': [0.0, 1, -2, -5],
         'open_time': to_datetime([Arrow(2017, 11, 13, 15, 40, 0).datetime,
                                   Arrow(2017, 11, 14, 9, 41, 0).datetime,
                                   Arrow(2017, 11, 14, 14, 20, 0).datetime,
                                   Arrow(2017, 11, 15, 3, 40, 0).datetime,
                                   ], utc=True
                                  ),
         'close_time': to_datetime([Arrow(2017, 11, 13, 16, 40, 0).datetime,
                                    Arrow(2017, 11, 14, 10, 41, 0).datetime,
                                    Arrow(2017, 11, 14, 15, 25, 0).datetime,
                                    Arrow(2017, 11, 15, 3, 55, 0).datetime,
                                    ], utc=True)
         })
    trades1 = extract_trades_of_period(data, trades)
    # First and last trade are dropped as they are out of range
    assert len(trades1) == 2
    assert trades1.iloc[0].open_time == Arrow(2017, 11, 14, 9, 41, 0).datetime
    assert trades1.iloc[0].close_time == Arrow(2017, 11, 14, 10, 41, 0).datetime
    assert trades1.iloc[-1].open_time == Arrow(2017, 11, 14, 14, 20, 0).datetime
    assert trades1.iloc[-1].close_time == Arrow(2017, 11, 14, 15, 25, 0).datetime
def arrow2grid(arrow_ins: Arrow) -> List[str]:
    """Split an Arrow timestamp into its year/month/day/hour/minute/second
    components as a list of (non-zero-padded) strings."""
    rendered = arrow_ins.format("YYYY-M-D-H-m-s")
    return rendered.split("-")
def test_generate_optimizer(mocker, hyperopt_conf) -> None:
    """Feed one fixed parameter set through Hyperopt.generate_optimizer and
    pin the resulting loss, explanation string, and parameter breakdown.
    """
    hyperopt_conf.update({
        'spaces': 'all',
        'hyperopt_min_trades': 1,
    })
    # Canned backtest output returned by the mocked Backtesting.backtest.
    backtest_result = {
        'results': pd.DataFrame(
            {"pair": ["UNITTEST/BTC", "UNITTEST/BTC",
                      "UNITTEST/BTC", "UNITTEST/BTC"],
             "profit_ratio": [0.003312, 0.010801, 0.013803, 0.002780],
             "profit_abs": [0.000003, 0.000011, 0.000014, 0.000003],
             "open_date": [Arrow(2017, 11, 14, 19, 32, 00).datetime,
                           Arrow(2017, 11, 14, 21, 36, 00).datetime,
                           Arrow(2017, 11, 14, 22, 12, 00).datetime,
                           Arrow(2017, 11, 14, 22, 44, 00).datetime],
             "close_date": [Arrow(2017, 11, 14, 21, 35, 00).datetime,
                            Arrow(2017, 11, 14, 22, 10, 00).datetime,
                            Arrow(2017, 11, 14, 22, 43, 00).datetime,
                            Arrow(2017, 11, 14, 22, 58, 00).datetime],
             "open_rate": [0.002543, 0.003003, 0.003089, 0.003214],
             "close_rate": [0.002546, 0.003014, 0.003103, 0.003217],
             "trade_duration": [123, 34, 31, 14],
             "is_open": [False, False, False, True],
             "stake_amount": [0.01, 0.01, 0.01, 0.01],
             "sell_reason": [SellType.ROI, SellType.STOP_LOSS,
                             SellType.ROI, SellType.FORCE_SELL]
             }),
        'config': hyperopt_conf,
        'locks': [],
        'final_balance': 1000,
    }
    mocker.patch('freqtrade.optimize.hyperopt.Backtesting.backtest',
                 return_value=backtest_result)
    mocker.patch('freqtrade.optimize.hyperopt.get_timerange',
                 return_value=(Arrow(2017, 12, 10), Arrow(2017, 12, 13)))
    patch_exchange(mocker)
    mocker.patch('freqtrade.optimize.hyperopt.load',
                 return_value={'XRP/BTC': None})
    # One concrete point in the hyperopt search space.
    optimizer_param = {
        'adx-value': 0,
        'fastd-value': 35,
        'mfi-value': 0,
        'rsi-value': 0,
        'adx-enabled': False,
        'fastd-enabled': True,
        'mfi-enabled': False,
        'rsi-enabled': False,
        'trigger': 'macd_cross_signal',
        'sell-adx-value': 0,
        'sell-fastd-value': 75,
        'sell-mfi-value': 0,
        'sell-rsi-value': 0,
        'sell-adx-enabled': False,
        'sell-fastd-enabled': True,
        'sell-mfi-enabled': False,
        'sell-rsi-enabled': False,
        'sell-trigger': 'macd_cross_signal',
        'roi_t1': 60.0,
        'roi_t2': 30.0,
        'roi_t3': 20.0,
        'roi_p1': 0.01,
        'roi_p2': 0.01,
        'roi_p3': 0.1,
        'stoploss': -0.4,
        'trailing_stop': True,
        'trailing_stop_positive': 0.02,
        'trailing_stop_positive_offset_p1': 0.05,
        'trailing_only_offset_is_reached': False,
    }
    # The explanation string is encoded/decoded through the preferred locale
    # encoding with 'replace', matching how the implementation emits it.
    response_expected = {
        'loss': 1.9147239021396234,
        'results_explanation': (' 4 trades. 4/0/0 Wins/Draws/Losses. '
                                'Avg profit 0.77%. Median profit 0.71%. Total profit '
                                '0.00003100 BTC ( 0.00\N{GREEK CAPITAL LETTER SIGMA}%). '
                                'Avg duration 0:50:00 min.'
                                ).encode(locale.getpreferredencoding(), 'replace').decode('utf-8'),
        'params_details': {'buy': {'adx-enabled': False,
                                   'adx-value': 0,
                                   'fastd-enabled': True,
                                   'fastd-value': 35,
                                   'mfi-enabled': False,
                                   'mfi-value': 0,
                                   'rsi-enabled': False,
                                   'rsi-value': 0,
                                   'trigger': 'macd_cross_signal'},
                           'roi': {0: 0.12000000000000001,
                                   20.0: 0.02,
                                   50.0: 0.01,
                                   110.0: 0},
                           'sell': {'sell-adx-enabled': False,
                                    'sell-adx-value': 0,
                                    'sell-fastd-enabled': True,
                                    'sell-fastd-value': 75,
                                    'sell-mfi-enabled': False,
                                    'sell-mfi-value': 0,
                                    'sell-rsi-enabled': False,
                                    'sell-rsi-value': 0,
                                    'sell-trigger': 'macd_cross_signal'},
                           'stoploss': {'stoploss': -0.4},
                           'trailing': {'trailing_only_offset_is_reached': False,
                                        'trailing_stop': True,
                                        'trailing_stop_positive': 0.02,
                                        'trailing_stop_positive_offset': 0.07}},
        'params_dict': optimizer_param,
        'params_not_optimized': {'buy': {}, 'sell': {}},
        'results_metrics': ANY,
        'total_profit': 3.1e-08
    }
    hyperopt = Hyperopt(hyperopt_conf)
    hyperopt.min_date = Arrow(2017, 12, 10)
    hyperopt.max_date = Arrow(2017, 12, 13)
    hyperopt.init_spaces()
    # NOTE(review): self-assignment below is a no-op unless ``dimensions``
    # is a property with side effects — confirm whether it can be removed.
    hyperopt.dimensions = hyperopt.dimensions
    generate_optimizer_value = hyperopt.generate_optimizer(
        list(optimizer_param.values()))
    assert generate_optimizer_value == response_expected
def icc_registered(report_datetime=None):
    """Return True when the current site's ICC registration was opened on
    or after the given report date.

    NOTE(review): the default ``report_datetime=None`` would make
    ``Arrow.fromdatetime`` fail — callers apparently always supply a
    value; confirm before relying on the default.
    """
    report_datetime_utc = Arrow.fromdatetime(report_datetime)
    queryset = IntegratedCareClinicRegistration.objects.filter(
        site=Site.objects.get_current(),
        date_opened__gte=report_datetime_utc.date(),
    )
    return queryset.exists()
def friendly_time(d: datetime.datetime) -> str:
    """Humanize *d* relative to the current UTC time ("x minutes ago")."""
    reference = Arrow.fromdatetime(datetime.datetime.utcnow())
    return Arrow.fromdatetime(d).humanize(reference)
def transform_python(self, value: arrow.Arrow):
    # Serialize the Arrow timestamp to its ISO-8601 string representation.
    return value.isoformat()
async def get_logs(self, date: "arrow.Arrow" = None) -> str:
    """Fetch the 'logs' endpoint, filtered by *date* (default: today).

    Bug fix: the previous signature used ``date=arrow.now()`` as the
    default, which Python evaluates once at definition time — so the
    "current" date was frozen at import and went stale for the life of
    the process.  Defaulting to ``None`` and resolving at call time
    restores the intended per-call behavior.  Note that an explicitly
    passed ``None`` now also means "today".
    """
    params = json_obj()
    if date is None:
        # Resolve the default at call time, not definition time.
        date = arrow.now()
    if date:
        params.date = date.format('YYYY/MM/DD')
    return (await self._get('logs', **params)).json()['result']
def test_for_polish_timezone():
    """Exercise the date-range helper across the 07:00-08:00 window on
    2020-06-11."""
    window_start = Arrow(year=2020, month=6, day=11, hour=7)
    window_end = Arrow(year=2020, month=6, day=11, hour=8)
    _run_test_between_dates(since=window_start, until=window_end)
def test_backtest_record(default_conf, fee, mocker):
    """Verify _store_backtest_result writes one JSON dump per call, names
    the file per strategy, and emits records of the expected schema.
    """
    names = []
    records = []
    patch_exchange(mocker)
    mocker.patch('freqtrade.exchange.Exchange.get_fee', fee)
    # Capture file_dump_json calls instead of writing to disk.
    mocker.patch('freqtrade.optimize.backtesting.file_dump_json',
                 new=lambda n, r: (names.append(n), records.append(r)))
    backtesting = Backtesting(default_conf)
    results = pd.DataFrame(
        {"pair": ["UNITTEST/BTC", "UNITTEST/BTC",
                  "UNITTEST/BTC", "UNITTEST/BTC"],
         "profit_percent": [0.003312, 0.010801, 0.013803, 0.002780],
         "profit_abs": [0.000003, 0.000011, 0.000014, 0.000003],
         "open_time": [Arrow(2017, 11, 14, 19, 32, 00).datetime,
                       Arrow(2017, 11, 14, 21, 36, 00).datetime,
                       Arrow(2017, 11, 14, 22, 12, 00).datetime,
                       Arrow(2017, 11, 14, 22, 44, 00).datetime],
         "close_time": [Arrow(2017, 11, 14, 21, 35, 00).datetime,
                        Arrow(2017, 11, 14, 22, 10, 00).datetime,
                        Arrow(2017, 11, 14, 22, 43, 00).datetime,
                        Arrow(2017, 11, 14, 22, 58, 00).datetime],
         "open_rate": [0.002543, 0.003003, 0.003089, 0.003214],
         "close_rate": [0.002546, 0.003014, 0.003103, 0.003217],
         "open_index": [1, 119, 153, 185],
         "close_index": [118, 151, 184, 199],
         "trade_duration": [123, 34, 31, 14],
         "open_at_end": [False, False, False, True],
         "sell_reason": [SellType.ROI, SellType.STOP_LOSS,
                         SellType.ROI, SellType.FORCE_SELL]
         })
    backtesting._store_backtest_result("backtest-result.json", results)
    assert len(results) == 4
    # Assert file_dump_json was only called once
    assert names == ['backtest-result.json']
    records = records[0]
    # Ensure records are of correct type
    assert len(records) == 4
    # reset test to test with strategy name
    names = []
    records = []
    backtesting._store_backtest_result("backtest-result.json",
                                       results, "DefStrat")
    assert len(results) == 4
    # Assert file_dump_json was only called once
    assert names == ['backtest-result-DefStrat.json']
    records = records[0]
    # Ensure records are of correct type
    assert len(records) == 4
    # ('UNITTEST/BTC', 0.00331158, '1510684320', '1510691700', 0, 117)
    # Below follows just a typecheck of the schema/type of trade-records
    oix = None
    for (pair, profit, date_buy, date_sell, buy_index, dur,
         openr, closer, open_at_end, sell_reason) in records:
        assert pair == 'UNITTEST/BTC'
        assert isinstance(profit, float)
        # FIX: buy/sell should be converted to ints
        assert isinstance(date_buy, float)
        assert isinstance(date_sell, float)
        assert isinstance(openr, float)
        assert isinstance(closer, float)
        assert isinstance(open_at_end, bool)
        assert isinstance(sell_reason, str)
        # NOTE(review): the result of this isinstance call is discarded —
        # an ``assert isinstance(...)`` was probably intended.
        isinstance(buy_index, pd._libs.tslib.Timestamp)
        if oix:
            # Buy indices must be strictly increasing across trades.
            assert buy_index > oix
        oix = buy_index
        assert dur > 0
def modify_df(beg: Arrow, end: Arrow, modifier: Callable):
    """Build a date-indexed frame with one ``[date, modifier(day)]`` row
    per day in the inclusive range ``beg``..``end``."""
    rows = [[day.date(), modifier(day)]
            for day in Arrow.range('day', beg, end)]
    return create_df_indexed_by_date(create_df(rows))