def check_no_trade_period(self):
    """Return True when the current JST time falls inside a configured
    'no_trade' period.

    Each entry of ``self._config['no_trade']`` carries a ``period`` string
    of the fixed-position form ``'HH:MM-HH:MM'``, optionally followed by
    ``'-W'`` where ``W`` is a weekday number (0 = Monday).  A missing
    weekday suffix means the rule applies every day.

    Returns:
        bool: True if trading is currently disallowed, False otherwise
        (including when the config entry is absent, None, or malformed).
    """
    # Inclusive range test that also supports ranges wrapping midnight.
    # https://codeday.me/jp/qa/20190219/264470.html
    def time_in_range(start, end, x):
        """Return true if x is in the range [start, end]"""
        if start <= end:
            return start <= x <= end
        else:
            return start <= x or x <= end

    within_period = False
    # Current wall clock in JST (UTC+9), truncated to whole minutes.
    now = datetime_time((datetime.utcnow() + timedelta(hours=9)).hour,
                        (datetime.utcnow() + timedelta(hours=9)).minute, 0)
    weekday = (datetime.utcnow() + timedelta(hours=9)).weekday()
    if 'no_trade' in self._config:
        try:
            # BUG FIX (idiom): compare against None with 'is not', not '!='.
            if self._config['no_trade'] is not None:
                for p in self._config['no_trade']:
                    # Fixed-offset parse of 'HH:MM-HH:MM[-W]'.
                    start = datetime_time(int(p['period'][0:2]),
                                          int(p['period'][3:5]), 0)
                    end = datetime_time(int(p['period'][6:8]),
                                        int(p['period'][9:11]), 0)
                    # len <= 11 means no weekday suffix: applies daily.
                    if (len(p['period']) <= 11
                            or int(p['period'][12]) == weekday) \
                            and time_in_range(start, end, now):
                        self._logger.info('no_trade period : {}'.format(
                            p['period']))
                        within_period = True
        except Exception as e:
            # Malformed config: report it and fall through to False.
            self._logger.error(
                'no_trade period is not correct: {}'.format(e))
            self._logger.info('no_trade : {}'.format(
                self._config['no_trade']))
    return within_period
def test_parse_line_time(self):
    """A Time field followed by a String field parses both records."""
    raw = '23:21:12 aaa\n19:22:44 bbb'
    time_field = Time(11, 'time')
    text_field = String(3, 'str')
    layout = (time_field, text_field)
    first_expected = [
        {'key': 'time', 'value': datetime_time(23, 21, 12)},
        {'key': 'str', 'value': 'aaa'},
    ]
    second_expected = [
        {'key': 'time', 'value': datetime_time(19, 22, 44)},
        {'key': 'str', 'value': 'bbb'},
    ]
    parsed = list(FixedWidth(layout, raw))
    self.compare_line(first_expected, parsed[0])
    self.compare_line(second_expected, parsed[1])
def test_parse_line_time_custom_format(self):
    """A Time field honours a caller-supplied date_formats pattern."""
    raw = '02_21_12 aaa\n07_22_44 bbb'
    time_field = Time(11, 'time', parse_kwargs={'date_formats': ['%H_%M_%S']})
    text_field = String(3, 'str')
    layout = (time_field, text_field)
    first_expected = [
        {'key': 'time', 'value': datetime_time(2, 21, 12)},
        {'key': 'str', 'value': 'aaa'},
    ]
    second_expected = [
        {'key': 'time', 'value': datetime_time(7, 22, 44)},
        {'key': 'str', 'value': 'bbb'},
    ]
    parsed = list(FixedWidth(layout, raw))
    self.compare_line(first_expected, parsed[0])
    self.compare_line(second_expected, parsed[1])
def test_serialize_line_time_from_date_input(self):
    """Serializing datetime.time values uses the default %H:%M:%S format."""
    a = Time(16, 'a')
    b = Time(16, 'b')
    definition = (a, b)
    response = Bunch()
    response.a = datetime_time(15, 39, 1)
    response.b = datetime_time(23, 33, 44)
    fw = FixedWidth(definition)
    result = fw.serialize(response)
    # FIX: assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(result, '15:39:01 23:33:44 ')
def test_serialize_line_time_from_date_input_with_output_format(self):
    """Serializing datetime.time values honours a custom output format."""
    a = Time(12, 'a', '%I--%M//%S')
    b = Time(12, 'b', '%I--%M//%S')
    definition = (a, b)
    response = Bunch()
    response.a = datetime_time(15, 39, 1)
    response.b = datetime_time(23, 33, 44)
    fw = FixedWidth(definition)
    result = fw.serialize(response)
    # FIX: assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(result, '03--39//01 11--33//44 ')
def _convert_seconds_to_iso_time(all_seconds: int) -> str: """Convert seconds to iso time. Args: all_seconds: the total number of seconds to convert. Return: A string represnting the converted iso time in %H:%M:%S format. e.g. "02:24:37". Raises: aioswitcher.erros.CalculationError: when failed to convert the argument. Note: This is a private function containing blocking code. Please consider using ``convert_seconds_to_iso_time`` (without the `_`), to schedule as a task in the event loop. """ try: minutes, seconds = divmod(int(all_seconds), 60) hours, minutes = divmod(minutes, 60) return datetime_time(hour=hours, minute=minutes, second=seconds).isoformat() except HANDLED_EXCEPTIONS as ex: raise CalculationError("failed to convert seconds to iso time") from ex
def turnon_joinverify(self):
    """Register all handlers for the join-verification workflow.

    Registration order is preserved: private commands first, then
    membership updates, the callback handler, and finally the daily
    kernel-refresh job.
    """
    # Private-chat commands, registered in order.
    for command, callback in (("start", self.start),
                              ("restart", self.start),
                              ("continue", self.sendCaptcha),
                              ("quit", self.sendByebye)):
        self.dp.add_handler(
            CommandHandler(command, callback,
                           filters=(Filters.command & Filters.private)))
    self.dp.add_handler(
        CommandHandler("getlink", self.getlink, pass_job_queue=True,
                       filters=(Filters.command & Filters.private)))
    # Membership change notifications (joins and leaves).
    self.dp.add_handler(
        MessageHandler(Filters.status_update.new_chat_members,
                       self.newmember))
    self.dp.add_handler(
        MessageHandler(Filters.status_update.left_chat_member,
                       self.leftmember))
    self.dp.add_handler(CallbackQueryHandler(self.button))
    # Daily kernel refresh at 20:00.
    UPDATE_HOUR = 20
    from datetime import time as datetime_time
    self.job_queue.run_daily(self.refresh_kernel, datetime_time(UPDATE_HOUR))
def shift_time_forward(data):
    """Return a copy of *data* whose 'datetime' entries are shifted four
    hours forward, wrapping around midnight.

    Only hour and minute are carried over into the new time values.
    """
    shifted = data.copy()
    adjusted = []
    for original in shifted['datetime']:
        adjusted.append(
            datetime_time((original.hour + 4) % 24, original.minute))
    shifted['datetime'] = adjusted
    return shifted
def targetDateSpan(date_or_year, crop=None, variety=None):
    """Return the (start_date, end_date) dates bounding a target season.

    Args:
        date_or_year: either a date-like object (the target year is then
            derived via _targetYearFromDate) or an integer year.
        crop: optional crop name; selects the 'crops.<crop>' config branch
            instead of 'default'.
        variety: optional variety, appended to the crop config path.

    Returns:
        tuple(datetime.date, datetime.date): season start (in the year
        before the target year) and season end (in the target year).
    """
    if crop is not None:
        path = 'crops.%s' % crop
        if variety is not None:
            path = '%s.variety.%s' % (path, varietyName(variety))
        config = fromConfig(path)
    else:
        config = fromConfig('default')
    if isinstance(date_or_year, (datetime_date, datetime_time)):
        target_year = _targetYearFromDate(date_or_year, config)
    else:
        target_year = date_or_year
    # BUG FIX: these tuples are (year, month, day) and must be built as
    # dates.  datetime_time(year, ...) treated the year as an hour and
    # raised ValueError for any year > 23.
    start_date = (target_year - 1, ) + config.start_day
    start_date = datetime_date(*start_date)
    end_date = (target_year, ) + config.end_day
    end_date = datetime_date(*end_date)
    return start_date, end_date
def add_to_time_distribution(self, time):
    """Classify *time* as night/morning/day/evening and bump its counter
    in self.time_distribution."""
    # Bucket boundaries: [00:00, 06:00) night, [06:00, 12:00) morning,
    # [12:00, 18:00) day, everything else evening.
    if time < datetime_time(6, 0, 0):
        bucket = 'night'
    elif time < datetime_time(12, 0, 0):
        bucket = 'morning'
    elif time < datetime_time(18, 0, 0):
        bucket = 'day'
    else:
        bucket = 'evening'
    self.time_distribution[bucket] = self.time_distribution.get(bucket, 0) + 1
def prepare_REST_event(parsed_data):
    """Translate a dict of parsed Zabbix alert fields into a REST event
    payload.

    Keys listed in ATTR_MAP are renamed (and, via MORE_MAPS, value-mapped)
    into either event['attributes'] or the event root; 'EVENT.DATE' /
    'EVENT.TIME' are combined into event['timestamp']; '_API_KEY' is
    dropped; every other key is namespaced under 'zabbix/'.

    Raises:
        ValueError: if any ATTR_MAP key is absent from parsed_data.
    """
    event = {"attributes": {}}
    # Track which mandatory (ATTR_MAP) keys have not been seen yet.
    missing = set(ATTR_MAP.keys())
    event_date = None
    event_time = None
    for k, v in parsed_data.items():
        if k in ATTR_MAP:
            missing.remove(k)
            dst_key = ATTR_MAP[k]
            # Some attributes also need their value translated.
            if k in MORE_MAPS:
                v = MORE_MAPS[k][v.upper()]
            # Check if we want to put this mapped attr
            # into 'attributes' or the root of the event
            if "/" in dst_key or dst_key in BARE_ATTRS:
                event["attributes"][dst_key] = v
            else:
                event[dst_key] = v
        else:
            if k == "EVENT.DATE":
                try:
                    # Expected format: 'YYYY.MM.DD'; silently ignored if not.
                    year, month, day = v.split(".")
                    event_date = datetime(int(year), int(month), int(day))
                except (ValueError, TypeError) as e:
                    pass
            elif k == "EVENT.TIME":
                try:
                    # Expected format: 'HH:MM:SS'; silently ignored if not.
                    hour, minute, second = v.split(":")
                    event_time = datetime_time(int(hour), int(minute),
                                               int(second))
                except (ValueError, TypeError) as e:
                    pass
            elif k == "_API_KEY":
                # We don't want to pass this on as an attribute
                pass
            else:
                # Unknown keys are forwarded under a 'zabbix/' namespace.
                dst_key = "zabbix/{rekey}".format(
                    rekey=zabbix_key_to_signifai_key(k))
                event['attributes'][dst_key] = v
    if event_date:
        # A time without a date is useless; only merge when both exist.
        if event_time:
            event_date = event_date.replace(
                hour=event_time.hour,
                minute=event_time.minute,
                second=event_time.second
            )
    else:
        # No parsable event date: fall back to "now".
        event_date = datetime.now()
    if missing:
        raise ValueError("Missing attributes: {attribs}".format(
            attribs=str.join(", ", missing)))
    # timestamp is local-epoch seconds derived from the (merged) datetime.
    event["timestamp"] = int(time.mktime(event_date.timetuple()))
    event["event_source"] = "zabbix"
    return event
def dayRanges(d1, d2, periodHours=24, startHour=0):
    """
    list of (start,end) in unix seconds that covers N-hour periods
    aligned to startHour in your local time zone

    >>> from pprint import pprint
    >>> rgs = dayRanges(date(2008,1,5), date(2008,1,7), startHour=7)
    >>> pprint([[str(datetime.fromtimestamp(x)) for x in r] for r in rgs])
    [['2008-01-05 07:00:00', '2008-01-06 06:59:59'],
     ['2008-01-06 07:00:00', '2008-01-07 06:59:59'],
     ['2008-01-07 07:00:00', '2008-01-08 06:59:59']]
    """
    spans = []
    cursor = datetime.combine(d1, datetime_time(startHour, 0, 0))
    limit = datetime.combine(d2, datetime_time(23, 59, 59))
    step = timedelta(hours=periodHours)
    while cursor < limit:
        # End one resolution tick before the next period starts.
        span_end = cursor + step - timedelta.resolution
        spans.append((time.mktime(cursor.timetuple()),
                      time.mktime(span_end.timetuple())))
        cursor = cursor + step
    return spans
def _convert_seconds_to_iso_time(all_seconds: int) -> str: """Convert seconds to iso time (%H:%M:%S).""" try: minutes, seconds = divmod(int(all_seconds), 60) hours, minutes = divmod(minutes, 60) return datetime_time(hour=hours, minute=minutes, second=seconds).isoformat() except HANDLED_EXCEPTIONS as ex: raise CalculationError("failed to convert seconds to iso time") from ex
def dayRanges(d1, d2, periodHours=24, startHour=0):
    """
    list of (start,end) in unix seconds that covers N-hour periods
    aligned to startHour in your local time zone

    >>> from pprint import pprint
    >>> rgs = dayRanges(date(2008,1,5), date(2008,1,7), startHour=7)
    >>> pprint([[str(datetime.fromtimestamp(x)) for x in r] for r in rgs])
    [['2008-01-05 07:00:00', '2008-01-06 06:59:59'],
     ['2008-01-06 07:00:00', '2008-01-07 06:59:59'],
     ['2008-01-07 07:00:00', '2008-01-08 06:59:59']]
    """
    def to_epoch(moment):
        # Local-time epoch seconds for a naive datetime.
        return time.mktime(moment.timetuple())

    out = []
    t1 = datetime.combine(d1, datetime_time(startHour, 0, 0))
    while t1 < datetime.combine(d2, datetime_time(23, 59, 59)):
        t2 = t1 + timedelta(hours=periodHours) - timedelta.resolution
        out.append((to_epoch(t1), to_epoch(t2)))
        t1 += timedelta(hours=periodHours)
    return out
def get_start_datetime():
    """Resolve the START_DATE environment variable into a datetime.

    The literal value 'yesterday' yields midnight of the previous day;
    any other value is parsed with DATE_FORMAT.  On a malformed or
    missing value the error is logged and the process exits.
    """
    try:
        start_date_env = os.getenv(START_DATE)
        if start_date_env == 'yesterday':
            start_of_day = datetime.combine(date.today(), datetime_time())
            return start_of_day - timedelta(days=1)
        # BUG FIX: strptime raises TypeError (not ValueError) when the
        # variable is unset (None); catch both so we exit cleanly instead
        # of crashing with an unhandled exception.
        return datetime.strptime(start_date_env, DATE_FORMAT)
    except (TypeError, ValueError):
        get_logger().exception(
            f'START_DATE has an invalid format. Please follow the format "{DATE_FORMAT}"'
            + ' or use the keyword "yesterday".')
        exit(1)
def parse_time(self, string: str) -> datetime_time:
    """Parse 'HH:MM', 'HH.MM' or a bare 'HH' string into datetime.time.

    Returns None for an empty/falsy string.  An hour of 24 is accepted
    as an alias for midnight (wraps to 0).
    """
    if not string:
        return None
    # FIX: renamed local 'min' -> 'minute'; it shadowed the builtin.
    if ":" in string:
        hour, minute = map(int, string.split(":"))
    elif "." in string:
        hour, minute = map(int, string.split("."))
    else:
        hour = int(string)
        minute = 0
    if hour == 24:
        hour = 0
    return datetime_time(hour=hour, minute=minute)
def at(self, time_str):
    """Schedule the job every day at a specific time.

    Calling this is only valid for jobs scheduled to run every N day(s).

    Args:
        time_str: 'HH:MM' string.  For hourly jobs the hour component is
            ignored (forced to 0) and only the minute is used.

    Returns:
        self, so calls can be chained.
    """
    assert self.unit in ('days', 'hours') or self.start_day
    # FIX: dropped the pointless identity comprehension around split().
    hour, minute = time_str.split(':')
    minute = int(minute)
    if self.unit == 'days' or self.start_day:
        hour = int(hour)
        assert 0 <= hour <= 23
    elif self.unit == 'hours':
        hour = 0
    assert 0 <= minute <= 59
    self.at_time = datetime_time(hour, minute)
    return self
def coerce_dt_awareness(date_or_datetime, tz=None):
    """
    Coerce the given `datetime` or `date` object into a timezone-aware or
    timezone-naive `datetime` result, depending on which is appropriate for
    the project's settings.
    """
    if tz is None:
        tz = get_current_timezone()
    # Promote bare dates to midnight datetimes before adjusting awareness.
    dt = (date_or_datetime
          if isinstance(date_or_datetime, datetime)
          else datetime.combine(date_or_datetime, datetime_time()))
    wants_aware = settings.USE_TZ
    if wants_aware and is_naive(dt):
        return make_aware(dt, tz)
    if not wants_aware and is_aware(dt):
        return make_naive(dt, tz)
    # Already in the form the project expects.
    return dt
def post(self, key): user = users.get_current_user() #feeds = db.GqlQuery("SELECT * FROM Feed WHERE __key__ = :1 AND user = :2", db.Key(key), user) #feed = feeds.fetch(1)[0] feed = Feed.get(key) dt = self.request.get("dt") if dt and dt.index(":") > -1: hr, min = dt.split(":") feed.digest_time = datetime_time(int(hr), int(min)) digest_type = self.request.get("d") if digest_type == "i": # instant digest feed.last_custom_digest_days = feed.digest_days feed.digest_days = 0 elif digest_type == "c": days_bitfield = 0 d = ["d0", "d1", "d2", "d3", "d4", "d5", "d6"] for i in xrange(len(d)): if self.request.get(d[i]): days_bitfield |= 1 << i if days_bitfield == 0: # user could have switched back from instant. restore settings days_bitfield = feed.last_custom_digest_days feed.digest_days = days_bitfield else: print "xxx" return feed.save() # Update User and Feed for _digest_next datetime updateUserNextDigest(user, getUserPrefs(user)) self.redirect("/feeds/update/%s" % key)
def is_night():
    """Return True between 17:00 and 09:00 local time, False otherwise.

    Also returns False (instead of the previous implicit None) when the
    night feature is disabled, so callers always get a bool.
    """
    if not NIGHT_ENABLED:
        # BUG FIX: bare 'return' yielded None, giving this predicate an
        # inconsistent tri-state result; None is falsy so returning False
        # is backward-compatible.
        return False
    now = datetime.now().time()
    return now >= datetime_time(17, 00) or now < datetime_time(9, 00)
def main(self):
    """Wire up every Telegram handler for the bot and start polling.

    Registration order matters to python-telegram-bot's dispatcher:
    handlers added earlier in the same group get first chance at an
    update, and the two ConversationHandlers live in separate groups
    (0 and 2) so they can coexist with the plain message handlers.
    Blocks in updater.idle() until interrupted.
    """
    # Join verification is optional; without it /start goes straight in.
    if TURNON_JOIN_GROUP_VERIFY:
        self.turnon_joinverify()
    else:
        self.dp.add_handler(
            CommandHandler("start", self.start_withoutjoinverify))
    self.dp.add_handler(
        CommandHandler("help", self.help,
                       filters=(Filters.command & Filters.private)))
    self.dp.add_handler(CommandHandler("printchatid", ChatBot.printchatid))
    self.dp.add_handler(CommandHandler("printuserid", ChatBot.printuserid))
    self.dp.add_handler(
        CommandHandler("ping", ChatBot.ping,
                       filters=(Filters.command & Filters.private)))
    self.dp.add_handler(
        CallbackQueryHandler(self.button, pass_job_queue=True))
    self.dp.add_handler(
        CommandHandler("refresh", self.refresh,
                       filters=(Filters.command & Filters.private)))
    # Daily maintenance job at hour REFRESH_TIME.
    self.job_queue.run_daily(self.dailyrefresh, datetime_time(REFRESH_TIME))
    # for reportbug: /reportbug opens a conversation that forwards the
    # next private non-command message; /quitreport aborts it.
    rb_handler = ConversationHandler(
        entry_points=[
            CommandHandler('reportbug', self.reportbug,
                           filters=(Filters.command & Filters.private))
        ],
        states={
            REPORTINGBUG: [
                MessageHandler(Filters.private & (~Filters.command),
                               self.forwardbug)
            ]
        },
        fallbacks=[
            CommandHandler('quitreport', self.quitreport,
                           filters=(Filters.command & Filters.private))
        ])
    self.dp.add_handler(rb_handler, group=0)
    # for private chat: /startpc collects a peer name, then a
    # verification message; /cancelpc aborts.
    pc_handler = ConversationHandler(
        entry_points=[
            CommandHandler('startpc', self.startPC,
                           filters=(Filters.command & Filters.private),
                           pass_args=True)
        ],
        states={
            GETTINGNAME: [
                MessageHandler(Filters.private & Filters.text,
                               self.getPeerName)
            ],
            GETTINGMSG: [
                MessageHandler(Filters.private & Filters.text,
                               self.getPCVerifyMsg)
            ]
        },
        fallbacks=[
            CommandHandler('cancelpc', self.cancelPC,
                           filters=(Filters.command & Filters.private))
        ])
    self.dp.add_handler(pc_handler, group=2)
    # Replies in private chat are relayed as private-chat messages.
    self.dp.add_handler(
        MessageHandler(
            Filters.private & Filters.reply & (~Filters.command),
            self.pc_message))
    self.turnon_forwardmsg()
    self.turnon_groupalert()
    self.dp.add_error_handler(self.error)
    self.updater.start_polling()
    self.updater.idle()
def make_person_sequence(self):
    """Assemble per-person, per-time-bucket feature rows.

    Buckets are (PERSON_ID, DATE, HOURGRP) where HOURGRP is the hour of
    day divided by self.group_hour.  Each row concatenates demographic,
    measurement, and (cumulative) condition features.  Results land in
    self.feature_ary (float32 matrix) and self.feature_key_df (the
    bucket keys, same row order).
    """
    start_time = time.time()
    # Determine each patient's first and last observation timestamps.
    timerange_df = self.cohort_df.groupby('SUBJECT_ID').agg({
        'COHORT_START_DATE': 'min',
        'COHORT_END_DATE': 'max'
    })
    timerange_df['START_DATE'] = timerange_df.COHORT_START_DATE.dt.date
    timerange_df[
        'START_HOURGRP'] = timerange_df.COHORT_START_DATE.dt.hour // self.group_hour
    timerange_df['END_DATE'] = timerange_df.COHORT_END_DATE.dt.date
    timerange_df[
        'END_HOURGRP'] = timerange_df.COHORT_END_DATE.dt.hour // self.group_hour
    timerange_df = timerange_df.drop(
        ['COHORT_START_DATE', 'COHORT_END_DATE'], axis=1)
    # All source frames sorted the same way so a single forward index per
    # array suffices during the merge below.
    demographic_ary = self.person_df.sort_values('PERSON_ID',
                                                 ascending=True).values
    condition_ary = self.condition_df.sort_values(
        ['PERSON_ID', 'DATE', 'HOURGRP'], ascending=True).values
    measurement_ary = self.measurement_df.sort_values(
        ['PERSON_ID', 'DATE', 'HOURGRP'], ascending=True).values
    timerange_ary = timerange_df.sort_values(
        'SUBJECT_ID', ascending=True).reset_index().values
    demographic_cols = ["AGE_HOUR", "GENDER"]
    condition_cols = self.condition_df.columns[3:]
    measurement_cols = self.measurement_df.columns[3:]
    # Enumerate every bucket between each patient's start and end so the
    # sequence has no gaps in its time range.
    max_hourgrp = (24 // self.group_hour) - 1
    key_list = []
    for person_id, start_date, start_hourgrp, end_date, end_hourgrp in timerange_ary:
        cur_date = start_date
        cur_hourgrp = start_hourgrp
        while True:
            key_list.append((person_id, cur_date, cur_hourgrp))
            cur_hourgrp += 1  # advance by one group-hour
            if cur_hourgrp > max_hourgrp:  # roll over to the next day
                cur_date = cur_date + timedelta(days=1)
                cur_hourgrp = 0
            if cur_date > end_date or \
                    (cur_date == end_date and cur_hourgrp >= end_hourgrp):
                # reached the end of this patient's range
                break
    # Fill the feature matrix according to the bucket keys.
    demographic_idx = condition_idx = measurement_idx = 0
    prev_person_id = None
    prev_conditions = None
    data_cols = list(demographic_cols) + list(measurement_cols) + list(
        condition_cols)
    data_list = np.zeros((len(key_list), len(data_cols)), dtype=np.float32)
    for idx, row in enumerate(key_list):
        person_id, date, hourgrp = row
        # NOTE(review): reconstructed from a garbled line break; reads as a
        # chained zero-initialisation of both column cursors.
        col_start_idx = col_end_idx = 0
        col_end_idx += len(demographic_cols)
        # --- Demographics ---
        while True:
            if demographic_idx >= len(demographic_ary):
                break
            demographic_row = demographic_ary[demographic_idx]
            demographic_person_id = demographic_row[0]
            # Use matching timezones so the age subtraction is valid.
            # NOTE(review): hourgrp is a bucket index, not an hour of day --
            # confirm this is intentional for the age computation.
            demographic_age = datetime.combine(
                date,
                datetime_time(hour=hourgrp,
                              tzinfo=timezone.utc)).astimezone(
                                  pytz.utc) - demographic_row[1]
            demographic_gender = demographic_row[2]
            demographic_data = [
                demographic_age.total_seconds() // 3600., demographic_gender
            ]
            state = 0  # 0: keep scanning  1: match found  2: no match
            if demographic_person_id > person_id:
                # moved past this patient
                state = 2
            elif demographic_person_id == person_id:
                # matching record
                state = 1
            if state == 0:  # keep scanning
                demographic_idx += 1
            elif state == 1:  # match found
                data_list[idx, col_start_idx:col_end_idx] = demographic_data
                break
            elif state == 2:  # no matching record
                break
        # --- Measurements ---
        col_start_idx = col_end_idx
        col_end_idx += len(measurement_cols)
        while True:
            if measurement_idx >= len(measurement_ary):
                break
            measurement_row = measurement_ary[measurement_idx]
            measurement_person_id = measurement_row[0]
            measurement_date = measurement_row[1]
            measurement_hourgrp = measurement_row[2]
            measurement_data = measurement_row[3:]
            state = 0  # 0: keep scanning  1: match found  2: no match
            if measurement_person_id > person_id:
                # moved past this patient
                state = 2
            elif measurement_person_id == person_id:
                if measurement_date > date:
                    # moved past this date
                    state = 2
                elif measurement_date == date:
                    if measurement_hourgrp > hourgrp:
                        # moved past this group-hour
                        state = 2
                    elif measurement_hourgrp == hourgrp:
                        # matching record
                        state = 1
            if state == 0:  # keep scanning
                measurement_idx += 1
            elif state == 1:  # match found
                data_list[idx, col_start_idx:col_end_idx] = measurement_data
                measurement_idx += 1
                break
            elif state == 2:  # no matching record
                break
        # --- Conditions ---
        col_start_idx = col_end_idx
        col_end_idx += len(condition_cols)
        # Different patient than before: reset the accumulated conditions.
        if prev_person_id != person_id:
            prev_conditions = np.array([0] * len(condition_cols))
        while True:
            if condition_idx >= len(condition_ary):
                break
            condition_row = condition_ary[condition_idx]
            condition_person_id = condition_row[0]
            condition_date = condition_row[1]
            condition_hourgrp = condition_row[2]
            condition_data = condition_row[3:]
            state = 0  # 0: keep scanning  1: match found  2: no match
            if condition_person_id > person_id:
                # moved past this patient
                state = 2
            elif condition_person_id == person_id:
                if condition_date > date:
                    # moved past this date
                    state = 2
                elif condition_date == date:
                    if condition_hourgrp > hourgrp:
                        # moved past this group-hour
                        state = 2
                    elif condition_hourgrp == hourgrp:
                        # matching record
                        state = 1
            if state == 0:  # keep scanning
                condition_idx += 1
            elif state == 1:  # match found
                # Conditions are cumulative: merge with earlier buckets.
                prev_conditions = np.array(prev_conditions) + np.array(
                    condition_data)
                data_list[idx, col_start_idx:col_end_idx] = prev_conditions
                condition_idx += 1
                break
            elif state == 2:  # no matching record
                break
        prev_person_id = person_id
    self.feature_ary = data_list
    self.feature_key_df = pd.DataFrame(
        key_list, columns=['PERSON_ID', 'DATE', 'HOURGRP'])
    print("data_loader make_person_sequence time:",
          time.time() - start_time)
def get_all_conversation(request):
    """Staff-only POST endpoint returning chat rooms as JSON.

    POST params: 'start' (pagination offset), 'filter'
    ('open'/'closed'/'today'/anything else for all) and 'str_search'
    (substring match on full_name).  Each room dict includes an MD5 hash
    of the email (for avatar services) and the unseen-message count.
    """
    data = {'error': True, 'message': "Có lỗi trong quá trình xử lý"}
    # Only staff/superusers may list conversations.
    if not (request.user.is_staff or request.user.is_superuser):
        return JsonResponse(data)
    if (request.method == 'POST'):
        start = request.POST.get('start', '')
        if (start == ""):
            start = 0
        filter = request.POST.get('filter', 'all')
        str_search = request.POST.get('str_search', '')
        # Boundaries of "today": [midnight today, midnight tomorrow).
        today = datetime.now().date()
        tomorrow = today + timedelta(1)
        today_start = datetime.combine(today, datetime_time())
        today_end = datetime.combine(tomorrow, datetime_time())
        # NOTE(review): the slice [start:10] yields AT MOST the rows between
        # index `start` and index 10 — empty whenever start >= 10.  For
        # "10 rows per page" pagination this likely should be
        # [start:start + 10]; confirm intent before changing.
        if filter == 'open':
            room_lists = Room.objects.filter(
                status="open",
                full_name__contains=str_search).order_by('-id')[start:10]
        elif filter == 'closed':
            room_lists = Room.objects.filter(
                status="closed",
                full_name__contains=str_search).order_by('-id')[start:10]
        elif filter == 'today':
            room_lists = Room.objects.filter(
                created__lte=today_end,
                created__gte=today_start,
                full_name__contains=str_search).order_by('-id')[start:10]
        else:
            room_lists = Room.objects.filter(
                full_name__contains=str_search).order_by('-id')[start:10]
        data['error'] = False
        if len(room_lists) > 0:
            arr = []
            for item in room_lists:
                item = Room.objects.get(id=item.id)
                email = item.email
                # MD5 of the email (Gravatar-style identifier).
                hash_email = ""
                if email != "":
                    hash_md5 = hashlib.md5()
                    hash_md5.update(email.encode('utf-8'))
                    hash_email = hash_md5.hexdigest()
                arr.append({
                    'id': item.id,
                    'channel_type': item.channel_type.name,
                    'key_hash': item.key_hash,
                    'full_name': item.full_name,
                    'email': item.email,
                    'phone': item.phone,
                    'hash_email': hash_email,
                    'created':
                    item.created.strftime(settings.DATETIME_FORMAT),
                    'not_viewed':
                    len(item.get_all_message_backend_not_viewed(request.user))
                })
            data['message'] = u'Thành công'
            data['room_list'] = arr
    return JsonResponse(data)
def test_binding(conn_cnx, db_parameters):
    """
    Paramstyle qmark basic tests
    """
    # Round-trip every bindable type through a qmark-style insert/select:
    # booleans, numerics, strings, binaries, the four timestamp flavours,
    # DATE, TIME, struct_time and timedelta values.
    with conn_cnx(paramstyle=u'qmark') as cnx:
        cnx.cursor().execute("""
create or replace table {name} (
    c1 BOOLEAN, c2 INTEGER, c3 NUMBER(38,2), c4 VARCHAR(1234),
    c5 FLOAT, c6 BINARY, c7 BINARY, c8 TIMESTAMP_NTZ,
    c9 TIMESTAMP_NTZ, c10 TIMESTAMP_NTZ, c11 TIMESTAMP_NTZ,
    c12 TIMESTAMP_LTZ, c13 TIMESTAMP_LTZ, c14 TIMESTAMP_LTZ,
    c15 TIMESTAMP_LTZ, c16 TIMESTAMP_TZ, c17 TIMESTAMP_TZ,
    c18 TIMESTAMP_TZ, c19 TIMESTAMP_TZ, c20 DATE, c21 TIME,
    c22 TIMESTAMP_NTZ, c23 TIME, c24 STRING
)
""".format(name=db_parameters['name']))
    # The same wall-clock instant in four different awareness states.
    current_utctime = datetime.utcnow()
    current_localtime = pytz.utc.localize(current_utctime,
                                          is_dst=False).astimezone(
                                              pytz.timezone(PST_TZ))
    current_localtime_without_tz = datetime.now()
    current_localtime_with_other_tz = pytz.utc.localize(
        current_localtime_without_tz,
        is_dst=False).astimezone(pytz.timezone(JST_TZ))
    dt = date(2017, 12, 30)
    tm = datetime_time(hour=1, minute=2, second=3, microsecond=456)
    struct_time_v = time.strptime("30 Sep 01 11:20:30", "%d %b %y %H:%M:%S")
    # timedelta equivalent of tm, for TIME binding via a delta.
    tdelta = timedelta(seconds=tm.hour * 3600 + tm.minute * 60 + tm.second,
                       microseconds=tm.microsecond)
    try:
        with conn_cnx(paramstyle=u'qmark', timezone=PST_TZ) as cnx:
            cnx.cursor().execute(
                """
insert into {name} values(
?,?,?, ?,?,?, ?,?,?, ?,?,?, ?,?,?, ?,?,?, ?,?,?, ?,?,?)
""".format(name=db_parameters['name']),
                (
                    True, 1, Decimal("1.2"), 'str1', 1.2,
                    # Py2 has bytes in str type, so Python Connector
                    bytes(b'abc') if not PY2 else bytearray(b'abc'),
                    bytearray(b'def'), current_utctime, current_localtime,
                    current_localtime_without_tz,
                    current_localtime_with_other_tz,
                    # (type_name, value) tuples force the bind type.
                    (u"TIMESTAMP_LTZ", current_utctime),
                    (u"TIMESTAMP_LTZ", current_localtime),
                    (u"TIMESTAMP_LTZ", current_localtime_without_tz),
                    (u"TIMESTAMP_LTZ", current_localtime_with_other_tz),
                    (u"TIMESTAMP_TZ", current_utctime),
                    (u"TIMESTAMP_TZ", current_localtime),
                    (u"TIMESTAMP_TZ", current_localtime_without_tz),
                    (u"TIMESTAMP_TZ", current_localtime_with_other_tz),
                    dt, tm,
                    (u"TIMESTAMP_NTZ", struct_time_v),
                    (u"TIME", tdelta),
                    (u"TEXT", None)))
            ret = cnx.cursor().execute(
                """
select * from {name} where c1=? and c2=?
""".format(name=db_parameters['name']), (True, 1)).fetchone()
            assert ret[0], "BOOLEAN"
            assert ret[2] == Decimal("1.2"), "NUMBER"
            assert ret[4] == 1.2, "FLOAT"
            assert ret[5] == b'abc'
            assert ret[6] == b'def'
            assert ret[7] == current_utctime
            # Timestamps are compared by epoch to sidestep tz rendering.
            assert convert_datetime_to_epoch(
                ret[8]) == convert_datetime_to_epoch(current_localtime)
            assert convert_datetime_to_epoch(
                ret[9]) == convert_datetime_to_epoch(
                    current_localtime_without_tz)
            assert convert_datetime_to_epoch(
                ret[10]) == convert_datetime_to_epoch(
                    current_localtime_with_other_tz)
            assert convert_datetime_to_epoch(
                ret[11]) == convert_datetime_to_epoch(current_utctime)
            assert convert_datetime_to_epoch(
                ret[12]) == convert_datetime_to_epoch(current_localtime)
            assert convert_datetime_to_epoch(
                ret[13]) == convert_datetime_to_epoch(
                    current_localtime_without_tz)
            assert convert_datetime_to_epoch(
                ret[14]) == convert_datetime_to_epoch(
                    current_localtime_with_other_tz)
            assert convert_datetime_to_epoch(
                ret[15]) == convert_datetime_to_epoch(current_utctime)
            assert convert_datetime_to_epoch(
                ret[16]) == convert_datetime_to_epoch(current_localtime)
            assert convert_datetime_to_epoch(
                ret[17]) == convert_datetime_to_epoch(
                    current_localtime_without_tz)
            assert convert_datetime_to_epoch(
                ret[18]) == convert_datetime_to_epoch(
                    current_localtime_with_other_tz)
            assert ret[19] == dt
            assert ret[20] == tm
            assert convert_datetime_to_epoch(
                ret[21]) == calendar.timegm(struct_time_v)
            assert timedelta(seconds=ret[22].hour * 3600 +
                             ret[22].minute * 60 + ret[22].second,
                             microseconds=ret[22].microsecond) == tdelta
            assert ret[23] is None
    finally:
        # Always drop the scratch table, even when an assertion failed.
        with conn_cnx() as cnx:
            cnx.cursor().execute("""
drop table if exists {name}
""".format(name=db_parameters['name']))
class StaticTimeStampedModel(models.Model):
    """Model whose linked datetime fields are pinned to fixed times of day."""
    # start: time component overridden to midnight (00:00).
    start = LinkedTZDateTimeField(default=settings.TEST_DATETIME,
                                  time_override=datetime_time(0, 0))
    # end: time component overridden to the last representable microsecond
    # of the day (23:59:59.999999).
    end = LinkedTZDateTimeField(default=settings.TEST_DATETIME,
                                time_override=datetime_time(
                                    23, 59, 59, 999999))
def summarize_position(self):
    """Aggregate per-bot positions and profits on a 1-second loop.

    Forever: prunes stale bot reports, sums positions/profits per
    product, periodically (every ~30s) compares against the exchange's
    position via API and corrects persistent drift with market orders,
    exports profits to InfluxDB, and pushes graphs/summaries to Discord.
    Never returns.

    NOTE(review): this body was reconstructed from whitespace-mangled
    source; block nesting around the 'counter % 30' sections is the most
    plausible reading but could not be verified against the original
    indentation.
    """
    def time_in_range(start, end, x):
        # Inclusive range test, supporting ranges that wrap midnight.
        if start <= end:
            return start <= x <= end
        else:
            return start <= x or x <= end

    # Optional InfluxDB sink; disabled when the connection probe fails.
    influx_client = None
    if 'pos_server_discord_influxdb' in self._parameters._config:
        try:
            influx_client = InfluxDBClient(
                host=self._parameters.
                _config['pos_server_discord_influxdb'][0],
                port=self._parameters.
                _config['pos_server_discord_influxdb'][1],
                database='bots')
            influx_client.query('show measurements')
        except Exception as e:
            print(e)
            influx_client = None
    datas = {}
    last_position_diff = {}
    last_profit = {}  # previous profit per bot
    self.api_pending_time = time.time()
    counter = 0
    while True:
        time.sleep(1)
        counter += 1
        with self.lock:
            # Drop bot entries that have not reported for 5 minutes.
            for product, data in self._database.items():
                for key, value in data.items():
                    if time.time() - value['timestamp'] > 300:
                        del self._database[product][key]
                        datas = {}
                        break
            # Aggregate every bot's report into per-product totals.
            for product, data in self._database.items():
                if data == {}:
                    continue
                if product in self._last_base_value.keys():
                    datas[product] = {
                        'pos': 0.0,
                        'profit': 0,
                        'api1': 0,
                        'api2': 0,
                        'base': self._last_base_value[product]
                    }
                else:
                    datas[product] = {
                        'pos': 0.0,
                        'profit': 0,
                        'api1': 0,
                        'api2': 0,
                        'base': 0.0
                    }
                for key, value in data.items():
                    if counter % 30 == 0:
                        self._logger.info(
                            "{} : api1({:>3}) : api2({:>3}) : profit({:>+7.0f}) : Pos({:>+11.8f}) : Base({:+f}) : {:.1f} : {}"
                            .format(product, value['api1'], value['api2'],
                                    value['profit'], value['pos'],
                                    value['base'],
                                    time.time() - value['timestamp'], key))
                    # Persist per-bot profit to InfluxDB.
                    if key not in last_profit:
                        last_profit[key] = value['profit']
                    if influx_client != None:
                        influx_data = [{
                            "measurement": "bot_profit",
                            "tags": {
                                'bot': key,
                            },
                            "fields": {
                                'profit': value['profit'],
                                'profit_diff':
                                value['profit'] - last_profit[key],
                                'position': value['pos'],
                                'apicount': value['api1'],
                            }
                        }]
                        try:
                            start = datetime_time(0, 0, 0)
                            end = datetime_time(0, 2, 0)
                            now = datetime_time(
                                (datetime.utcnow() +
                                 timedelta(hours=9)).hour,
                                (datetime.utcnow() +
                                 timedelta(hours=9)).minute, 0)
                            # Keep 0:00-0:02 JST (daily rollover) out of
                            # the graphs.
                            if not time_in_range(start, end, now):
                                influx_client.write_points(influx_data)
                                last_profit[key] = value['profit']
                        except Exception as e:
                            self._logger.exception(
                                "Error while exporting to InfluxDB : {}, {}"
                                .format(e, traceback.print_exc()))
                    datas[product]['pos'] += value['pos']
                    datas[product]['profit'] += value['profit']
                    datas[product]['api1'] = max(datas[product]['api1'],
                                                 value['api1'])
                    datas[product]['api2'] += value['api2']
                    # All bots for a product must agree on the base offset.
                    if 'base' in datas[
                            product] and datas[product]['base'] != value[
                                'base'] and datas[product]['base'] != 0:
                        self._logger.error('base_offset error')
                    datas[product]['base'] = value['base']
                    self._last_base_value[product] = value['base']
        if counter % 30 == 0:
            self._logger.info('-' * 70)
            self._logger.info(
                ' profit position (base target ) fromAPI diff'
            )
        api1 = api2 = 0
        total_profit = 0
        for product, data in datas.items():
            if data == {}:
                continue
            self._parameters._config['product'] = product.strip()
            if counter % 30 == 0:
                # Minimum tradable unit differs per product.
                if self._parameters._config['product'] == 'BTC_JPY':
                    minimum_order_size = 0.001
                else:
                    minimum_order_size = 0.01
                # Compare the aggregated position against the exchange's.
                if time.time() < self.api_pending_time:
                    self._logger.info('API pending time')
                    actual = data['pos'] + data['base']
                else:
                    if self._order == None:
                        actual = 0
                    else:
                        actual = self._order._getpositions_api(
                        ) if time.time() > self.api_pending_time else data[
                            'pos'] + data['base']
                position_diff = round(actual - data['pos'] - data['base'],
                                      8)
                if product not in last_position_diff:
                    last_position_diff[product] = deque([0], maxlen=500)
                last_position_diff[product].append(position_diff)
                # Trailing stars flag how many consecutive checks showed
                # the identical non-trivial drift (****, ***, **, *).
                self._logger.info(
                    '{:>11}: {:>+9.0f} : {:>15.8f} ({:>+f} ={:>15.8f}) : {:>15.8f} : {:+.8f} {}'
                    .format(
                        product, data['profit'], data['pos'], data['base'],
                        data['pos'] + data['base'], actual, position_diff,
                        ' ' if (abs(position_diff) < minimum_order_size
                                or self._order == None) else
                        '****' if max(
                            list(last_position_diff[product])[-4:]) == min(
                                list(last_position_diff[product])[-4:])
                        else '***' if max(
                            list(last_position_diff[product])[-3:]) == min(
                                list(last_position_diff[product])[-3:])
                        else '**' if max(
                            list(last_position_diff[product])[-2:]) == min(
                                list(last_position_diff[product])[-2:])
                        else '*'))
                # Persist the aggregated per-product profit to InfluxDB.
                if product.replace(' ', '') not in last_profit:
                    last_profit[product.replace(' ', '')] = data['profit']
                if influx_client != None:
                    influx_data = [{
                        "measurement": "bot_profit",
                        "tags": {
                            'bot':
                            product.replace(' ', '') +
                            str(self._parameters._config['pos_server'][1]),
                        },
                        "fields": {
                            'profit': data['profit'],
                            'profit_diff':
                            data['profit'] -
                            last_profit[product.replace(' ', '')],
                            'position': data['pos'],
                        }
                    }]
                    try:
                        start = datetime_time(0, 0, 0)
                        end = datetime_time(0, 2, 0)
                        now = datetime_time(
                            (datetime.utcnow() + timedelta(hours=9)).hour,
                            (datetime.utcnow() + timedelta(hours=9)).minute,
                            0)
                        if not time_in_range(start, end, now):
                            # Keep 0:00-0:02 JST out of the graphs.
                            influx_client.write_points(influx_data)
                            last_profit[product.replace(
                                ' ', '')] = data['profit']
                    except Exception as e:
                        self._logger.exception(
                            "Error while exporting to InfluxDB : {}, {}".
                            format(e, traceback.print_exc()))
                # Local history ring buffer for the position graph.
                if product not in self._pos_history.keys():
                    self._pos_history[product] = deque(
                        maxlen=int(self._parameters.
                                   _config['pos_server_graph_period'] * 120))
                start = datetime_time(0, 0, 0)
                end = datetime_time(0, 2, 0)
                now = datetime_time(
                    (datetime.utcnow() + timedelta(hours=9)).hour,
                    (datetime.utcnow() + timedelta(hours=9)).minute, 0)
                if not time_in_range(start, end, now):
                    # Keep 0:00-0:02 JST out of the graphs.
                    self._pos_history[product].append([
                        time.time(), self.spot_price
                        if product.strip() == 'BTC_JPY' else self.fx_price,
                        data['profit'], data['pos'], self.api
                    ])
                total_profit += data['profit']
                if self._order != None:
                    # If the position has drifted identically for 4
                    # consecutive checks, correct it via market orders.
                    if max(list(last_position_diff[product])[-4:]) == min(
                            list(last_position_diff[product])[-4:]
                    ) and abs(position_diff) >= minimum_order_size:
                        maxsize = self._parameters._config[
                            'adjust_max_size'] if 'adjust_max_size' in self._parameters._config else 100
                        if position_diff < 0:
                            self._market_buy(min(-position_diff, maxsize),
                                             nocheck=True)
                        else:
                            self._market_sell(min(position_diff, maxsize),
                                              nocheck=True)
                api1 = max(api1, data['api1'])
                api2 += data['api2']
        # api1 += sum(self._parameters.api_counter)  # API calls made by this program itself
        self.api = api1
        if counter % 30 == 0:
            self._logger.info(
                ' api1 : {:.0f} api2 : {:.0f}'.format(
                    api1, api2))
            self._logger.info('-' * 70)
        if counter % 60 == 0:
            # Minute-resolution profit history (JST timestamp).
            self._profit_history.append([
                (datetime.utcnow() + timedelta(hours=9)).timestamp(),
                total_profit
            ])
        current_minutes = int(time.time() / 60)
        if self._minute != current_minutes:
            self._minute = current_minutes
            # Plot at a fixed interval (interval 0 means exactly once per
            # day, right after the JST date changes).
            if ((self._parameters._config['pos_server_discord_interval']
                 == 0 and self._today !=
                 (datetime.utcnow() + timedelta(hours=9)).strftime("%d"))
                    or (self._parameters.
                        _config['pos_server_discord_interval'] != 0 and
                        (current_minutes % self._parameters.
                         _config['pos_server_discord_interval']) == 0)):
                for product, history in self._pos_history.items():
                    if len(history) > 4 and self._database[product] != {}:
                        self.plot_position_graph(history, 'position.png')
                        message = '{} ポジション通知 {}'.format(
                            (datetime.utcnow() +
                             timedelta(hours=9)).strftime('%H:%M:%S'),
                            product)
                        self._send_discord(message, 'position.png')
                # Text summary of every bot's state for Discord.
                discord_send_str = ''
                total_pf = 0
                total_size = 0
                for product, data in self._database.items():
                    for key, value in data.items():
                        discord_send_str += "{} : api1({:>3}) : api2({:>3}) : profit({:>+7.0f}) : Pos({:>+11.8f}) : Base({:+f}) : {:.1f} : {}\n".format(
                            product, value['api1'], value['api2'],
                            value['profit'], value['pos'], value['base'],
                            time.time() - value['timestamp'], key)
                        total_pf += int(value['profit'])
                        total_size += float(value['pos'])
                discord_send_str += '-' * 70
                discord_send_str += 'TOTAL pf: {:>4} size: {:>1.8f}'.format(
                    total_pf, round(total_size, 8))
                if discord_send_str != '' and 'pos_server_discord_send_text' in self._parameters._config and self._parameters._config[
                        'pos_server_discord_send_text']:
                    self._send_discord(discord_send_str)
                if len(
                        self._profit_history
                ) > 4 and 'pos_server_discord_bitflyer_color' in self._parameters._config and self._parameters._config[
                        'pos_server_discord_bitflyer_color']:
                    message = '{} 損益通知 Profit:{:+.0f}'.format(
                        (datetime.utcnow() +
                         timedelta(hours=9)).strftime('%H:%M:%S'),
                        self.__plot_plofit_graph_bfcolor('profit.png'))
                    self._send_discord(message, 'profit.png')
            # Reset the daily profit tracking when the JST date changes.
            if self._today != (datetime.utcnow() +
                               timedelta(hours=9)).strftime("%d"):
                self._today = (datetime.utcnow() +
                               timedelta(hours=9)).strftime("%d")
                self._pos_history = {}
                self._profit_history.clear()
                for key, value in last_profit.items():
                    last_profit[key] = 0  # reset each bot's previous profit
            # Hot-reload the config file when it has been modified.
            if self._parameters._config['created_at'] != os.path.getmtime(
                    self._parameters._config_file):
                self._parameters.load_config_file(
                    self._parameters._config_file)
def test_binding(conn_cnx, db_parameters, bulk_array_optimization):
    """Paramstyle qmark binding tests to cover basic data types."""
    # One column per bound datatype exercised below: boolean, numerics, string,
    # binary, four flavors each of NTZ/LTZ/TZ timestamps (bound from different
    # source types), date, time, plus NULL/empty-string/escaping edge cases.
    CREATE_TABLE = """create or replace table {name} ( c1 BOOLEAN, c2 INTEGER, c3 NUMBER(38,2), c4 VARCHAR(1234), c5 FLOAT, c6 BINARY, c7 BINARY, c8 TIMESTAMP_NTZ, c9 TIMESTAMP_NTZ, c10 TIMESTAMP_NTZ, c11 TIMESTAMP_NTZ, c12 TIMESTAMP_LTZ, c13 TIMESTAMP_LTZ, c14 TIMESTAMP_LTZ, c15 TIMESTAMP_LTZ, c16 TIMESTAMP_TZ, c17 TIMESTAMP_TZ, c18 TIMESTAMP_TZ, c19 TIMESTAMP_TZ, c20 DATE, c21 TIME, c22 TIMESTAMP_NTZ, c23 TIME, c24 STRING, c25 STRING, c26 STRING ) """
    # 26 qmark placeholders, one per column above.
    INSERT = """ insert into {name} values( ?,?,?, ?,?,?, ?,?,?, ?,?,?, ?,?,?, ?,?,?, ?,?,?, ?,?,?,?,?) """
    with conn_cnx(paramstyle="qmark") as cnx:
        cnx.cursor().execute(CREATE_TABLE.format(name=db_parameters["name"]))
    # Same instant expressed four ways: naive UTC, PST-aware, naive local,
    # and JST-aware — to verify each binds correctly per timestamp flavor.
    current_utctime = datetime.utcnow()
    current_localtime = pytz.utc.localize(current_utctime, is_dst=False).astimezone(
        pytz.timezone(PST_TZ)
    )
    current_localtime_without_tz = datetime.now()
    current_localtime_with_other_tz = pytz.utc.localize(
        current_localtime_without_tz, is_dst=False
    ).astimezone(pytz.timezone(JST_TZ))
    dt = date(2017, 12, 30)
    tm = datetime_time(hour=1, minute=2, second=3, microsecond=456)
    struct_time_v = time.strptime("30 Sep 01 11:20:30", "%d %b %y %H:%M:%S")
    # timedelta equivalent of tm — used to bind a TIME value from a timedelta.
    tdelta = timedelta(
        seconds=tm.hour * 3600 + tm.minute * 60 + tm.second, microseconds=tm.microsecond
    )
    # Bind values for c1..c26; (TYPE, value) tuples force an explicit
    # Snowflake type for the binding instead of type inference.
    data = (
        True,
        1,
        Decimal("1.2"),
        "str1",
        1.2,
        # Py2 has bytes in str type, so Python Connector
        bytes(b"abc"),
        bytearray(b"def"),
        current_utctime,
        current_localtime,
        current_localtime_without_tz,
        current_localtime_with_other_tz,
        ("TIMESTAMP_LTZ", current_utctime),
        ("TIMESTAMP_LTZ", current_localtime),
        ("TIMESTAMP_LTZ", current_localtime_without_tz),
        ("TIMESTAMP_LTZ", current_localtime_with_other_tz),
        ("TIMESTAMP_TZ", current_utctime),
        ("TIMESTAMP_TZ", current_localtime),
        ("TIMESTAMP_TZ", current_localtime_without_tz),
        ("TIMESTAMP_TZ", current_localtime_with_other_tz),
        dt,
        tm,
        ("TIMESTAMP_NTZ", struct_time_v),
        ("TIME", tdelta),
        ("TEXT", None),
        "",
        ',an\\\\escaped"line\n',
    )
    try:
        with conn_cnx(paramstyle="qmark", timezone=PST_TZ) as cnx:
            csr = cnx.cursor()
            if bulk_array_optimization:
                # Threshold of 1 forces the stage-based array binding path
                # even for this single-row executemany.
                cnx._session_parameters[CLIENT_STAGE_ARRAY_BINDING_THRESHOLD] = 1
                csr.executemany(INSERT.format(name=db_parameters["name"]), [data])
            else:
                csr.execute(INSERT.format(name=db_parameters["name"]), data)
            ret = (
                cnx.cursor()
                .execute(
                    """ select * from {name} where c1=? and c2=? """.format(
                        name=db_parameters["name"]
                    ),
                    (True, 1),
                )
                .fetchone()
            )
            assert len(ret) == 26
            assert ret[0], "BOOLEAN"
            # NOTE(review): ret[1] (INTEGER) and ret[3] (VARCHAR) are not
            # asserted here; they are covered implicitly by the WHERE clause
            # and the row being found at all.
            assert ret[2] == Decimal("1.2"), "NUMBER"
            assert ret[4] == 1.2, "FLOAT"
            assert ret[5] == b"abc"
            assert ret[6] == b"def"
            assert ret[7] == current_utctime
            # Timestamp columns are compared by epoch seconds so that
            # equivalent instants in different tz representations match.
            assert convert_datetime_to_epoch(ret[8]) == convert_datetime_to_epoch(
                current_localtime
            )
            assert convert_datetime_to_epoch(ret[9]) == convert_datetime_to_epoch(
                current_localtime_without_tz
            )
            assert convert_datetime_to_epoch(ret[10]) == convert_datetime_to_epoch(
                current_localtime_with_other_tz
            )
            assert convert_datetime_to_epoch(ret[11]) == convert_datetime_to_epoch(
                current_utctime
            )
            assert convert_datetime_to_epoch(ret[12]) == convert_datetime_to_epoch(
                current_localtime
            )
            assert convert_datetime_to_epoch(ret[13]) == convert_datetime_to_epoch(
                current_localtime_without_tz
            )
            assert convert_datetime_to_epoch(ret[14]) == convert_datetime_to_epoch(
                current_localtime_with_other_tz
            )
            assert convert_datetime_to_epoch(ret[15]) == convert_datetime_to_epoch(
                current_utctime
            )
            assert convert_datetime_to_epoch(ret[16]) == convert_datetime_to_epoch(
                current_localtime
            )
            assert convert_datetime_to_epoch(ret[17]) == convert_datetime_to_epoch(
                current_localtime_without_tz
            )
            assert convert_datetime_to_epoch(ret[18]) == convert_datetime_to_epoch(
                current_localtime_with_other_tz
            )
            assert ret[19] == dt
            assert ret[20] == tm
            # struct_time was bound as TIMESTAMP_NTZ; compare via UTC epoch.
            assert convert_datetime_to_epoch(ret[21]) == calendar.timegm(struct_time_v)
            # TIME round-trip: rebuild a timedelta from the returned time.
            assert (
                timedelta(
                    seconds=ret[22].hour * 3600 + ret[22].minute * 60 + ret[22].second,
                    microseconds=ret[22].microsecond,
                )
                == tdelta
            )
            assert ret[23] is None
            assert ret[24] == ""
            assert ret[25] == ',an\\\\escaped"line\n'
    finally:
        # Always drop the scratch table, even if assertions fail.
        with conn_cnx() as cnx:
            cnx.cursor().execute(
                """ drop table if exists {name} """.format(
                    name=db_parameters["name"]
                )
            )
def seasonDate(target_year, month, day):
    """Return the calendar date on which month/day falls for the season
    ending in *target_year*.

    A season is assumed to span July..June: months January-June belong to
    the season's ending year (*target_year*), while July-December fall in
    the previous calendar year.

    Bug fix: the original called ``datetime_time(target_year, month, day)``;
    ``datetime.time`` takes (hour, minute, second), so any realistic year
    raised ``ValueError: hour must be in 0..23``. A ``datetime.date`` is
    what the (year, month, day) arguments describe.
    """
    if month < 7:
        return date(target_year, month, day)
    return date(target_year - 1, month, day)
import math
import os
import sys
import time
from datetime import date, datetime, time as datetime_time

# datetime demo: construction, component extraction, epoch conversion,
# combining, parsing and formatting.
# Bug fix: the original used datetime / date / datetime_time / math without
# importing them, so every line below raised NameError at runtime.
today = datetime.now()
print(today)
print(datetime.date(today))
print(datetime.time(today))
print(datetime.ctime(today))
print(datetime.utcnow())
print(datetime.timestamp(today))
print(datetime.fromtimestamp(datetime.timestamp(today)))
date1 = date(2019, 7, 14)
time1 = datetime_time(14, 14, 14)
print(datetime.combine(date1, time1))
print(datetime.strptime("12/2/18 20:59", '%d/%m/%y %H:%M'))
print(today.strftime("%Y年%m月%d日 %H:%M:%S %p"))

# math demo
# built-in rounding
print(round(7.54))
number1 = (1, 2, 3)
number2 = (1.1, 2.2, 3.3)
print(sum(number1))
print(sum(number2))
# math module truncation
print(math.trunc(7.54))
def summarize_position(self):
    # Aggregate per-strategy profit/position from self._database, export each
    # strategy's figures to InfluxDB, log a summary line, compare the summed
    # bot position against the exchange-reported position, and place
    # corrective orders when the discrepancy persists.

    def time_in_range(start, end, x):
        # True if x lies in [start, end]; the else-branch handles ranges
        # that wrap past midnight.
        if start <= end:
            return start <= x <= end
        else:
            return start <= x or x <= end

    with self.lock:
        summarize={'pos':0, 'profit':0}
        self._logger.info('\n\n')
        self._logger.info('-'*100)
        for strategy, value in self._database.items():
            self._logger.info("profit({:>+17.8f}) : Pos({:>+17.8f}) : Base({:>+10.3f}) : {:5.1f} : {}".format(
                value['profit'], value['pos'], value['base'], time.time()-value['timestamp'], strategy))
            try:
                # Skip the 00:00-00:02 window (UTC+9, i.e. JST) so the daily
                # reset does not distort the stored graph data.
                now = datetime_time((datetime.utcnow()+timedelta(hours=9)).hour, (datetime.utcnow()+timedelta(hours=9)).minute, 0)
                if not time_in_range(datetime_time(0, 0, 0), datetime_time(0, 2, 0), now):
                    # Store profit / profit delta / position into InfluxDB.
                    self.influxdb.write(
                        measurement="bfsx2",
                        tags={'exchange': "{}_{}".format(self.exchange,self.symbol), 'bot': strategy},
                        profit = value['profit'],
                        profit_diff = value['profit']-self._last_profit.get(strategy,value['profit']),
                        position = value['pos'])
                    self._last_profit[strategy] = value['profit']
                else:
                    self._last_profit[strategy] = 0
            except Exception as e:
                self._logger.exception("Error while exporting to InfluxDB : {}, {}".format(
                    e, traceback.print_exc()))
            summarize['pos'] += value['pos']
            summarize['profit'] += value['profit']
            # All strategies are expected to share one base offset; a mismatch
            # is logged but the latest value still wins.
            if summarize.get('base',value['base'])!=value['base'] :
                self._logger.error('base_offset error')
            summarize['base'] = value['base']
        self._logger.info('-'*100)
        self._logger.info(' profit position ( base target ) fromAPI diff')
        # Actual position reported by the exchange (0 when unauthenticated).
        actual = self.check_current_pos() if self.api._auth!=None else 0
        # Count consecutive occurrences of the same discrepancy; reset when it
        # changes or drops below the minimum order size.
        pos_diff = round(actual-summarize['pos']-summarize.get('base',0), 8)
        if self._last_pos_diff != pos_diff or abs(pos_diff)<self.minimum_order_size :
            self._diff_count = 0
        if abs(pos_diff)>=self.minimum_order_size and self._diff_count<5:
            self._diff_count += 1
        self._last_pos_diff = pos_diff
        if len(self._database)==0 :
            self._diff_count = 0
        self._logger.info('{:>+17.8f} : {:>17.8f} ({:>+10.3f} ={:>17.8f}) : {:>17.8f} : {:+17.8f} {}'.format(
            summarize['profit'], summarize['pos'], summarize.get('base',0), summarize['pos'] + summarize.get('base',0), actual, pos_diff,'*'*self._diff_count))
        self._latest_summarize = summarize

        # If the position has been off four times in a row, correct it with an
        # order: limit orders first, then market orders once the configured
        # number of limit attempts ('try_limit_order') is exhausted.
        if self.api._auth!=None and self.parameters.get('adjust_position',True) and self._diff_count>=4 :
            self._limit_try_count +=1
            maxsize = self.parameters.get('adjust_max_size',100)
            if self._limit_try_count> self.parameters.get('try_limit_order',0) :
                if pos_diff < 0:
                    self.sendorder(order_type='MARKET', side='BUY', size=min(-pos_diff,maxsize))
                else:
                    self.sendorder(order_type='MARKET', side='SELL', size=min(pos_diff,maxsize))
                self._diff_count = 0
            else:
                # NOTE(review): the key 'imit_order_offset' looks like a typo
                # for 'limit_order_offset' — left unchanged since callers and
                # config files may rely on it; confirm against the config.
                if pos_diff < 0:
                    self.sendorder(order_type='LIMIT', side='BUY', size=min(-pos_diff,maxsize), price=self.ltp-self.parameters.get('imit_order_offset',0), auto_cancel_after=20)
                else:
                    self.sendorder(order_type='LIMIT', side='SELL', size=min(pos_diff,maxsize), price=self.ltp+self.parameters.get('imit_order_offset',0), auto_cancel_after=20)
        else:
            self._limit_try_count =0
def is_day():
    """Report whether the current local time is within daytime hours.

    Daytime is the half-open interval [09:00, 17:00).
    """
    current = datetime.now().time()
    return datetime_time(9, 0) <= current < datetime_time(17, 0)
def _de(self, value, fieldtype, field):
    """Deserialize a stored *value* back into a Python object based on *fieldtype*.

    Python 2 code (uses ``basestring``/``unicode``/``long``).

    value: the raw stored value (string/bytes/number/list, depending on source).
    fieldtype: datatype name, or a dict carrying it under the u"datatype" key,
        or falsy when no datatype information is available.
    field: passed through to a registered deserialize callback, unused otherwise.
    Returns the converted value; unknown fieldtypes fall through unchanged.
    """
    if not fieldtype:
        # no datatype information set
        # Strings tagged with the "_json_" prefix carry embedded JSON.
        if isinstance(value, basestring) and value.startswith(u"_json_"):
            value = json.loads(value[len(u"_json_"):])
        if isinstance(value, bytes):
            value = unicode(value, self.codepage)
        return value
    if isinstance(fieldtype, dict):
        fieldtype = fieldtype[u"datatype"]
    # call serialize callback function
    if fieldtype in self.deserializeCallbacks:
        return self.deserializeCallbacks[fieldtype](value, field)
    if fieldtype in ("date", "datetime"):
        # -> to datetime (strings parsed, numbers treated as epoch timestamps)
        if isinstance(value, basestring):
            value = ConvertToDateTime(value)
        elif isinstance(value, (float,int,long)):
            value = datetime.fromtimestamp(value)
    elif fieldtype == "time":
        # -> to datetime.time
        if isinstance(value, basestring):
            # misuse datetime parser: prepend a dummy date, keep the time part
            value2 = ConvertToDateTime(u"2015-01-01 "+unicode(value))
            if value2:
                value = datetime_time(value2.hour,value2.minute,value2.second,value2.microsecond)
        elif isinstance(value, (float,int,long)):
            value = datetime.fromtimestamp(value)
            value = datetime_time(value.hour,value.minute,value.second,value.microsecond)
    elif fieldtype == "timestamp":
        if isinstance(value, basestring):
            value = float(value)
    elif fieldtype in ("multilist", "checkbox", "mselection", "mcheckboxes", "urllist", "unitlist"):
        # -> to string tuple
        # unitlist -> to number tuple
        if not value:
            value = u""
        elif isinstance(value, basestring):
            try:
                value = tuple(json.loads(value))
            except ValueError:
                # bw 0.9.12 convert line based values
                if "\r\n" in value:
                    value = value.split("\r\n")
                else:
                    value = (value,)
        elif isinstance(value, list):
            value = tuple(value)
        if fieldtype == "unitlist":
            value = tuple([long(v) for v in value])
    elif fieldtype == "json":
        # -> to python type
        if not value:
            value = None
        elif isinstance(value, basestring):
            value = json.loads(value)
    return value
def check_no_trade_period(self):
    # Check whether the current time falls inside a configured no-trade
    # period and set self._api.noTrade accordingly.
    # https://codeday.me/jp/qa/20190219/264470.html
    def time_in_range(start, end, x):
        """Return true if x is in the range [start, end]"""
        # The else-branch handles ranges that wrap past midnight.
        if start <= end:
            return start <= x <= end
        else:
            return start <= x or x <= end

    # No configured periods: trading is always allowed.
    if not self.notrade :
        self._api.noTrade = False
        return
    # Current time and weekday in UTC+9 (JST), truncated to the minute.
    now = datetime_time((datetime.utcnow()+timedelta(hours=9)).hour, (datetime.utcnow()+timedelta(hours=9)).minute, 0)
    weekday = (datetime.utcnow()+timedelta(hours=9)).weekday()
    try:
        for p in self.notrade:
            # Period given as times of day: "HH:MM-HH:MM" (<=11 chars) with an
            # optional weekday digit at index 12 ("HH:MM-HH:MM w").
            if len(p['period']) <= 13:
                start = datetime_time( int(p['period'][0:2]), int(p['period'][3:5]), 0)
                end = datetime_time( int(p['period'][6:8]), int(p['period'][9:11]), 0)
                if (len(p['period']) <= 11 or int(p['period'][12]) == weekday) and time_in_range(start, end, now):
                    self._logger.info('no_trade period : {}'.format(p['period']))
                    self._api.noTrade = True
                    return
            # Period given as full dates: "YYYY-MM-DD HH:MM-YYYY-MM-DD HH:MM"
            # (33 chars), parsed by fixed slice positions.
            elif len(p['period']) == 33:
                now = datetime.now()
                # Works up to Python 3.6
                start = datetime( year = int(p['period'][0:4]), month = int(p['period'][5:7]), day = int(p['period'][8:10]), hour = int(p['period'][11:13]), minute = int(p['period'][14:16]) )
                end = datetime( year = int(p['period'][17:21]), month = int(p['period'][22:24]), day = int(p['period'][25:27]), hour = int(p['period'][28:30]), minute = int(p['period'][31:33]) )
                # On Python 3.7+ fromisoformat would be faster:
                #start = datetime.fromisoformat( p['period'][:16] )
                #end = datetime.fromisoformat( p['period'][17:] )
                if time_in_range(start, end, now):
                    self._logger.info('no_trade period : {}'.format(p['period']))
                    self._api.noTrade = True
                    return
        # No period matched: trading allowed.
        self._api.noTrade = False
        return
    except Exception as e:
        # A malformed period string lands here; log config for diagnosis.
        self._logger.error('no_trade period is not correct: {}'.format(e))
        self._logger.info('no_trade : {}'.format(self.notrade))
# Statistics ideas for a log analyzer:
#   mobiles, computers, OS
#   browsers, operating systems, device types and spiders
#   top pages / top highload / top users
#   geo location of user


def is_sublist(lst1, lst2):
    """Compare the shared elements of both lists as seen from each side.

    Returns True when filtering lst1 by membership in lst2 yields the same
    sequence as filtering lst2 by membership in lst1 (order and multiplicity
    both matter).
    """
    shared_from_first = [item for item in lst1 if item in lst2]
    shared_from_second = [item for item in lst2 if item in lst1]
    return shared_from_first == shared_from_second


print(datetime_time(6, 0, 0))

regex_time = r':\d{2}:\d{2}:\d{2}'
regex_date = r'\d{2}\/\w{2,3}\/\d{4}'
date = "[07/Mar/2004:16:10:49 -0800]"

# Pull the date and the time-of-day out of an Apache-style timestamp.
print(re.search(regex_date, date).group())
time_text = re.search(regex_time, date).group()[1:]
print(datetime.strptime(time_text, '%H:%M:%S').time())

# Split full access-log lines into their component groups.
regex = '([(\d\.)]+) - - \[(.*?)\] "(.*?)" (\d+) (\d+) ?"?([^-\s]?){0,1}"?'
line = '64.242.88.10 - - [07/Mar/2004:16:05:49 -0800] "GET /twiki/bin/edit/Main/Double_bounce_sender?topicparent=Main.ConfigurationVariables HTTP/1.1" 401 12846 "http://ramillion.com/" "Mozilla/5.0 (compatible; U; DataMiner/3.14; +http://ramillion.com)"'
print(re.match(regex, line).groups())
line = '62.65.39.20 - - [27/Feb/2017:04:50:47 +0000] "GET / HTTP/1.1" 200 612 "http://ramillion.com/" "Mozilla/5.0 (compatible; U; DataMiner/3.14; +http://ramillion.com)"'