async def insert_test_marker(self, ctx, n_from: int, n_to: int):
    """Insert a synthetic cache marker for this guild's text analytics.

    The marker spans ``n_from`` days ago ("from") to ``n_to`` days ago
    ("to"), both measured from the current wall-clock time, and is stored
    in the guild's ``cache_markers`` collection.
    """
    # Offsets are converted to absolute datetimes at insertion time.
    marker_from = datetime.datetime.now() - timedelta.Timedelta(days=n_from)
    marker_to = datetime.datetime.now() - timedelta.Timedelta(days=n_to)
    await utils.db.insertOne(
        f"analytics.text.a{ctx.guild.id}.cache_markers",
        {
            "to": marker_to,
            "from": marker_from,
        })
def create_event(request):
    """Django view: create a Google Calendar event for the session's family.

    Reads the event fields from ``request.POST`` (start_time, summary,
    duration in hours, description, location), parses the start time with
    ``datefinder``, inserts the event into the family's Google calendar,
    persists a matching ``EventEntry`` locally, and redirects to the
    calendar page.

    NOTE(review): when no date can be parsed from ``start_time`` the view
    falls through and returns None (an HTTP 500 in Django) — confirm
    whether an error page or redirect is intended for that case.
    """
    family_session = request.session['family_session']
    fam = Family.objects.get(nameofFamily=family_session)
    # BUGFIX: the original leaked the file handle via pickle.load(open(...)).
    # NOTE(review): unpickling a credentials file is unsafe if token.pkl can
    # be tampered with — consider google-auth's own (de)serialization.
    with open("token.pkl", "rb") as token_file:
        credentialsDanu = pickle.load(token_file)
    serviceDanu = build("calendar", "v3", credentials=credentialsDanu)
    # (removed: unused local `scopes` — the scope list was never passed on)
    start_time_str = request.POST['start_time']
    summary = request.POST['summary']
    duration = int(request.POST['duration'])
    description = request.POST['description']
    location = request.POST['location']
    matches = list(datefinder.find_dates(start_time_str))
    if len(matches):
        start_time = matches[0]
        end_time = start_time + timedelta.Timedelta(hours=duration)
        event = {
            'summary': summary,
            'location': location,
            'description': description,
            'start': {
                'dateTime': start_time.strftime("%Y-%m-%dT%H:%M:%S"),
                'timeZone': "GMT+00:00",
            },
            'end': {
                'dateTime': end_time.strftime("%Y-%m-%dT%H:%M:%S"),
                'timeZone': "GMT+00:00"
            },
            'reminders': {
                'useDefault': False,
                'overrides': [
                    {
                        'method': 'email',
                        'minutes': 24 * 60
                    },
                    {
                        'method': 'popup',
                        'minutes': 10
                    },
                ],
            },
        }
        created = serviceDanu.events().insert(calendarId=fam.calId,
                                              body=event).execute()
        print('CREATED :', created)
        # Mirror the Google event in the local database and attach it to
        # the family so it shows on their calendar page.
        event = EventEntry(
            summary=summary,
            description=description,
            location=location,
            start_time=start_time_str,
            duration=duration,
        )
        event.save()
        fam.calEvents.add(event)
        return redirect('calendar')
def get_token():
    """Issue a one-week JWT access token for a validated user.

    Returns (JSON, status): the token with 200 on success, or an error
    message with 401 when the credentials do not validate.
    """
    entity = tokenEntity()
    # Guard clause: reject invalid credentials immediately.
    if not entity.validate_request(request):
        return jsonify({"message": "usuario o password errado"}), 401
    token = create_access_token(
        entity.user, expires_delta=timedelta.Timedelta(weeks=1))
    return jsonify({'access_token': token}), 200
async def pull_text_data_from_api(self, ctx, n=5):
    """Backfill the last ``n`` days of text messages into the analytics DB.

    Uses cache markers to avoid re-querying already-cached periods: the
    desired window is split around existing markers (via ``insert_marker``),
    each remaining period is walked channel-by-channel through Discord's
    history API, then the old markers are replaced by one marker covering
    the whole window.
    """
    today = datetime.datetime.now()
    last_day = today - timedelta.Timedelta(days=n)
    this_marker = {
        "from": today,
        "to": last_day,
        "id": ctx.message.id,
    }
    query_periods = [this_marker]
    query = {"from": {"$gte": last_day}}
    cache_markers = await utils.db.find(
        f"analytics.text.a{ctx.guild.id}.cache_markers", query)
    # Carve already-cached spans out of the requested window.
    for marker in cache_markers:
        query_periods = insert_marker(marker, query_periods)

    def to_string(m):
        now = datetime.datetime.now()
        # BUGFIX: message previously read "days go" instead of "days ago".
        return f"From {(now - m['from']).days} days ago, To : {(now - m['to']).days} days ago"

    total_query_days = 0
    for period in query_periods:
        total_query_days += (period["from"] - period["to"]).days
    newline = '\n'
    await ctx.send(
        f"Periods to query: {newline}{newline.join([to_string(p) for p in query_periods ])}"
        + '\n' + f"total: {total_query_days} days")
    count = 0
    for period in query_periods:
        for channel in ctx.guild.text_channels:
            try:
                async for message in channel.history(
                        limit=None, after=period["to"],
                        before=period["from"]):
                    await self._log_message(message)
                    count += 1
            except Exception as e:
                # Best effort: some channels are not readable by the bot;
                # skip them but keep caching the rest.
                print(e)
    # Collapse all markers into one covering the full window just queried.
    for marker in cache_markers:
        await utils.db.deleteOne(
            f"analytics.text.a{ctx.guild.id}.cache_markers", marker)
    await utils.db.insertOne(
        f"analytics.text.a{ctx.guild.id}.cache_markers", this_marker)
    await ctx.send(f"Cached {count} messages :thumbsup:")
def create_event(start_time_str, service, summary, duration=1, attendees=None, description=None, location=None):
    """Parse ``start_time_str`` and insert a Google Calendar event.

    The event lasts ``duration`` hours, carries email (24h) and popup (10min)
    reminders, and is inserted into the caller's primary calendar with
    notifications enabled.

    Returns the created event resource (dict) from the Calendar API, or
    implicitly None when no date could be parsed from ``start_time_str``.
    """
    matches = list(datefinder.find_dates(start_time_str))
    if len(matches):
        start_time = matches[0]
        end_time = start_time + timedelta.Timedelta(hours=duration)
        event = {
            'summary': summary,
            'location': location,
            'description': description,
            'start': {
                'dateTime': start_time.strftime("%Y-%m-%dT%H:%M:%S"),
                'timeZone': "GMT+00:00",
            },
            'end': {
                'dateTime': end_time.strftime("%Y-%m-%dT%H:%M:%S"),
                # 'dateTime': end_time,
                'timeZone': "GMT+00:00"
            },
            # NOTE(review): `attendees` is a single e-mail string wrapped in a
            # one-element list — confirm multiple attendees are not needed.
            'attendees': [
                {
                    'email': attendees
                },
            ],
            'reminders': {
                'useDefault': False,
                'overrides': [
                    {
                        'method': 'email',
                        'minutes': 24 * 60
                    },
                    {
                        'method': 'popup',
                        'minutes': 10
                    },
                ],
            },
        }
        print('''*** %r event added: With: %s Start: %s End: %s''' %
              (summary.encode('utf-8'), attendees, start_time, end_time))
        return service.events().insert(calendarId='primary',
                                       body=event,
                                       sendNotifications=True).execute()
def get_game():
    """
    Scrape stats.inpredictable.com to find which NBA games of the day were
    good (excitement rating >= 9).

    :return: a message string — the good games separated (and terminated) by
        spaces, or "no games today" / "all games were bad" /
        "there were no games" (on any scraping failure).
    """
    try:
        fmt = '%Y-%m-%d %H:%M:%S %Z%z'
        page = requests.get('http://stats.inpredictable.com/nba/preCap.php')
        tree = html.fromstring(page.content)
        # Games played in the day; ratings and page date come from fixed
        # positions in the page layout (fragile XPaths by design here).
        games = tree.xpath('//a[@target="_blank"]/text()')
        Excitement = tree.xpath(
            '/html/body/div[5]/div/div/table/tbody/tr/td[4]/text()')
        date = tree.xpath('/html/body/div[5]/div/span/text()')
        web_date = date[0].split(',')[0].split()[5]
        before = datetime.date.today() - timedelta.Timedelta(
            minutes=1440)  # 24 hours
        # Day-of-month of "yesterday", compared against the page's date.
        today_date = str(before.strftime(fmt)).split('-')[2][0:2]
        # Check whether the games presented are relevant for today.
        if today_date != web_date:
            return "no games today"
        # Collect games whose excitement rating is at least 9.
        # (games[c + 1]: the rating rows are offset by one from the links.)
        good_games = []
        for c, rating in enumerate(Excitement):
            if float(rating) >= 9:
                good_games.append(games[c + 1])
        if not good_games:
            return "all games were bad"
        # Same output as the original += loop, including the trailing space.
        return "".join(g + " " for g in good_games)
    except Exception:
        # BUGFIX: was a bare `except:`, which also swallowed SystemExit /
        # KeyboardInterrupt. Any scraping/parsing failure maps to this
        # best-effort message.
        return "there were no games"
async def loop(self):
    """Heartbeat watchdog.

    Every 2 minutes: read the stored heartbeat; if it is older than 3
    minutes, log the gap as downtime, otherwise refresh the heartbeat.
    When no heartbeat document exists yet, one is seeded and the loop
    exits.
    """
    await self.bot.wait_until_ready()
    while not self.bot.is_closed() and not self.dead:
        record = await utils.db.findOne("heartbeat", {})
        current = datetime.datetime.now()
        try:
            previous = record["datetime"]
        except TypeError:
            # record is None (no document yet): seed one and stop.
            await log_heartbeat(current)
            return
        stale_cutoff = current - timedelta.Timedelta(minutes=3)
        if previous < stale_cutoff:
            # there was downtime
            await self.log_downtime(previous, current)
        else:
            # all good
            await log_heartbeat(current)
        await asyncio.sleep(60 * 2)
    print("hb loop down")
import pandas as pd
import datetime as dt
import timedelta as td  # third-party `timedelta` package (PyPI), not stdlib datetime.timedelta
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.linear_model import LogisticRegression
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier

# Station codes
# Order: Seoul, Incheon, Daegu, Daejeon, Busan, Ulsan, Gwangju, Jeju
locationList = [108, 112, 143, 133, 159, 152, 156, 184]
# Per-station constants paired with locationList by index
# (presumably station elevation/height factors — TODO confirm).
heightList = [0.7969, 0.6153, 1.3230, 0.7922, 1.1715, 2.1346, 1.2644, 0.5202]
# Two days before today, formatted YYYYMMDD for the ASOS API.
date = (dt.datetime.today() - td.Timedelta(days=2)).strftime('%Y%m%d')


def data_snow(start, end, index):
    """Request daily ASOS weather data for locationList[index] between
    ``start`` and ``end`` (YYYYMMDD).

    NOTE(review): as captured here the function stops right after
    initializing ``weather`` — it never parses the response nor returns,
    so it appears truncated; compare with data_rain/data_typhoon below.
    NOTE(review): the service key is hard-coded in the URL — move it to
    configuration.
    """
    url = "http://apis.data.go.kr/1360000/AsosDalyInfoService/getWthrDataList" \
          "?serviceKey=LAEin4h5h2HeNf9fuSWuorK2uW5MyuvoiWeJL3uSRZivdAzWhtcrCECKzSKrU9Dfwe8W6tdNR24tDTBZEPYiEQ%3D%3D" \
          "&numOfRows=999" \
          "&dataCd=ASOS" \
          "&dateCd=DAY" \
          "&startDt=" + str(start) + \
          "&endDt=" + str(end) + \
          "&stnIds=" + str(locationList[index])
    request = ul.Request(url)
    response = ul.urlopen(request)
    rescode = response.getcode()
    weather = [[]]
async def info(self, ctx, *args):
    """Send per-day activity graphs for a member.

    Builds a DataFrame indexed by the last ``n`` days (oldest first) with
    message/character/queue-command/voice-minute counts pulled from the
    analytics collections, renders two bar plots, and posts them to the
    channel.
    """
    n, member = await self.parse_params(ctx, args)
    now = datetime.datetime.now()
    query = {
        "datetime": {
            "$lt": now,
            "$gte": now - timedelta.Timedelta(days=n)
        },
        "member_id": member.id,
        "guild_id": member.guild.id
    }
    text_data = await utils.db.find(f"analytics.text.a{member.guild.id}",
                                    query)
    voice_data = await utils.db.find(f"analytics.voice.a{member.guild.id}",
                                     query)
    # Day index: today plus the previous n days, oldest first.
    days = [now.date()]
    for i in range(1, n + 1):
        days.append((now - timedelta.Timedelta(days=i)).date())
    days = list(reversed(days))
    columns = ['#Messages', '#Characters', "#qj", "#qn", "#VcMins"]
    df = pd.DataFrame(index=days)
    for c in columns:
        df[c] = 0
    # BUGFIX: replaced chained indexing (df[col][day] += ...) with .at[],
    # which is the supported scalar accessor; chained assignment can
    # silently write to a copy (SettingWithCopy).
    for datapoint in voice_data:
        day = datapoint['datetime'].date()
        df.at[day, "#VcMins"] += datapoint["length_mins"]
    for datapoint in text_data:
        day = datapoint['datetime'].date()
        df.at[day, '#Messages'] += 1
        df.at[day, '#Characters'] += datapoint['length']
        if datapoint['qn']:
            df.at[day, '#qn'] += 1
        elif datapoint['qj']:
            df.at[day, '#qj'] += 1
    bar_plot(df[["#Characters", "#VcMins"]], "g1.png", palette="hls")
    bar_plot(df[["#qn", "#qj"]], "g2.png", palette="Set2")
    with open('g1.png', 'rb') as f, open('g2.png', 'rb') as f2:
        fs = [
            discord.File(f, filename='Activity Graph.png'),
            discord.File(f2, filename='QueueBot Usage Graph.png')
        ]
        await ctx.send("User data for " + member.display_name + ":",
                       files=fs)
def getPermanentUrl(self, bucketname, filename):
    """Return a presigned GET URL for an object, valid for 7 days.

    On a MinIO ResponseError the error is printed and None is returned
    (best-effort behavior kept from the original).
    """
    expiry = timedelta.Timedelta(days=7)
    try:
        return self.__minioclient.presigned_get_object(
            bucketname, filename, expires=expiry)
    except ResponseError as err:
        print(err)
def typhoonPredict(dateStart, dateEnd):
    """Fetch daily ASOS weather for station 184 (Jeju) between ``dateStart``
    and ``dateEnd`` (YYYYMMDD strings).

    Multi-row responses return a header row
    ['date', '1', ..., '7'] followed by rows
    [date, avgRhm, avgPs, avgPa, avgWs, sumRn, avgTa, 0]; a single-row
    response returns one 6-value row without date/label (shape kept from
    the original).  Returns [[]] when the HTTP request does not succeed.
    """

    def _field(items, index, single, key):
        # One observation value; a None payload or an unsubscriptable item
        # collapses to 0, mirroring the original per-field try/except
        # blocks (TypeError/ValueError caught, KeyError deliberately not).
        try:
            value = items['item'][key] if single else items['item'][index][key]
            if value is None:
                value = 0
        except (TypeError, ValueError):
            value = 0
        return value

    # NOTE(review): service key is hard-coded in the URL — move to config.
    url = "http://apis.data.go.kr/1360000/AsosDalyInfoService/getWthrDataList" \
          "?serviceKey=LAEin4h5h2HeNf9fuSWuorK2uW5MyuvoiWeJL3uSRZivdAzWhtcrCECKzSKrU9Dfwe8W6tdNR24tDTBZEPYiEQ%3D%3D" \
          "&numOfRows=999" \
          "&dataCd=ASOS" \
          "&dateCd=DAY" \
          "&startDt=" + dateStart + \
          "&endDt=" + dateEnd + \
          "&stnIds=184"
    request = ul.Request(url)
    response = ul.urlopen(request)
    rescode = response.getcode()
    # BUGFIX: `weather` was undefined when rescode != 200 (NameError at
    # return); initialize like the sibling data_* functions do.
    weather = [[]]
    if (rescode == 200):
        responseData = response.read()
        rDD = json.loads(json.dumps(xmltodict.parse(responseData)))
        size = int(rDD['response']['body']['totalCount'])
        # BUGFIX: the original keyed the single-item path off `size == 2`
        # *after* mutating size, which misparsed genuine two-row responses
        # (and clobbered their header row). Use an explicit flag instead.
        single = (size == 1)
        if single:
            weather = [[0] * 6]
            size = 2  # loop below runs exactly once (index 1)
        else:
            weather = [[0] * 8] * (size)
            weather[0] = ['date', '1', '2', '3', '4', '5', '6', '7']
        date = dt.datetime.strptime(dateStart, '%Y%m%d').date()
        items = rDD['response']['body']['items']
        # avgRhm: avg relative humidity, avgPs: avg sea-level pressure,
        # avgPa: avg local pressure, avgWs: avg wind speed,
        # sumRn: precipitation, avgTa: avg temperature.
        keys = ('avgRhm', 'avgPs', 'avgPa', 'avgWs', 'sumRn', 'avgTa')
        # NOTE(review): as in the original, multi-row parsing starts at
        # index 1, i.e. the first API item is skipped — confirm intended.
        for index in range(1, size):
            avgRhm, avgPs, avgPa, avgWs, sumRn, avgTa = (
                _field(items, index, single, key) for key in keys)
            if single:
                weather[0] = [avgRhm, avgPs, avgPa, avgWs, sumRn, avgTa]
            else:
                weather[index] = [
                    str(date), avgRhm, avgPs, avgPa, avgWs, sumRn, avgTa, 0
                ]
            date = date + td.Timedelta(days=1)
    return weather
# Label every weather row whose date falls between startDate and endDate
# (inclusive) as a typhoon day: column 7 is set to 1.
# (Relies on `endDate`, `startDate`, `weather`, and `size` defined earlier
# in the script — not visible in this chunk.)
dateGap = (endDate - startDate).days
date = startDate
for temp in range(0, dateGap + 1):
    count = 1
    # Linear scan for the row matching this date; wraps around (with a
    # "! restart" trace) when the end of the table is passed.
    # NOTE(review): if `date` matches no row at all, this while loop never
    # terminates — confirm the date range is guaranteed to be covered.
    while True:
        if (str(date) == weather[count][0]):
            weather[count][7] = 1
            break
        else:
            count += 1
            if (count > size):
                count = 1
                print('! restart ')
    date = date + td.Timedelta(days=1)

from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler

# Features: columns 1..6 (weather measurements); target: column 7 (label).
# Row 0 is the header row and is skipped.
weather = np.array(weather)
x = weather[1:, 1:7]
y = weather[1:, 7]
x = x.astype(np.float64)
y = y.astype(np.float64)
# NOTE(review): nan_to_num is applied to `weather` *after* x/y were
# extracted, so it does not clean x or y — confirm whether it should be
# applied to x instead.
weather = np.nan_to_num(weather)
scaler = StandardScaler()
import timedelta
import GA


def reportDay(day):
    """Produce the marketing report for a single day.

    Currently delegates entirely to GA.main(); the AdWords/ROI pieces are
    still TODO (see notes below).
    """
    # get analytics data for the day
    # get adwords data for the day
    # calculate roi for (in Google Ads class)
    #   Dynamic Remarketing
    #   SEM (Brand)
    #   SEM (Non-Brand)
    GA.main()


LASTXDAYS = 1
# NOTE(review): `datetime` is not imported in this section — confirm it is
# imported elsewhere in the file.
today = datetime.datetime.now().date()
# Build the list of the last LASTXDAYS dates, starting from yesterday.
dayz = []
for dayscount in range(1, LASTXDAYS + 1):
    dayz.append(today - timedelta.Timedelta(days=dayscount))
print(dayz)
for d in dayz:
    reportDay(d)
#!/usr/bin/env python from datetime import datetime import timedelta dt1 = datetime.now() dt2 = datetime.now() + timedelta.Timedelta(days=2, hours=2) td = timedelta.Timedelta(days=2, hours=2) print("seconds = %s" % td.total.seconds) print("minutes = %s" % td.total.minutes) print("hours = %s" % td.total.hours) print("days = %s" % td.total.days)
def data_rain(start, end, index):
    """Fetch daily ASOS weather for ``locationList[index]`` between
    ``start`` and ``end`` (YYYYMMDD) and return one 18-column row per day:

        [date, avgTa, minTa, maxTa, avgTd, minRhm, avgRhm, ssDur, sumSsHr,
         hr1MaxIcsr, sumGsr, avgTca, avgLmac, sumLrgEv, sumSmlEv, n99Rn,
         heightList[index], sumRn]

    where the final ``sumRn`` is binarized: 1 if any rain fell, else the
    raw value. Missing fields default to -99 (precipitation to 0).
    Returns [[]] when the HTTP request does not succeed.

    NOTE(review): the single-item and multi-item branches handle explicit
    None payloads differently (the single branch maps None to -99 for
    hr1MaxIcsr/sumGsr/sumLrgEv/sumSmlEv/n99Rn; the multi branch does not)
    — confirm whether that asymmetry is intended before deduplicating.
    """
    # NOTE(review): service key is hard-coded in the URL — move to config.
    url = "http://apis.data.go.kr/1360000/AsosDalyInfoService/getWthrDataList" \
          "?serviceKey=LAEin4h5h2HeNf9fuSWuorK2uW5MyuvoiWeJL3uSRZivdAzWhtcrCECKzSKrU9Dfwe8W6tdNR24tDTBZEPYiEQ%3D%3D" \
          "&numOfRows=999" \
          "&dataCd=ASOS" \
          "&dateCd=DAY" \
          "&startDt=" + str(start) + \
          "&endDt=" + str(end) + \
          "&stnIds=" + str(locationList[index])
    request = ul.Request(url)
    response = ul.urlopen(request)
    rescode = response.getcode()
    weather = [[]]
    if (rescode == 200):
        # XML -> OrderedDict -> JSON -> plain dict round-trip.
        responseData = response.read()
        rD = xmltodict.parse(responseData)
        rDJ = json.dumps(rD)
        rDD = json.loads(rDJ)
        size = int(rDD['response']['body']['totalCount'])
        # Rows are all reassigned below, so the aliasing from list
        # multiplication is harmless here.
        weather = [[0] * 18] * (size)
        date = str(start)
        date = dt.datetime(int(date[0:4]), int(date[4:6]),
                           int(date[6:8])).date()
        for i in range(0, size):
            if (size == 1):
                # Single-item responses: 'item' is a dict, not a list.
                try:
                    avgTa = rDD['response']['body']['items']['item']['avgTa']
                    # avgTa: average temperature
                except TypeError:
                    avgTa = -99
                try:
                    minTa = rDD['response']['body']['items']['item']['minTa']
                    # minTa: minimum temperature
                except TypeError:
                    minTa = -99
                try:
                    maxTa = rDD['response']['body']['items']['item']['maxTa']
                    # maxTa: maximum temperature
                except TypeError:
                    maxTa = -99
                try:
                    avgTd = rDD['response']['body']['items']['item']['avgTd']
                    # avgTd: average dew-point temperature
                except TypeError:
                    avgTd = -99
                try:
                    minRhm = rDD['response']['body']['items']['item']['minRhm']
                    # minRhm: minimum relative humidity
                except TypeError:
                    minRhm = -99
                try:
                    avgRhm = rDD['response']['body']['items']['item']['avgRhm']
                    # avgRhm: average relative humidity
                except TypeError:
                    avgRhm = -99
                try:
                    ssDur = rDD['response']['body']['items']['item']['ssDur']
                    # ssDur: possible sunshine duration
                except TypeError:
                    ssDur = -99
                try:
                    sumSsHr = rDD['response']['body']['items']['item'][
                        'sumSsHr']  # sumSsHr: total sunshine hours
                except TypeError:
                    sumSsHr = -99
                try:
                    hr1MaxIcsr = rDD['response']['body']['items']['item'][
                        'hr1MaxIcsr']  # hr1MaxIcsr: max 1-hour solar radiation
                except TypeError:
                    hr1MaxIcsr = -99
                if (hr1MaxIcsr is None):
                    hr1MaxIcsr = -99
                try:
                    sumGsr = rDD['response']['body']['items']['item']['sumGsr']
                    # sumGsr: total solar radiation
                except TypeError:
                    sumGsr = -99
                if (sumGsr is None):
                    sumGsr = -99
                try:
                    avgTca = rDD['response']['body']['items']['item']['avgTca']
                    # avgTca: average total cloud cover
                except TypeError:
                    avgTca = -99
                try:
                    avgLmac = rDD['response']['body']['items']['item'][
                        'avgLmac']  # avgLmac: average mid/low-level cloud cover
                except TypeError:
                    avgLmac = -99
                try:
                    sumLrgEv = rDD['response']['body']['items']['item'][
                        'sumLrgEv']  # sumLrgEv: total large-pan evaporation
                except TypeError:
                    sumLrgEv = -99
                if (sumLrgEv is None):
                    sumLrgEv = -99
                try:
                    sumSmlEv = rDD['response']['body']['items']['item'][
                        'sumSmlEv']  # sumSmlEv: total small-pan evaporation
                except TypeError:
                    sumSmlEv = -99
                if (sumSmlEv is None):
                    sumSmlEv = -99
                try:
                    n99Rn = rDD['response']['body']['items']['item']['n99Rn']
                    # n99Rn: 9am-to-9am precipitation
                except TypeError:
                    n99Rn = -99
                if (n99Rn is None):
                    n99Rn = -99
                try:
                    sumRn = rDD['response']['body']['items']['item']['sumRn']
                    # sumRn: daily precipitation
                except TypeError:
                    sumRn = 0
                if (sumRn is None):
                    sumRn = 0
                # Binarize precipitation: 1 when any rain fell.
                if (float(sumRn) > 0):
                    sumRn = 1
            else:
                # Multi-item responses: 'item' is a list indexed by day.
                try:
                    avgTa = rDD['response']['body']['items']['item'][i][
                        'avgTa']  # avgTa: average temperature
                except TypeError:
                    avgTa = -99
                try:
                    minTa = rDD['response']['body']['items']['item'][i][
                        'minTa']  # minTa: minimum temperature
                except TypeError:
                    minTa = -99
                try:
                    maxTa = rDD['response']['body']['items']['item'][i][
                        'maxTa']  # maxTa: maximum temperature
                except TypeError:
                    maxTa = -99
                try:
                    avgTd = rDD['response']['body']['items']['item'][i][
                        'avgTd']  # avgTd: average dew-point temperature
                except TypeError:
                    avgTd = -99
                try:
                    minRhm = rDD['response']['body']['items']['item'][i][
                        'minRhm']  # minRhm: minimum relative humidity
                except TypeError:
                    minRhm = -99
                try:
                    avgRhm = rDD['response']['body']['items']['item'][i][
                        'avgRhm']  # avgRhm: average relative humidity
                except TypeError:
                    avgRhm = -99
                try:
                    ssDur = rDD['response']['body']['items']['item'][i][
                        'ssDur']  # ssDur: possible sunshine duration
                except TypeError:
                    ssDur = -99
                try:
                    sumSsHr = rDD['response']['body']['items']['item'][i][
                        'sumSsHr']  # sumSsHr: total sunshine hours
                except TypeError:
                    sumSsHr = -99
                try:
                    hr1MaxIcsr = rDD['response']['body']['items']['item'][i][
                        'hr1MaxIcsr']  # hr1MaxIcsr: max 1-hour solar radiation
                except TypeError:
                    hr1MaxIcsr = -99
                try:
                    sumGsr = rDD['response']['body']['items']['item'][i][
                        'sumGsr']  # sumGsr: total solar radiation
                except TypeError:
                    sumGsr = -99
                try:
                    avgTca = rDD['response']['body']['items']['item'][i][
                        'avgTca']  # avgTca: average total cloud cover
                except TypeError:
                    avgTca = -99
                try:
                    avgLmac = rDD['response']['body']['items']['item'][i][
                        'avgLmac']  # avgLmac: average mid/low-level cloud cover
                except TypeError:
                    avgLmac = -99
                try:
                    sumLrgEv = rDD['response']['body']['items']['item'][i][
                        'sumLrgEv']  # sumLrgEv: total large-pan evaporation
                except TypeError:
                    sumLrgEv = -99
                try:
                    sumSmlEv = rDD['response']['body']['items']['item'][i][
                        'sumSmlEv']  # sumSmlEv: total small-pan evaporation
                except TypeError:
                    sumSmlEv = -99
                try:
                    n99Rn = rDD['response']['body']['items']['item'][i][
                        'n99Rn']  # n99Rn: 9am-to-9am precipitation
                except TypeError:
                    n99Rn = -99
                try:
                    sumRn = rDD['response']['body']['items']['item'][i][
                        'sumRn']  # sumRn: daily precipitation
                except TypeError:
                    sumRn = 0
                if (sumRn is None):
                    sumRn = 0
                # Binarize precipitation: 1 when any rain fell.
                if (float(sumRn) > 0):
                    sumRn = 1
            weather[i] = [
                str(date), avgTa, minTa, maxTa, avgTd, minRhm, avgRhm, ssDur,
                sumSsHr, hr1MaxIcsr, sumGsr, avgTca, avgLmac, sumLrgEv,
                sumSmlEv, n99Rn, heightList[index], sumRn
            ]
            date = date + td.Timedelta(days=1)
    return weather
def data_typhoon(start, end):
    """Fetch daily ASOS weather for station 184 (Jeju) between ``start``
    and ``end`` (YYYYMMDD) and return one row per day:

        [date, avgRhm, avgPs, avgPa, avgWs, sumRn, avgTa, 0]

    Missing/None fields default to 0; the trailing 0 is a label slot
    filled in later.  Returns [[]] when the HTTP request does not succeed.
    """

    def _field(items, index, single, key):
        # One observation value; a None payload or an unsubscriptable item
        # collapses to 0, mirroring the original six copy-pasted per-field
        # try/except blocks (TypeError/ValueError caught, KeyError not).
        try:
            value = items['item'][key] if single else items['item'][index][key]
            if value is None:
                value = 0
        except (TypeError, ValueError):
            value = 0
        return value

    # NOTE(review): service key is hard-coded in the URL — move to config.
    url = "http://apis.data.go.kr/1360000/AsosDalyInfoService/getWthrDataList" \
          "?serviceKey=LAEin4h5h2HeNf9fuSWuorK2uW5MyuvoiWeJL3uSRZivdAzWhtcrCECKzSKrU9Dfwe8W6tdNR24tDTBZEPYiEQ%3D%3D" \
          "&numOfRows=999" \
          "&dataCd=ASOS" \
          "&dateCd=DAY" \
          "&startDt=" + str(start) + \
          "&endDt=" + str(end) + \
          "&stnIds=184"
    request = ul.Request(url)
    response = ul.urlopen(request)
    rescode = response.getcode()
    weather = [[]]
    if (rescode == 200):
        # XML -> OrderedDict -> JSON -> plain dict round-trip.
        responseData = response.read()
        rDD = json.loads(json.dumps(xmltodict.parse(responseData)))
        try:
            size = int(rDD['response']['body']['totalCount'])
        except KeyError:
            # Some responses omit totalCount; treat as a single item.
            size = 1
        single = (size == 1)
        # Rows are all reassigned below, so list-multiplication aliasing
        # is harmless here.
        weather = [[0] * 8] * (size)
        date = str(start)
        date = dt.datetime(int(date[0:4]), int(date[4:6]),
                           int(date[6:8])).date()
        items = rDD['response']['body']['items']
        # avgRhm: avg relative humidity, avgPs: avg sea-level pressure,
        # avgPa: avg local pressure, avgWs: avg wind speed,
        # sumRn: precipitation, avgTa: avg temperature.
        keys = ('avgRhm', 'avgPs', 'avgPa', 'avgWs', 'sumRn', 'avgTa')
        for index in range(0, size):
            avgRhm, avgPs, avgPa, avgWs, sumRn, avgTa = (
                _field(items, index, single, key) for key in keys)
            weather[index] = [
                str(date), avgRhm, avgPs, avgPa, avgWs, sumRn, avgTa, 0
            ]
            date = date + td.Timedelta(days=1)
    return weather
import GoogleAnalytics
import datetime
import timedelta  # third-party `timedelta` package (PyPI), not datetime.timedelta
import pandas as pd
# import Helper.Data as dt
import configparser

# Reporting settings (credentials, paths, ...) come from the ini file.
config = configparser.ConfigParser()
config.read('reporting_config.ini')

# Reports always cover the previous full day.
today = datetime.datetime.now().date()
yesterday = today - timedelta.Timedelta(days=1)

########
# ok - Get analytics
### sprint 1 Google Ads
### sprint 2 Facebook Ads
### sprint 3 Criteo
### sprint 4 output excel'i duzenle
### sprint 5 ali
# ok - out to excel
#########

# Get analytics
day_report = GoogleAnalytics.get_google_analytics_day_report(yesterday)
# rows, columns = day_report.shape
# print(rows, columns)