def MatchDetails(url, team1=None, team2=None):
    """Poll a live-score page and push score notifications until full time.

    Args:
        url: live-score match page URL to scrape.
        team1, team2: optional team names; scraped from the page when omitted.

    The original implementation recursed into itself once a minute, growing
    the call stack for the whole match; this version loops instead.  It also
    quit the browser only on the normal path, leaking a headless Chrome when
    the match finished — the driver is now always released.
    """
    while True:
        options = Options()
        options.add_argument('--headless')  # do not open a visible Chrome window
        driver = webdriver.Chrome(options=options)
        try:
            driver.get(url)
            sleep(1)  # give the page a moment to render dynamic content
            soup = BeautifulSoup(driver.page_source, "lxml")
        finally:
            driver.quit()  # always release the browser, even on scrape errors
        team = []
        if team1 is None or team2 is None:
            teams = soup.find_all("div", class_=re.compile("MatchHeaderTeamTitle"))
            for t in teams:
                team.append(t.string)
        else:
            team = [team1, team2]
        score = soup.find("div", class_=re.compile("LivescoreMatchScore"))
        match_time = soup.find("div", class_=re.compile("LivescoreMatchTime"))
        scoreline = f"{team[0]} {score.string.strip()} {team[1]}"
        status = match_time.string.strip().lower()
        if status in ['ft', 'full-time', 'fulltime']:
            notify(scoreline, FootballConsts.matchFinished.value)
            sys.exit()
        # Keep only the minutes ("73:21" -> "73 minutes"); seconds are noise.
        endindex = match_time.string.find(":")
        timePassed = match_time.string[0:endindex] + " minutes"
        if status in ["ht", "halftime", "half-time", "pause"]:
            print("Half Time")
            timePassed = "Half Time"
            sleep(180)  # nothing changes during the break; poll less often
        notify(scoreline, timePassed)
        sleep(60)  # regular polling interval
        # Reuse the scraped names on later iterations (original recursed here).
        team1, team2 = team[0], team[1]
def notifyFeedbackToEmployees():
    """E-mail each employee their pending goal-feedback messages.

    Collects all GoalFeedback rows that are visible to the employee but not
    yet notified, groups the message text per employee number, sends one
    e-mail per employee, then marks every processed row as notified.
    """
    fStr = dict()  # key = emp.no, value = accumulated message string
    # All pending notifications: visible to the employee, not yet sent.
    feedbackList = GoalFeedback.query.filter_by(empNotified = False).filter_by(visibleToEmp = True).all()
    for f in feedbackList:
        getStringRep(f, fStr)  # append this row's string representation under its emp_no
    for k in fStr.keys():  # one e-mail per employee with pending feedback
        empEmail = getEmployeeEmail(k)
        if not empEmail:
            # BUGFIX: previously this only printed and then fell through,
            # calling notify() with an empty address; skip such employees.
            print("Email not found= Emp.No:" + str(k))
            continue
        mesg = htmlhead + feedbackHeader + fStr[k] + goalsheetFooter + htmlfooter
        subj = feedbackSubject
        notify(empEmail, subj, mesg, templateId="-1")
    # Set the flag so the next run does not re-send the same feedback.
    for f in feedbackList:
        f.empNotified = True
    db.session.commit()
    return
def run():
    """Iterate every JD account cookie and run the pet-town activity tasks."""
    for cookies in jdCookie.get_cookies():
        print(f"""[ {cookies["pt_pin"]} ]""")
        # Activity entry point; "result" holds this account's pet-town state.
        status = functionTemplate(cookies, "initPetTown", {})["result"]
        if status["userStatus"] == 0:
            # NOTE(review): only logs "activity not started" and still falls
            # through to the key accesses below — confirm the payload always
            # contains petStatus/goodsInfo in this case.
            print("活动未开启")
        if status["petStatus"] == 5 and status["showHongBaoExchangePop"]:
            # Reward ready to claim: push a notification and skip the tasks.
            notification.notify(f"""##东东萌宠【{cookies["pt_pin"]}】可领取""",
                                f"""## 账号【{cookies["pt_pin"]}】 东东萌宠可以领取""")
            continue
        if "goodsInfo" not in status:
            continue
        print("\n【检查状态】")
        print(f"""兑换奖品: {status["goodsInfo"]["goodsName"]}""")
        print(
            f"""勋章进度: {status["medalNum"]}/{status["goodsInfo"]["exchangeMedalNum"]}"""
        )
        print(f"""还需能量: {status["needCollectEnergy"]}""")
        print(f"""当前进度: {status["medalPercent"]}%""")
        print(f"""当前饵料: {status["foodAmount"]}""")
        print("我的助力码: ", status["shareCode"])
        # Daily task helpers (defined elsewhere in this module).
        help(cookies, shareCodes)  # NOTE: shadows the builtin help()
        takeTask(cookies)
        sport(cookies)
        masterHelp(cookies)
        feedPets(cookies)
        energyCollect(cookies)
    print("\n为防止遗漏,再运行一次")
    print("##" * 30)
def valid(cookies):
    """Probe the plant-bean index endpoint; True if the JD cookie still works."""
    request_headers = {
        'Host': 'api.m.jd.com',
        'Accept': '*/*',
        'Connection': 'keep-alive',
        'User-Agent': 'jdapp;iPhone;8.5.5;13.5;Mozilla/5.0 (iPhone; CPU iPhone OS 13_4 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Mobile/15E148;supportJDSHWK/1',
        'Accept-Language': 'zh-cn',
        'Accept-Encoding': 'gzip, deflate, br',
    }
    request_params = (
        ('functionId', 'plantBeanIndex'),
        ('body', json.dumps({
            "monitor_source": "plant_m_plant_index",
            "version": "8.4.0.0"
        })),
        ('appid', 'ld'),
    )
    response = requests.get('https://api.m.jd.com/client.action',
                            headers=request_headers,
                            params=request_params,
                            cookies=cookies)
    # The backend answers code "3" when the session cookie has expired.
    if response.json()["code"] != "3":
        return True
    print(f"""## {cookies["pt_pin"]}: cookie过期""")
    notification.notify(f"""## 京东账号【{cookies["pt_pin"]}】 cookie过期""",
                        f"""## 账号【{cookies["pt_pin"]}】 cookie过期 ,及时修改""")
    return False
def test_notification_calls_uns(self):
    """Sending a comment notification must emit exactly one UNS event."""
    with record_events(notification.uns_notification_sent) as emitted, \
            self.app.test_request_context():
        notification.notify(self.item, 'comment')
    self.assertEqual(len(emitted), 1)
def test_uns_not_called_if_suppressed(self):
    """Suppressing notifications via config must prevent any UNS call."""
    with self.record_uns_calls() as recorded:
        self.app.config['UNS_SUPPRESS_NOTIFICATIONS'] = True
        try:
            notification.notify(self.item, 'comment')
        finally:
            # Always restore the flag so other tests see the default config.
            self.app.config['UNS_SUPPRESS_NOTIFICATIONS'] = False
    self.assertEqual(len(recorded), 0)
def test_uns_called_if_not_in_tests(self):
    """With TESTING disabled, a notification must reach UNS exactly once."""
    with self.record_uns_calls() as recorded:
        self.app.config['TESTING'] = False
        try:
            notification.notify(self.item, 'comment')
        finally:
            # Restore TESTING so the rest of the suite keeps suppression.
            self.app.config['TESTING'] = True
    self.assertEqual(len(recorded), 1)
def screenShot(self):
    """Hide the player controls, grab the window, and save it as a JPEG."""
    self.rootObject().hideControls()
    timestamp = time.strftime("%y-%m-%d-%H-%M-%S", time.localtime())
    name = "%s-%s" % (self.title(), timestamp)
    path = QDir.homePath() + "/%s.jpg" % name
    snapshot = QPixmap.fromImage(self.grabWindow())
    snapshot.save(path, "jpg")
    notify(u"截图成功", u"文件已保存到%s" % path)
def add_history_item_and_notify(parcel, event_type, title, time, actor, description_html, rejected=None):
    """Append a history entry to *parcel*, then notify listeners about it."""
    history_item = parcel.add_history_item(title, time, actor, description_html)
    notification.notify(history_item, event_type, rejected)
def valid(qqheaders):
    """Check whether stored QQ reader headers are still logged in.

    Args:
        qqheaders: sequence whose first element is the repr of a headers dict.

    Returns:
        True when the session is alive; False (after printing and pushing an
        expiry notification) when it has expired.
    """
    headers = ast.literal_eval(qqheaders[0])
    response = requests.get(
        'https://mqqapi.reader.qq.com/mqq/user/init', headers=headers)
    # Idiom fix: test truthiness instead of comparing "== False".
    if not response.json()["data"]['isLogin']:
        # Recover the account id from the cookie for the expiry message.
        QQNUM = re.findall(r'ywguid=(.*?);ywkey', headers['Cookie'])[0]
        print(f"""## {QQNUM}: headers过期""")
        notification.notify(
            f"""## QQ账号【{QQNUM}】 headers过期""",
            f"""## 账号【{QQNUM}】 headers过期 ,及时修改""")
        return False
    return True
def notifyAllGoalSheet():
    """Mail the goal-assignment reminder to every employee with a goal sheet."""
    empMessage = goalAssignResend % ('2018-2019')
    for sheet in getGoalSheetsAll():
        eid = str(sheet.empId)
        emp = getEmployeebyId(sheet.empId)
        if not emp:
            continue  # no employee record for this sheet; skip it
        empEmail = emp.OFFICE_EMAIL_ID
        print("Notifying:" + str(empEmail))
        notify(empEmail, goalEmailSubject, empMessage, templateId="-1")
    return ("Done")
def sendAskFeedbackNotificationsCron() : empRecvList = dict() #Key: receiver email, Value=List of givers empGivrList = dict() #Key: Giver email, Value= List of receivers empAllList = dict() #Get a list of all feedbacks - empNotified = False askList = FeedbackFromAnyone.query.filter_by(empNotified = False).all() for ask in askList : #=>Who you asked givr = ask.giverEmail recv = ask.receiverEmail if givr not in empGivrList.keys() : empGivrList[givr] = "" # Initialize to an empty List empAllList[givr] = 1 empGivrList[givr] += recv + " " if recv not in empRecvList.keys() : empRecvList[recv] = "" # Initialize to an empty List empAllList[recv] = 1 empRecvList[recv] += givr + " " # revrString = "You have requested feedback from:" # givrString = "Feedback has been requested from you by:" for emp in empAllList : # All givers, receivers found recvsList = 0 givrsList = 0 revrString = "You have requested feedback from:" givrString = "Feedback has been requested from you by:" #Send Notifications, one-person-by-one-person + Update Table notified time #Commented out the GIVERS--no need to send e-mail to self if emp in empGivrList.keys() : givrString += empGivrList[emp] givrsList = 1 if emp in empRecvList.keys() : revrString += empRecvList[emp] recvsList = 1 finalStr = "" if givrsList : finalStr = givrString + "<p>" if recvsList : finalStr += revrString + "<p>" #send notification emp mesg = htmlhead + askfeedbackHeader + finalStr + goalsheetFooter + htmlfooter subject = askfeedbackSubject notify(emp, subject, mesg) # notify('*****@*****.**', subject, mesg) #Update DB for ask in askList : ask.empNotified = True # ask.empNotifiedTime = dt.datetime.now() db.session.commit()
def run():
    """Summarise every JD account's bean balance and today's income/expense."""
    beijing_now = datetime.utcnow() + timedelta(hours=8)  # UTC -> Beijing time
    _datatime = beijing_now.strftime("%Y-%m-%d")
    now = beijing_now.strftime("%Y-%m-%d %H:%M:%S")
    sections = []
    for cookies in jdCookie.get_cookies():
        total = totalBean(cookies)
        income, expense = countTodayBean(cookies, _datatime)
        sections.append(f'\n\n【{cookies["pt_pin"]}】 \n当前京豆: {total} \n今日收入: +{income} \n今日支出: {expense}')
    message = "".join(sections)
    print("\n")
    print(f"⏰ 京豆统计 {now}")
    print(message)
    notification.notify(f"⏰ 京豆统计 {now}", message)
def run(self):
    """Run self.cmd in a shell, streaming its stdout to the GUI via signal.

    Emits the accumulated log text after every line read, and once more
    (with a completion notice plus a desktop notification) when the
    process exits.
    """
    process = subprocess.Popen(
        self.cmd,
        shell=True,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        universal_newlines=True,
    )
    # BUGFIX: renamed from `logging`, which shadowed the stdlib logging
    # module inside this method.
    log_text = ""
    while True:
        output = process.stdout.readline()
        # Empty read plus a non-None poll() means the child has exited.
        if output == "" and process.poll() is not None:
            log_text += "Process completed !"
            notification.notify("Process completed !")
            self.signal.emit(log_text)
            break
        if output:
            log_text += output
            self.signal.emit(log_text)
def sendDataMissingMsgToAll(sendEmailToEmp=True):
    """Send a data-missing reminder to every employee and a summary to admin.

    Args:
        sendEmailToEmp: when True each employee is e-mailed directly;
            otherwise only the per-employee status strings are collected.

    Returns:
        An HTML page containing the notification summary and the
        per-employee detail lines.
    """
    empList = hrmsdomain.getAllEmployees()
    count = 0       # employees actually notified
    empCount = 0    # employees processed
    respStr = ""
    for emp in empList:
        (resp, retval) = dataMissingMsg(empObj=emp, email="", sendEmailToEmp=sendEmailToEmp)
        respStr += "<p>" + resp + "</p>"
        if retval:
            count += 1
        empCount += 1
    # Summary to the administrator - address to be moved to a config file.
    retStr = "<p>Total %d employees (out of %d) notified.</p>" % (count, empCount) + respStr
    notify("*****@*****.**", "HRMS: Data Update Status", retStr, templateId="-1")
    # BUGFIX: retStr already ends with respStr; the old return value appended
    # respStr a second time, duplicating every per-employee line in the page.
    return (htmlhead + retStr + htmlfooter)
def main():
    """Event loop: watch upcoming events and check transit sources before each.

    Continuously grabs events, queues future ones in a heap keyed by date,
    sleeps until the next event is within `gap` of starting, then polls every
    relevant source and notifies when one reports a problem.
    """
    sourceProviders = [SourceProvider_ratp(), SourceProvider_jcdecaux_vls(), SourceProvider_transilien()]
    event_seen = set()  # event ids already queued, to avoid duplicates
    heap = HeapEvent()
    gap = timedelta(minutes=30)  # 30 minutes : time to check trafic before an event
    refresh = timedelta(seconds=30)  # grab events every 30 secondes
    while True:
        # feed heap
        for event in get_events():
            # check if we already know it
            if event.id not in event_seen:
                event_seen.add(event.id)
                print()
                if (event.date - datetime.now()).total_seconds() < 0:
                    print("Ignore event in the past:")
                else:
                    print("Add event:")
                    heap.push(event)
                print(str(event))
        # sleep the min between 1 minute and the next event - gap
        # NOTE: `next` shadows the builtin of the same name within this loop.
        next = refresh
        if not heap.empty():
            next = min(next, heap.top().date - datetime.now() - gap)
        if next.total_seconds() > 0:
            print()
            print("Sleeping " + str(next) + "...")
            sleep(next.total_seconds())
        # next event is imminent: pop it and check its sources
        if not heap.empty() and heap.top().date - datetime.now() < gap:
            event = heap.pop()
            print()
            print("Check event:")
            print(str(event))
            # get useful ids of sources for this location, and grab info from internet
            sources = gen_sources(sourceProviders, event.location)
            if 'print' in config.notification['methods']:
                print()  # show an empty line
            for src in sources:
                if src.problem():
                    # there is a problem ! We notify the user...
                    notification.notify(src.message)
def worker_notification(event_notification):
    """Worker loop that shows a notification each time the event is set.

    Waits on *event_notification*, generates text and an image for the
    current global working_state, pushes the notification, and — after a
    "praise" notification — records the active window's first word and
    resets the state to "idle".
    """
    global working_state
    while not stop:
        # Block until someone calls event_notification.set(), then re-arm it.
        event_notification.wait()
        event_notification.clear()
        logging.debug('notify start')
        text = text_generator.generate_text(working_state)
        img_path = notification.generate_path(working_state)
        notification.notify(text, img_path)
        logging.debug('notify end')
        if working_state == "praise":
            title = activity_log.get_active_window_title()
            # NOTE(review): assumes the window title contains at least one
            # whitespace-separated word — confirm split()[0] cannot IndexError.
            activity_log.set_start_word_num(title.split()[0])
            working_state = "idle"
def emailBCSInfoToEmp(empEmail, date, fullmsgbuf, bcsProjSummary, bcsUtilSummary):
    """E-mail one employee their BCS booking / error summary for *date*.

    Args:
        empEmail: recipient's e-mail address.
        date: date being summarised (used in the subject line).
        fullmsgbuf: list of per-line messages rendered as an HTML table.
        bcsProjSummary: per-project summary (currently unused here).
        bcsUtilSummary: dict with AvailableHours / BookedHours /
            BilledHours / LeaveHours totals.
    """
    # Get emp details for the personalised heading.
    empDict = getEmpDictbyEmail(empEmail)
    # Form the message.
    subject = "BCS Booking and Error Summary:" + date.strftime('%d-%m-%y')
    message = htmlhead + "<h4>Summary for : " + empDict[
        "FIRST_NAME"] + " " + empDict["LAST_NAME"] + "</h4>"
    # BUGFIX: this line previously used "=" and silently discarded the html
    # head and the personalised heading built above; append instead.
    message += "<h4>This is in BETA Test. Kindly help by pointing out any errors to K.Srinivas.</h4>"
    tstr = "<table>"
    for m in fullmsgbuf:
        tstr += "<tr><td>" + m + "</td></tr>"
    tstr += "</table>"
    message += tstr
    message += "<h3>Available Hours[%d], Booked Hours[%d], Billed Hours[%d] and Leave Hours[%d]</h3>" % \
        (bcsUtilSummary["AvailableHours"], bcsUtilSummary["BookedHours"],
         bcsUtilSummary["BilledHours"], bcsUtilSummary["LeaveHours"])
    message += hrmsfooter + htmlfooter
    # Send the e-mail.
    notify(empEmail, "BETA:" + subject, message, templateId="-1")
def run():
    """Run the JD farm activity (watering, tasks, helps) for every account."""
    for cookies in jdCookie.get_cookies():
        result = postTemplate(cookies, 'initForFarm', {"version": 4})
        treeState = result["treeState"]
        if treeState == 0:
            # Planting has not started for this account; nothing to do.
            print("还未开始种植")
            continue
        if treeState in [2, 3]:
            # Tree is ready: notify that the reward can be exchanged.
            print("可以兑换了")
            notification.notify(f"""东东农场可兑换【{cookies["pt_pin"]}】""",
                                f"""东东农场 账号【{cookies["pt_pin"]}】 可以兑换了""")
            continue
        nickName = result["farmUserPro"]["nickName"]
        myshareCode = result["farmUserPro"]["shareCode"]
        treeEnergy = result["farmUserPro"]["treeEnergy"]
        # Each watering costs 10 energy: remaining waterings until harvest.
        lastTimes = int(
            (result["farmUserPro"]["treeTotalEnergy"] - treeEnergy) / 10)
        print(
            f"""\n\n[ {nickName} ]\n{result["farmUserPro"]["name"]} (通用红包)""")
        print(f'已经薅了{result["farmUserPro"]["winTimes"]}次')
        print(f"""我的助力码: {myshareCode}""")
        print(
            f"""treeEnergy: {treeEnergy}/{result["farmUserPro"]["treeTotalEnergy"]}"""
        )
        print(f"""剩余浇水次数: {lastTimes}""")
        # Daily sub-tasks (helpers defined elsewhere in this module).
        turnTable(cookies)
        clockIn(cookies)
        _help(cookies, shareCodes)
        takeTask(cookies)
        masterHelp(cookies)
        luck(cookies)
        duck(cookies)
        friends(cookies)
        bag(cookies)
        water(cookies)
        print("\n")
        print("##" * 30)
def save_teams():
    """Fetch the current user's teams, cache them, and notify the result.

    On success the team list is stored in settings and each team thumbnail
    is saved locally; every outcome produces a workflow notification.
    """
    r = api.method('/me/teams')
    if r.status_code == 200:
        teams = r.json()
        if len(teams) > 0:
            settings.set(teams=teams)
            for team in teams:
                save_image('http:' + team["thumbUrl"], 'team-' + team['name'])
            n.notify("AndBang Workflow Success", "Your teams were saved!",
                     "Teams: " + ', '.join([team["name"] for team in teams]))
        else:
            n.notify("AndBang Workflow Error", "No teams were saved",
                     "Please create one at http://andbang.com")
    else:
        # BUGFIX: this branch referenced an undefined name `resp`, raising
        # NameError instead of reporting the API's error message.
        n.notify("AndBang Workflow Error", r.json()["message"], "")
def save_members():
    """Cache member records (id, names, picture) for every saved team."""
    output = []
    for team in settings.get('teams', []):
        r = api.method('/members', team['id'])
        if r.status_code != 200:
            error = r.json()
            n.notify("AndBang Workflow Error", error["message"], "")
            continue
        members = r.json()
        if len(members) > 0:
            for member in members:
                save_image('http:' + member["smallPicUrl"], 'member-' + member['id'])
                # Keep only the fields the workflow needs.
                output.append({field: member[field] for field in ['id', 'username', 'smallPicUrl', 'firstName', 'lastName']})
            settings.set(members=output)
            n.notify("AndBang Workflow Success", "Your teams members were saved!", "")
        else:
            n.notify("AndBang Workflow Error", "No members were saved", "Have some people join your team!")
out.write(u'Failed to connect to transmission daemon "%s"\n' % e) #check if torrent is already being downloaded for keys in tc.info(): if tc.get_torrent(keys).hashString == hashlib.sha1(bencode.bencode(info)).hexdigest() or tc.get_torrent(keys).hashString == info: out.write('The torrent is being downloaded by transmission. No need to add it again.\n') tkey = keys # and if not found add the new file to transmission if tkey == -1: try: torrent = tc.add_torrent(torrent_filename, download_dir=tmpname) if FLAGS.verbose: out.write(' Added torrent to transmission...\n') out.write(' Torrent ID "%s"\n' % tkey) tkey = torrent._fields['id'].value notification.notify(socket.gethostbyname(socket.gethostname()),'MythNetTV downloads', 'Added a new torrent to download. %s' % (tc.get_torrent(tkey).name), tkey) except transmissionrpc.TransmissionError, e: out.write('Failed to add torrent "%s"' % e) return 0 # tell transmission to change the upload rate to the one we got from the database tc.change(tkey, uploadLimit=upload_rate, uploadLimited=True) stalecounter = 0 downloadtime = int(db.GetSetting('downloadtime')) * 60 startuptime = int(db.GetSetting('startuptime')) * 60 if FLAGS.verbose: out.write('Max startup seconds: %i. Max download seconds: %i\n' %(startuptime, downloadtime)) try: start_time = datetime.datetime.now() while (not download_ok) or (not exit): time.sleep(10) # don't hit transmission too much
def handle_message(message):
    """Turn an inbound e-mail into a new Issue or a comment on an existing one.

    Returns True when the message was processed, False when it was bounced
    because the sender is not an allowed contact. (Python 2 code: uses
    `unicode` for payload decoding.)
    """
    sla = determine_sla(message)
    sender = None
    from_addr = message['from']
    send_from = settings.DEFAULT_FROM_ADDR
    # "Display Name <addr>" -> "addr"
    if re.search('<[^>]*>', from_addr):
        from_addr = re.search('<([^>]*)>', from_addr).groups(0)[0]
    if not Contact.objects.filter(email=from_addr).exists():
        if settings.ALLOW_NON_CONTACTS:
            sender = Contact.objects.create(email=from_addr)
        else:
            # Unknown sender and non-contacts are not allowed: bounce.
            notify("issue_bounced", {}, send_from, from_addr)
            return False
    else:
        sender = Contact.objects.filter(email=from_addr)[0]
    if sla and not sla.is_contact(sender) and not \
            settings.ALLOW_NON_CONTACTS:
        # Sender exists but is not covered by this SLA: bounce.
        notify("issue_bounced", {}, send_from, from_addr)
        return False

    # Is this a reply to an existing issue?
    match = ISSUE_SUBJECT_MATCH.search(message['subject'])

    # Parse message payload
    body = []
    html = []
    attachments = []
    counter = 0
    for part in message.walk():
        # multipart/* are just containers
        if part.get_content_maintype() == 'multipart':
            continue
        # Applications should really sanitize the given filename so that an
        # email message can't be used to overwrite important files
        if part.get_content_type() == "text/plain":
            body.append(unicode(
                part.get_payload(decode=True),
                part.get_content_charset() or "utf8",
                'ignore'
            ))
        elif part.get_content_type() == "text/html":
            if part.get_content_charset() is None:
                # Header gave no charset; sniff it from the raw part.
                charset = str(chardet.detect(str(part))['encoding'])
            else:
                charset = str(part.get_content_charset())
            text = unicode(part.get_payload(decode=True), charset, "ignore")
            html.append(html2text(text).replace(" _place_holder;", " "))
        elif part.get_content_type() == "application/ms-tnef":
            # Outlook winmail.dat container: unpack its attachments.
            tnef = TNEF(part.get_payload(decode=True), do_checksum=False)
            for tnef_att in tnef.attachments:
                att = Attachment()
                att._file = ContentFile(tnef_att.data)
                att._file.name = tnef_att.long_filename()
                # BUGFIX: guess_type() returns a (type, encoding) tuple;
                # store only the mime-type string.
                att.mimetype = mimetypes.guess_type(tnef_att.long_filename())[0]
                attachments.append(att)
        else:
            filename = part.get_filename()
            if not filename:
                ext = mimetypes.guess_extension(part.get_content_type())
                if not ext:
                    # Use a generic bag-of-bits extension
                    ext = '.bin'
                filename = 'part-%03d%s' % (counter, ext)
                counter += 1
            att = Attachment()
            att._file = ContentFile(part.get_payload(decode=True))
            att._file.name = filename
            att.mimetype = part.get_content_type()
            attachments.append(att)

    # Some emails contain both html and plain. We assume that in this case,
    # we only need the html part.
    if html:
        body = "\n\n".join(html)
    else:
        body = "\n\n".join(body)

    issue = None
    comment = None
    send_notification_tpl = None
    if match:
        issue_id = int(match.groups()[0])
        try:
            issue = Issue.objects.get(pk=issue_id)
            comment = issue.comments.create(comment=body, comment_by=sender)
            send_notification_tpl = "comment_received"
            # A reply re-opens an issue that was waiting on the contact.
            if issue.status == settings.ISSUE_STATUS_WAIT:
                issue.set_status(settings.ISSUE_STATUS_OPEN)
        except Issue.DoesNotExist:
            pass
    if not issue:
        # BUGFIX: the title was sliced with [100:], which DROPS the first 100
        # characters; [:100] truncates to the first 100 as intended.
        issue = Issue(title=message['subject'][:100], contact=sender,
                      text=body, sla=sla)
        send_notification_tpl = "issue_received"
        if sla and sla.default_service:
            issue.service = sla.default_service
    # Persist the issue and attach any files (replies included).
    issue.save()
    for att in attachments:
        att.issue = issue
        att.save()
    if send_notification_tpl:
        to_addr = from_addr
        notify(send_notification_tpl, {"issue": issue, "comment": comment},
               issue.email_from, to_addr)
    return True
def test_uns_not_called_in_tests(self):
    """Under the default TESTING config, UNS must never be contacted."""
    with self.record_uns_calls() as recorded:
        notification.notify(self.item, 'comment')
    self.assertEqual(len(recorded), 0)
def sendmsg(content: str) -> None:
    """Push *content* through the notifier under the fixed app title."""
    notification.notify("企鹅读书通知", content)
def handle_message(message):
    """Turn an inbound e-mail into a new Issue or a comment on an existing one.

    Returns True when the message was processed, False when it was bounced
    because the sender is not an allowed contact. (Python 2 code: uses
    `unicode` for payload decoding.)
    """
    sla = determine_sla(message)
    sender = None
    from_addr = message['from']
    send_from = settings.DEFAULT_FROM_ADDR
    # "Display Name <addr>" -> "addr"
    if re.search('<[^>]*>', from_addr):
        from_addr = re.search('<([^>]*)>', from_addr).groups(0)[0]
    if not Contact.objects.filter(email=from_addr).exists():
        if settings.ALLOW_NON_CONTACTS:
            sender = Contact.objects.create(email=from_addr)
        else:
            # Unknown sender and non-contacts are not allowed: bounce.
            notify("issue_bounced", {}, send_from, from_addr)
            return False
    else:
        sender = Contact.objects.filter(email=from_addr)[0]
    if sla and not sla.is_contact(sender) and not \
            settings.ALLOW_NON_CONTACTS:
        # Sender exists but is not covered by this SLA: bounce.
        notify("issue_bounced", {}, send_from, from_addr)
        return False

    # Is this a reply to an existing issue?
    match = ISSUE_SUBJECT_MATCH.search(message['subject'])

    # Parse message payload
    body = []
    html = []
    attachments = []
    counter = 0
    for part in message.walk():
        # multipart/* are just containers
        if part.get_content_maintype() == 'multipart':
            continue
        # Applications should really sanitize the given filename so that an
        # email message can't be used to overwrite important files
        if part.get_content_type() == "text/plain":
            body.append(unicode(
                part.get_payload(decode=True),
                part.get_content_charset() or "utf8",
                'ignore'
            ))
        elif part.get_content_type() == "text/html":
            if part.get_content_charset() is None:
                # Header gave no charset; sniff it from the raw part.
                charset = str(chardet.detect(str(part))['encoding'])
            else:
                charset = str(part.get_content_charset())
            text = unicode(part.get_payload(decode=True), charset, "ignore")
            html.append(html2text(text).replace(" _place_holder;", " "))
        elif part.get_content_type() == "application/ms-tnef":
            # Outlook winmail.dat container: unpack its attachments.
            tnef = TNEF(part.get_payload(decode=True), do_checksum=False)
            for tnef_att in tnef.attachments:
                att = Attachment()
                att._file = ContentFile(tnef_att.data)
                att._file.name = tnef_att.long_filename()
                # BUGFIX: guess_type() returns a (type, encoding) tuple;
                # store only the mime-type string.
                att.mimetype = mimetypes.guess_type(tnef_att.long_filename())[0]
                attachments.append(att)
        else:
            filename = part.get_filename()
            if not filename:
                ext = mimetypes.guess_extension(part.get_content_type())
                if not ext:
                    # Use a generic bag-of-bits extension
                    ext = '.bin'
                filename = 'part-%03d%s' % (counter, ext)
                counter += 1
            att = Attachment()
            att._file = ContentFile(part.get_payload(decode=True))
            att._file.name = filename
            att.mimetype = part.get_content_type()
            attachments.append(att)

    # Some emails contain both html and plain. We assume that in this case,
    # we only need the html part.
    if html:
        body = "\n\n".join(html)
    else:
        body = "\n\n".join(body)

    issue = None
    comment = None
    send_notification_tpl = None
    if match:
        issue_id = int(match.groups()[0])
        try:
            issue = Issue.objects.get(pk=issue_id)
            comment = issue.comments.create(comment=body, comment_by=sender)
            send_notification_tpl = "comment_received"
            # A reply re-opens an issue that was waiting on the contact.
            if issue.status == settings.ISSUE_STATUS_WAIT:
                issue.set_status(settings.ISSUE_STATUS_OPEN)
        except Issue.DoesNotExist:
            pass
    if not issue:
        issue = Issue(title=message['subject'], contact=sender,
                      text=body, sla=sla)
        send_notification_tpl = "issue_received"
        if sla and sla.default_service:
            issue.service = sla.default_service
    # Persist the issue and attach any files (replies included).
    issue.save()
    for att in attachments:
        att.issue = issue
        att.save()
    if send_notification_tpl:
        to_addr = from_addr
        notify(send_notification_tpl, {"issue": issue, "comment": comment},
               issue.email_from, to_addr)
    return True
def sendmsg(title: str, content: str) -> None:
    """Forward *title* and *content* to the notification backend."""
    notification.notify(title, content)
def add(event):
    """Validate *event*, persist it, and broadcast an 'add' notification."""
    _validate_event(event)
    db.event_add(event)
    # NOTE(review): the message mixes '{}' and '%s' placeholders; only '%s'
    # is filled here — confirm the leading '{}' is substituted downstream
    # (e.g. with the acting user's name) by notify().
    notify(u'{} added a new Event - <strong>%s</strong>' % event['title'],
           action='add', target='event')
def main():
    """CLI entry: download a (live) YouTube video, remux it, upload, notify.

    Steps (each skippable via the --no-* flags): fetch metadata, wait for
    upcoming streams, download with streamlink, remux to mp4 with ffmpeg,
    upload, send a notification, then clean up work files and state.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--no-download', action='store_true')
    parser.add_argument('--no-remux', action='store_true')
    parser.add_argument('--no-upload', action='store_true')
    parser.add_argument('--no-notify', action='store_true')
    parser.add_argument('--no-delete', action='store_true')
    parser.add_argument('--force', action='store_true')
    parser.add_argument('--force-log-to-file', action='store_true')
    parser.add_argument('--override-channel-name')
    parser.add_argument('--override-video-name')
    parser.add_argument('video_id')
    args = parser.parse_args()
    config = load_config()
    # Log to a per-video file when not attached to a TTY (or when forced).
    log_filename = LOGDIR / f'{args.video_id}-{os.getpid()}.log' if not sys.stdout.isatty(
    ) or args.force_log_to_file else None
    # For subprocess: the same handle is reused for stdout/stderr below.
    log_file = log_filename.open('a') if log_filename else None
    if log_file:
        sys.stderr = sys.stdout = log_file
    setup_logging(filename=log_filename)
    log.info(f'Starting download for {args.video_id}')
    # Refuse to run concurrently with another downloader for the same video.
    pid_exists, active_downloaders = check_pid(args.video_id)
    if pid_exists and not args.force:
        raise ValueError('Another downloader is still alive, exiting')
    else:
        active_downloaders[args.video_id] = os.getpid()
        with open_state() as state:
            state['active_downloaders'] = active_downloaders
    if args.override_channel_name and args.override_video_name:
        log.info(
            'Using overridden channel and video name, setting is_upcoming to false'
        )
        channel_name = args.override_channel_name
        video_name = args.override_video_name
        # There's no reason to use these overrides for an upcoming video
        is_upcoming = False
    else:
        player_response = get_video_info(args.video_id)
        if 'videoDetails' not in player_response:
            # NOTE: literal concatenation joins the f-string and the plain
            # string first, then .format() fills the {} placeholders.
            log.error(f'{args.video_id} has no details, cannot proceed '
                      '(playability: {}, {})'.format(
                          player_response["playabilityStatus"]["status"],
                          player_response["playabilityStatus"]["reason"],
                      ))
            sys.exit(1)
        else:
            channel_name = player_response['videoDetails']['author']
            video_name = player_response['videoDetails']['title']
            is_upcoming = player_response['videoDetails'].get(
                'isUpcoming', False)
    log.info(f'Channel: {channel_name}')
    log.info(f'Title: {video_name}')
    log.info(f'Upcoming: {is_upcoming}')
    if is_upcoming:
        # Block until the stream actually goes live.
        wait(player_response, config)
    filename_base = sanitize_filename(video_name)
    log.info(f'Filename base: {filename_base}')
    # Copy youtube-dl's naming scheme
    filepath_streamlink = WORKDIR / f'{filename_base}-{args.video_id}.ts'
    # TODO: If file already exists, rename it and concatenate it later?
    # XXX: youtube-dl used to be less reliable than streamlink for downloading
    # streams - that may no longer be the case.
    # XXX: Invoke this in a less hacky manner. The reason for doing this is
    # that I wanted to use streamlink inside the venv but in a separate
    # process, without hardcoding the path of the venv.
    streamlink_args = [
        '--force',  # Overwrite any existing file
        '--hls-timeout', '60',
        # XXX: This doesn't work right now!
        # See https://github.com/streamlink/streamlink/issues/2936
        '--hls-live-restart',
        '--retry-streams', '10',
        '--retry-max', '10',
        '-o', str(filepath_streamlink),
        f'https://www.youtube.com/watch?v={args.video_id}',
        'best',
    ]
    if not args.no_download:
        log.info(f'Starting streamlink with args: {streamlink_args}')
        # Fork and run streamlink's entry point in the child process.
        # NOTE(review): the child does not explicitly exit after
        # streamlink_main() returns — confirm streamlink_main() always
        # terminates the process, otherwise the child would continue here.
        fork_return = os.fork()
        if fork_return == 0:
            sys.argv = streamlink_args
            streamlink_main()
        else:
            os.wait()
    else:
        log.info('Skipping download')
    filename_output = f'{filename_base}-{args.video_id}.mp4'
    filepath_output = WORKDIR / filename_output
    # Stream-copy remux to mp4; faststart moves the moov atom up front so
    # playback can start before the whole file is downloaded.
    ffmpeg_args = (
        'ffmpeg',
        '-y',
        '-i', filepath_streamlink,
        '-c', 'copy',
        '-movflags', 'faststart',
        '-metadata', f'title={video_name}',
        '-metadata', f'artist={channel_name}',
        '-metadata', f'comment=https://www.youtube.com/watch?v={args.video_id}',
        filepath_output,
    )
    if not args.no_remux:
        log.info('Remuxing to mp4')
        subprocess.run(ffmpeg_args, stdout=log_file)
    else:
        log.info('Skipping remux')
    # Upload
    if not args.no_upload:
        link_url, thumbnail = upload(
            sanitize_filename(channel_name),
            # This argument duplication is kind of silly...
            filename_output,
            filepath_output,
        )
        # We won't have link and thumb if not uploading without
        # going through a bunch more effort.
        if not args.no_notify:
            notify(
                channel_name,
                video_name,
                link_url,
                thumbnail,
            )
        else:
            log.info('Skipping notify')
    else:
        log.info('Skipping upload')
    if not args.no_delete:
        log.info('Deleting work files')
        filepath_streamlink.unlink()
        filepath_output.unlink()
        log.info('Cleaning up state')
        with open_state() as state:
            active_downloaders = state.get('active_downloaders', {})
            active_downloaders.pop(args.video_id, None)
    else:
        log.info('Skipping cleanup')
    log.info('All done!')
# Re-export the public API of the core sub-modules at package level.
from stats import *
import connectivity
from connectivity import *
import qc
from qc import *
import eprime
import general
import notification
import freesurfer
import driver

# Check PyPI for a newer release of this package (Python 2: xmlrpclib).
# `version`, `notify` and `level` are expected to come from the star-imports
# above — presumably stats/qc/notification; confirm.
import xmlrpclib
try:
    pypi = xmlrpclib.ServerProxy('http://pypi.python.org/pypi')
    latest = pypi.package_releases('neural-fmri')
    if latest:
        if latest[0] != version:
            notify('## Update to neural available on PyPI (current version: %s; latest version: %s)' % (version, latest[0]), level=level.debug)
except:
    # Deliberate best-effort: never let a failed update check (no network,
    # PyPI down) break importing this package.
    pass

# User customization: only greet when attached to an interactive terminal.
import sys
if sys.stdout.isatty():
    import personality
    personality.display('greeting')
    personality.set_goodbye()
def run(self):
    """Collect installed and remote versions of the four Arch kernels.

    Emits via self.signal a 12-element list:
    [4 local versions, 4 remote versions, 4 install/uninstall button labels].
    A kernel that is not installed reports "Kernel not installed !"; a
    failing remote query notifies an error and aborts (the original code
    fell through and crashed with NameError on undefined remote versions).
    """
    NOT_INSTALLED = "Kernel not installed !"

    def _query(flag, package):
        # Run `pacman -Qi/-Si <pkg>` and strip the "Version" label.
        return (subprocess.check_output(
            ["pacman -%si %s | grep 'Version'" % (flag, package)],
            stderr=subprocess.STDOUT,
            shell=True,
        ).decode("ascii").replace("Version : ", ""))

    def _local(package):
        # Installed version, or the sentinel when pacman reports no package.
        try:
            return _query("Q", package)
        except subprocess.CalledProcessError:
            return NOT_INSTALLED

    stableVersion = _local("linux")
    ltsVersion = _local("linux-lts")
    hardVersion = _local("linux-hardened")
    zenVersion = _local("linux-zen")

    try:
        remoteStable = _query("S", "linux")
        remoteLts = _query("S", "linux-lts")
        remoteHard = _query("S", "linux-hardened")
        remoteZen = _query("S", "linux-zen")
    except subprocess.CalledProcessError:
        notification.notify("Error getting kernels version !")
        # BUGFIX: the remote versions are undefined past this point; the old
        # code fell through and raised NameError on the comparisons below.
        return

    # Notify once if any installed kernel lags behind its repo version.
    if (stableVersion != remoteStable and stableVersion != NOT_INSTALLED
            or ltsVersion != remoteLts and ltsVersion != NOT_INSTALLED
            or hardVersion != remoteHard and hardVersion != NOT_INSTALLED
            or zenVersion != remoteZen and zenVersion != NOT_INSTALLED):
        notification.notify("A kernel update is available !")

    def _button(local, remote):
        # Up-to-date kernels can only be uninstalled; anything else
        # (outdated or missing) gets the install/update action.
        return "Uninstall kernel !" if local == remote else "Install/Update kernel !"

    stableBtn = _button(stableVersion, remoteStable)
    ltsBtn = _button(ltsVersion, remoteLts)
    hardBtn = _button(hardVersion, remoteHard)
    zenBtn = _button(zenVersion, remoteZen)

    kernel = [
        stableVersion, ltsVersion, hardVersion, zenVersion,
        remoteStable, remoteLts, remoteHard, remoteZen,
        stableBtn, ltsBtn, hardBtn, zenBtn,
    ]
    self.signal.emit(kernel)
def sendmsg(content: str) -> None:
    """Push *content* under the fixed app title and log that it was sent."""
    notification.notify("企鹅读书通知", content)
    print("已成功发送通知!")
async def main(self, start_step=0):
    """Heart of the worker - goes over each point and reports sightings.

    Walks ``self.points`` starting at *start_step*, querying map objects
    at each (slightly jittered) coordinate, persisting seen Pokemon and
    forts through ``self.db_processor``, and firing notifications for
    configured Pokemon IDs.

    Raises:
        MalformedResponse: the API reply is not a dict or lacks responses.
        BannedAccount: the API reports status_code 3.
    """
    self.seen_per_cycle = 0
    self.step = start_step or 0
    loop = asyncio.get_event_loop()
    for i, point in enumerate(self.points):
        # Jitter the scan position slightly so repeated visits are not
        # byte-identical (~1 m horizontally, +/-20 m altitude).
        latitude = random.uniform(point[0] - 0.00001, point[0] + 0.00001)
        longitude = random.uniform(point[1] - 0.00001, point[1] + 0.00001)
        altitude = random.uniform(point[2] - 20, point[2] + 20)
        if not self.running:
            return
        self.logger.info(
            'Visiting point %d (%s,%s %sm)', i, round(latitude, 4),
            round(longitude, 4), round(altitude))
        start = time.time()
        self.api.set_position(latitude, longitude, altitude)
        # Cell IDs for a point never change, so compute them once per
        # index and memoize.
        if i not in self.cell_ids:
            self.cell_ids[i] = await loop.run_in_executor(
                self.cell_ids_executor,
                partial(pgoapi_utils.get_cell_ids, latitude, longitude))
        cell_ids = self.cell_ids[i]
        # NOTE(review): self.call_api(...) is evaluated eagerly and its
        # result handed to run_in_executor -- presumably it returns a
        # callable; confirm against its definition.
        response_dict = await loop.run_in_executor(
            self.network_executor,
            self.call_api(self.api.get_map_objects,
                          latitude=pgoapi_utils.f2i(latitude),
                          longitude=pgoapi_utils.f2i(longitude),
                          cell_id=cell_ids))
        processing_start = time.time()
        if not isinstance(response_dict, dict):
            self.logger.warning('Response: %s', response_dict)
            raise MalformedResponse
        if response_dict['status_code'] == 3:
            # BUGFIX: was the bare name `logger`; every other call in
            # this method uses the instance logger.
            self.logger.warning('Account banned')
            raise BannedAccount
        responses = response_dict.get('responses')
        if not responses:
            self.logger.warning('Response: %s', response_dict)
            raise MalformedResponse
        map_objects = responses.get('GET_MAP_OBJECTS', {})
        pokemons = []
        longspawns = []
        forts = []
        if map_objects.get('status') == 1:
            for map_cell in map_objects['map_cells']:
                for pokemon in map_cell.get('wild_pokemons', []):
                    # Store spawns outside of the 15 minute range in a
                    # different table until they fall under 15 minutes,
                    # and notify about them differently.
                    long_spawn = (
                        pokemon['time_till_hidden_ms'] < 0
                        or pokemon['time_till_hidden_ms'] > 3600000)
                    if long_spawn:
                        if store_longspawns:
                            longspawn_deque.append(pokemon['encounter_id'])
                        else:
                            continue
                    normalized = self.normalize_pokemon(
                        pokemon, map_cell['current_timestamp_ms'])
                    pokemons.append(normalized)
                    try:
                        if normalized['pokemon_id'] in config.NOTIFY_IDS:
                            notified = notification.notify(pokemon)
                            if notified[0]:
                                self.logger.info(
                                    'Successfully ' + notified[1] + '.')
                            else:
                                if notified[1] == 'Already notified.':
                                    self.logger.warning(
                                        'Skipped sending duplicate notification.'
                                    )
                                else:
                                    self.logger.error(notified[1])
                        if (long_spawn or
                                pokemon['encounter_id'] in longspawn_deque):
                            normalized['time_till_hidden_ms'] = pokemon[
                                'time_till_hidden_ms']
                            normalized[
                                'last_modified_timestamp_ms'] = pokemon[
                                    'last_modified_timestamp_ms']
                            longspawns.append(normalized)
                    except NameError:
                        # config.NOTIFY_IDS / longspawn_deque may not be
                        # defined; notification support is optional here.
                        pass
                for fort in map_cell.get('forts', []):
                    if not fort.get('enabled'):
                        continue
                    if fort.get('type') == 1:  # probably pokestops
                        continue
                    forts.append(self.normalize_fort(fort))
        self.db_processor.add(pokemons)
        self.db_processor.add(forts)
        if store_longspawns:
            self.db_processor.add(longspawns)
        self.seen_per_cycle += len(pokemons)
        self.total_seen += len(pokemons)
        self.logger.info(
            'Point processed, %d Pokemons and %d forts seen!',
            len(pokemons), len(forts),
        )
        # Clear error code and let know that there are Pokemon
        if self.error_code and self.seen_per_cycle:
            self.error_code = None
        self.step += 1
        self.last_step_run_time = (
            time.time() - start - self.last_api_latency)
        processing_time = time.time() - processing_start
        # Sleep the configured delay window minus time already spent
        # processing; with several proxies use a triangular distribution.
        sleep_min = config.SCAN_DELAY[0] - processing_time
        sleep_max = config.SCAN_DELAY[1] - processing_time
        if len(config.PROXIES) > 2:
            sleep_mode = config.SCAN_DELAY[2]
            sleep_time = random.triangular(sleep_min, sleep_max, sleep_mode)
        else:
            sleep_time = random.uniform(sleep_min, sleep_max)
        await self.sleep(sleep_time)
    if self.seen_per_cycle == 0:
        self.error_code = 'NO POKEMON'
def delete(event):
    """Remove *event* from the database and broadcast a deletion notice.

    The leading '{}' placeholder is left for notify() to fill in.
    """
    db.event_delete(event['id'])
    message = u'{} deleted an Event - <strong>%s</strong>' % event['title']
    notify(message, action='delete', target='event')
def update(event):
    """Validate *event*, persist the change, and broadcast an edit notice.

    The leading '{}' placeholder is left for notify() to fill in.
    """
    _validate_event(event)
    db.event_update(event)
    message = u'{} updated an Event - <strong>%s</strong>' % event['title']
    notify(message, action='edit', target='event')
#upward lift #Upwards definition is orientation (roll= 0,pitch= 0,yaw=Any value) within a certain threshold, and then Z acceleration change by a certain amt. #Note Gyro Has drift if ACCz > threshold_accel_z: if upwards_cycle_count <= 5: upwards_cycle_count = upwards_cycle_count + 1 va1 = val1 + ACCz/1000 else: upwards_cycle_count = 0 notification_count = notification_count + 1 publisher.publish(client, "Sudden Flip Detected!!") #print("Upwards Lift Detected: " +str(ACCz/1000)) if notification_count % 8 ==0: notification_count = 1 notification.notify("NightLight Detect: Danger", "Sudden Flip Detected!") else: upwards_cycle_count = 0 #if ACCy < threshold_accel_y : # if forwards_cycle_count <= forwards_cycle_cap: # forwards_cycle_count = forwards_cycle_count + 1 # else: # forwards_cycle_count = 0 #publisher.publish(client,"Forwards Lift Detected: " +str(ACCz/1000) #print("Forward Push Detected: " +str(ACCx/1000)) # else: # forwards_cycle_count = 0 if CFangleX < 90 and CFangleX > 55: if standsup_cycle_count <= 20: standsup_cycle_count = standsup_cycle_count + 1
def dataMissingMsg(**kwargs):
    """Build an HTML table of an employee's HRMS fields, flagging missing ones.

    Keyword args (one of the first two is required):
        email          -- employee office e-mail address
        empObj         -- employee object; OFFICE_EMAIL_ID is read from it
        sendEmailToEmp -- when truthy and data is missing, mail the report

    Returns a (message, flag) tuple in most cases; when data is missing and
    no mail is requested it returns the raw HTML report string instead
    (kept as-is for backward compatibility with existing callers).
    """
    # BUGFIX: use .get() so that supplying only one of email/empObj no
    # longer raises KeyError on the absent key.
    if kwargs.get("email"):
        email = kwargs["email"]
        msgDict = getEmpDictbyEmail(email)  # If we got an e-mail ID
    elif kwargs.get("empObj"):
        emp = kwargs["empObj"]
        msgDict = getEmpDictbyObj(emp)  # If we got an emp-object
        email = emp.OFFICE_EMAIL_ID
    else:
        print("No valid Arguments")
        return ("No valid Arguments", False)

    sendEmailToEmp = kwargs.get("sendEmailToEmp", False)

    # For absolute SAFETY, check that the domain is the company's,
    # else refuse to proceed.
    if '@' in email:
        (name, domain) = email.split('@')
        if domain:
            domain = domain.lower()
            if domain != 'msg-global.com':
                return ("Invalid Email:" + email, False)
        else:
            return ("Invalid Email:" + email, False)
    else:
        return ("Invalid Email:" + email, False)

    num = 0     # No of items per line
    errNum = 0  # No. of errors found
    # Start creating the formatted output: three fields per table row,
    # "Not Available" values rendered in red.
    retStr = "<table border='1'>"
    errStr = ""  # Put errors separately, att to the top
    msgStr = "<tr><td style=\"padding-left:5px;\">"
    lasttr = 0
    for k in msgDict.keys():
        if msgDict[k] == "Not Available":
            msgStr += "%s:<a><b style=\"color:red;\">%s</b></a>" % (k, msgDict[k])
            errNum += 1  # Some error occured
        else:
            msgStr += "%s:%s" % (k, msgDict[k])
        num += 1
        if not num % 3:
            # NOTE(review): emits "</td><tr><td" without closing the row
            # (no "</tr>"); browsers tolerate it, left unchanged to keep
            # the generated HTML byte-identical.
            msgStr += "</td><tr><td style=\"padding-left:5px;\">"
            lasttr = 1
        else:
            msgStr += "</td><td style=\"padding-left:5px;\">"
            lasttr = 0
    retStr += msgStr
    if not lasttr:
        retStr += "</tr>"
    retStr += "</table>"

    if errNum and sendEmailToEmp:
        message = htmlhead + hrmsdatacheck + retStr + hrmsfooter + htmlfooter
        notify(email, "HRMS: Data Update Required", message, templateId="-1")
        # print(email + " HRMS: Data Update Required "+ message )
        return ("Notification sent to:" + email, True)
    if errNum:
        return htmlhead + hrmsdatacheck + retStr + hrmsfooter + htmlfooter
    return ("Up to Date:" + email, False)
# NOTE(review): these yields are the tail of a prompt generator whose `def`
# lies outside this chunk; after the first prompts it yields "" forever so
# the loop below never runs out of prompts.
    yield("Encore une fois ?\n")
    yield("Pas fatigués ?\n")
    while True:
        yield("")

# Transit data sources polled for each entered address.
sourceProviders = [SourceProvider_ratp(), SourceProvider_jcdecaux_vls(), SourceProvider_transilien()]

# Interactive loop: print the next prompt, read an address, report each
# source's status (console + notification), then open a marker map.
for m in message():
    print(m)
    location = input("Adresse : ")
    position = position_of_location(location)
    if not position:
        print("Désolé, l'adresse n'est pas reconnue :-(")
    else:
        markers = []
        for source in main.gen_sources(sourceProviders, location):
            if source.problem():
                print("Problème : ", end='')
            else:
                print("Pas de problème : ", end='')
            notification.notify(source.message)
            markers.append((source.pos, source.name))
        open_markers(position, location, markers)
    # empty line
    print()
def lambda_handler(event, context):
    """AWS Lambda entry point: forward each queued record body to notify()."""
    records = event['Records']
    for record in records:
        payload = json.loads(record['body'])
        notify(payload)
def push_notification(self, msg):
    """Emit a desktop notification reporting detected noise, with *msg* as body."""
    notification.notify("Noise Detected", msg)