def publish_pushover(data):
    """Forward *data* as keyword arguments to pushover.pushover().

    Mutates ``data`` in place: the "timestamp" entry (assumed to be a
    datetime object -- TODO confirm with callers) is converted to a POSIX
    timestamp before sending.  Failures are printed and swallowed so a
    notification error never propagates to the caller (best-effort).
    """
    # BUG FIX: the original read `.timestamp` without parentheses, which
    # stored the bound method object instead of the numeric timestamp.
    data["timestamp"] = data["timestamp"].timestamp()
    try:
        pushover.pushover(**data)
    except Exception as e:
        # Best-effort notification: log and continue.
        print(e)
def send_notification(name):
    # Notify via Pushover that `name` made an Instagram post.
    # Make this script's own directory importable so the bundled
    # pushover module is found regardless of the working directory.
    sys.path.append(os.path.dirname(os.path.abspath(__file__)))
    from pushover import pushover
    # Arguments are (title, message) per the local pushover helper.
    # NOTE(review): message grammar should read "An Instagram post..." --
    # left unchanged here because it is a runtime string.
    pushover(
        '(Insta|Scripto)gram',
        'A Instagram post was made by %s' % name
    )
def Add(self, notify=True):
    """Insert this log entry into woodhouse.logs; optionally Pushover-notify.

    Returns whatever Sql.ex_add() returns for the INSERT (presumably a
    row id or success flag -- confirm against the Sql helper).
    """
    s = Sql("woodhouse")

    # BUG FIX: values were concatenated into the SQL string unescaped, so
    # any apostrophe in post_by/title/text broke the statement (and made
    # it injectable).  Doubling single quotes is the SQL-standard escape.
    # A parameterized ex_add() API would be better still if Sql offers one.
    def _esc(value):
        return value.replace("'", "''")

    q = ("INSERT INTO logs (post_by, title, text) VALUES ('%s', '%s', '%s');"
         % (_esc(self.post_by), _esc(self.title), _esc(self.text)))

    if notify:
        # NOTE(review): hard-coded API token (and placeholder user key)
        # should be moved to configuration, not kept in source control.
        pushover(message=self.text,
                 title=self.post_by + ":" + self.title,
                 token="pvPX3Ag69Iwt4E9dvpVpA5xfoQLaEw",
                 user="******")
    return s.ex_add(q)
def checkNotifyManga(options):
    # Poll J-Novel Club for the latest manga events and send one Pushover
    # notification per event that has not been announced before.
    whiteList = []
    blackList = []
    # --nofilter bypasses the configured white/black lists entirely.
    if not options.nofilter:
        whiteList = config.jnc_manga_notify_whitelist
        blackList = config.jnc_manga_notify_blacklist
    networkMangaEvents = jncutils.events.getLatest(
        filterType=jncutils.EventType.Manga,
        requestLimit=int(options.limit),
        whiteList=whiteList,
        blackList=blackList)
    print "Found", len(networkMangaEvents)
    for event in networkMangaEvents:
        # checkinfo persists which events were already announced, so
        # repeated runs do not re-notify.
        if not jncutils.checkinfo.isNotifiedMangaEvent(event):
            pushover("[JNC] %s %s" % (event.name, event.details))
            jncutils.checkinfo.addNotifiedMangaEvent(event)
def main():
    """Report the first FitBit device's battery level via Pushover."""
    fb = fitbit.make_class()
    devices = fb.devices_json()
    # ROBUSTNESS FIX: devices_json() may return an empty list (no paired
    # device); the original crashed with IndexError on devices[0].
    if not devices:
        print("No FitBit devices found; nothing to report.")
        return
    device = devices[0]
    battery = device['battery']
    pu = p.pushover()
    pu.send_message('Current Battery Status: ' + battery)
def main():
    # Look up the first registered FitBit device and push its battery
    # status as a Pushover message.
    client = fitbit.make_class()
    first_device = client.devices_json()[0]
    notifier = p.pushover()
    notifier.send_message('Current Battery Status: ' + first_device['battery'])
def processSingleEvent(eventid, verbose=True):
    # Fetch a single JNC event by id, force-process it, and on success
    # regenerate the epub for its config.  Epub generation errors are
    # printed and reported via Pushover rather than raised.
    event = jncutils.events.getEvent(eventid, verbose=False)
    if not event:
        if verbose:
            print "Couldn't retrieve event", eventid
        return
    # Keep the normal batch path from re-queueing this event.
    event.setPreventDefaultQueueing()
    result = event.process(verbose=verbose)
    if result == jncutils.EventProcessResultType.Successful:
        jncutils.checkinfo.addSuccessfulEvent(event)
        cfg = wepubutils.ConfigFile(event.processedCfgid)
        cfgdata = cfg.read(verbose=False)
        try:
            wepubutils.EpubProcessor(cfgdata).make()
        except Exception, ex:
            print ex
            # raise
            pushover("[JNC] Error generating %s: %s" % (event.processedCfgid, ex))
def main():
    """Alert via Pushover if no steps were reported over the last 3 days.

    Sums the 'steps' value from get_day_data() for today and the two
    preceding days; a total of zero triggers the reminder message.
    """
    now = datetime.datetime.now()
    total = 0
    # CLEANUP: the original computed an unused `date = now.strftime(...)`
    # each iteration and never used the loop index; both removed.
    for _ in range(3):
        data = get_day_data(now)
        total = total + data['steps']
        now = now - datetime.timedelta(1)
    if total == 0:
        message = "You have not reported data over the last 3 days. Check your FitBit."
        pu = p.pushover()
        pu.send_message(message)
def main():
    # Sum the reported step counts for today and the two previous days;
    # if the total is zero, nudge the user through Pushover.
    day = datetime.datetime.now()
    step_total = 0
    for _ in range(3):
        date = day.strftime("%Y-%m-%d")  # kept from original (unused)
        step_total += get_day_data(day)['steps']
        day = day - datetime.timedelta(1)
    if step_total == 0:
        notifier = p.pushover()
        notifier.send_message(
            "You have not reported data over the last 3 days. Check your FitBit.")
def _started(self):
    # React to SONOS playback starting.  If the Yamaha is off: power it
    # on and configure input/volume.  If it is already on with a
    # different input: leave it alone (optionally notifying via
    # Pushover).  If the input already matches but the volume differs:
    # just sync the volume.
    if not self.yamaha.on:
        self._yamaha_on()
        self._yamaha_set_input()
        self._yamaha_set_volume()
        return

    yamaha_cfg = self.cfg['yamaha']
    if self.yamaha.input != yamaha_cfg['input']:
        friendly = yamaha_cfg['friendly_name']
        self.log.info("{} Yamaha ignoring, it's already on".format(friendly))
        pushover_cfg = self.cfg['notifications']['pushover']
        if pushover_cfg['enabled']:
            notify = pushover(pushover_cfg['user'],
                              api_token=pushover_cfg['app'])
            notify.send_message(
                "{} Yamaha ignoring SONOS because it is already on!"
                .format(friendly),
                title="{} Yamaha".format(friendly))
    elif self.yamaha.volume != yamaha_cfg['volume']:
        self._yamaha_set_volume()
from pushover import pushover

# Fire a one-shot "job completed" Pushover notification when run as a
# script.
# NOTE(review): `job_name` must be defined elsewhere in this file (not
# visible here); the API token is hard-coded and the user key is a
# placeholder -- both should come from configuration.
if __name__ == "__main__":
    pushover(
        title = "Completed",
        message = job_name,
        token = "ajy7q2FxFmUsdfsaaPzJ9v4Zn56mB45FsieL",
        user = "******"
    )
def send_notification(name):
    # Notify via Pushover that `name` made an Instagram post.
    # Make this script's own directory importable so the bundled
    # pushover module is found regardless of the working directory.
    sys.path.append(os.path.dirname(os.path.abspath(__file__)))
    from pushover import pushover
    # Arguments are (title, message) per the local pushover helper.
    # NOTE(review): message grammar should read "An Instagram post..." --
    # left unchanged here because it is a runtime string.
    pushover('(Insta|Scripto)gram', 'A Instagram post was made by %s' % name)
parser.add_argument("nBatch", type=int, help="total number of batches")
parser.add_argument("--nStart", "-s", type=int, help="start batch number", default=0)
parser.add_argument("jobName", help="job name in condor")
args = parser.parse_args()

# Shell pipeline counting this user's queued/running condor jobs whose
# name matches jobName.
query = "condor_q zhuyund | grep " + "\"" + args.jobName + "\"" + "| wc -l"
limit = 53
for i in range(args.nStart, args.nStart + args.nBatch):
    # Throttle: poll every 2 minutes until the running-job count drops
    # to the limit, then submit the next batch.
    while True:
        out = os.popen(query)
        nRunning = int(out.readline())
        print nRunning
        # NOTE(review): both branches assign the same value, so the
        # time-of-day check is a no-op and the initial limit of 53 is
        # dead -- probably a leftover edit; confirm intended limits.
        if datetime.datetime.now().hour < 9:
            limit = 3
        else:
            limit = 3
        if nRunning <= limit:
            break
        else:
            time.sleep(120)
    # NOTE(review): args.jobFilePrefix is not declared by the arguments
    # added above; it must be added elsewhere (or this is a bug).
    submitCommand = "condor_submit " + args.jobFilePrefix + '_' + str(i) + '.job'
    os.system(submitCommand)
    print submitCommand
pushover.pushover("run_dvCreation finish!")
def pushoverError(self, error="Unknown"):
    # Push a JNC error notification describing this event and the failure.
    text = "[JNC][ERROR] %s %s (%s)" % (self.name, self.details, error)
    pushover(text)
def pushoverOk(self):
    # Announce this event's successful processing via Pushover.
    summary = "[JNC] %s %s" % (self.name, self.details)
    pushover(summary)
def awesomeDownloader():
    # Search Awesome-HD for every wanted movie that hasn't been searched
    # in the last 24h, download matching .torrent files, notify via
    # Pushover, and record search timestamps in the local database.
    # Get current date and time
    import time,datetime
    timeNow = datetime.datetime.now()
    timePosix = int(time.mktime(timeNow.timetuple()))
    print "<--------------- Running Downloader ----------------->"
    print "Time: ",timeNow.strftime('%d/%m/%Y %H:%M')

    # Connect to local database
    from database_operations import create_session
    session = create_session()

    # Check if we need to search for movies
    from Movie import Movie
    nDownloaded = session.query(Movie).filter(Movie.downloaded == 1).count()
    # "Recently searched" means within the last 24 hours (86400 s).
    nRecent = session.query(Movie).filter(Movie.last_searched+86400 > timePosix).count()

    # Get movies we need to search for (not downloaded, not recently
    # searched), capped at s.maxSearches per run.
    movies = session.query(Movie).filter(Movie.downloaded == 0).filter(Movie.last_searched+86400 < timePosix).limit(s.maxSearches).all()

    nSnatched = 0
    if len(movies) > 0:
        # Login to Awesome-HD and search for movies
        doLogin()
        for movie in movies:
            print "---> SEARCHING FOR: "+movie.title.encode('utf8')+' ('+str(movie.year)+')'
            html = doSearch(movie.title, movie.year)
            link = getLink(html, movie.title, movie.year)
            if len(link) > 0:
                movie.link = link
                # Make sure we are using only valid chars in the filename
                torrentName = getValidFilename('.'.join(removeNonUnicodeChars(movie.title).split(' '))+'.('+str(movie.year)+').torrent')
                try:
                    filename = s.torrentPath+torrentName
                    with open(filename, 'w') as torrent_file:
                        for line in mechanize.urlopen(link).readlines():
                            torrent_file.write(line)
                    movie.downloaded = 1
                    print "======================================================"
                    print "DOWNLOADED TORRENT: "+torrentName
                    print "======================================================"
                    pushover("Snatched", movie.title+' ('+str(movie.year)+')')
                    nSnatched += 1
                except ValueError:
                    print "ERROR: Something went wrong in twill.."
            else:
                print "NO RESULTS FOR: "+movie.title.encode('utf8')+' ('+str(movie.year)+')'
            # Log time of search and add random interval +/- 2 hours to spread out searches.
            # Older movies get their next search pushed further out
            # (one extra day per year of age); TypeError (e.g. year is
            # None) falls back to a flat 30-day deferral.
            try:
                movie.last_searched = timePosix + abs(movie.year - timeNow.year)*86400 + int(random.uniform(-7200,7200))
            except TypeError:
                movie.last_searched = timePosix + 30*86400 + int(random.uniform(-7200,7200))
            session.add(movie)
            session.commit()
    # NOTE(review): the original contains a second commit; placed here as
    # a final flush after the loop -- confirm against upstream history.
    session.commit()
    print "======================================================"
    print "No. movies already downloaded: ", nDownloaded
    print "No. movies recently searched: ", nRecent
    print "No. movies searched for: ", len(movies)
    print "No. movies snatched: ", nSnatched
    print "<---------------------------------------------------->"
def parseRss(rssdef):
    # Process one RSS feed definition: find posts newer than the stored
    # last-seen timestamp, route each post's link into a wepub config,
    # then regenerate the epub for every config that gained a URL.
    # Pushover is used for both success and failure notifications.
    url = rssdef["url"]
    rssid = rssdef["id"] if "id" in rssdef else url

    print
    print "Processing feed", rssid

    d = feedparser.parse(url)

    lastseen = wrcheckinfo.getLastSeen(rssid)
    newlastseen = lastseen

    configs = {}
    configstoprocess = []
    found = 0
    added = 0
    # Feed entries arrive newest-first; reverse to handle oldest first.
    for post in reversed(d.entries):
        dt = datetime.fromtimestamp(mktime(post.published_parsed))
        if dt <= lastseen: continue
        found += 1
        if dt > newlastseen: newlastseen = dt
        print
        print "Found post:", post.title
        # "config" is a single default target; "configs" is a list of
        # (regex pattern, config) pairs matched against the post title,
        # last match wins.
        targetconfig = rssdef["config"] if "config" in rssdef else None
        if "configs" in rssdef:
            for pattern, pconfig in rssdef["configs"]:
                if re.search(pattern, post.title):
                    targetconfig = pconfig
        if not targetconfig:
            print "Couldn't find a target config - no match"
            pushover("[WepubRSS] No matching config for %s" % (post.title))
            continue
        # Cache ConfigFile objects so repeated posts reuse one instance.
        if targetconfig in configs:
            CFG = configs[targetconfig]
        else:
            CFG = ConfigFile(targetconfig)
            configs[targetconfig] = CFG
        if not CFG.addUrl(post.link):
            print "Couldn't add URL"
            pushover("[WepubRSS] Couldn't auto-add URL for %s" % (post.title))
            continue
        else:
            print "Added to config:", targetconfig
            added += 1
            if not targetconfig in configstoprocess:
                configstoprocess.append(targetconfig)
            pushover("[WepubRSS] %s" % (post.title))

    # Persist the newest timestamp we actually accepted.
    if newlastseen > lastseen:
        wrcheckinfo.setLastSeen(rssid, newlastseen)

    print
    print "Found", found, "; Added", added, "; Configs to process", len(configstoprocess)
    for configname in configstoprocess:
        epub = EpubProcessor(configs[configname].read())
        epub.make()
# Guard against low disk space before running the downloader.  The
# '.isdiskfull' flag file remembers whether we already warned ('1') or
# the disk was last seen healthy ('0'), so Pushover only fires on the
# transition to "full".
from pushover import pushover
from settings import torrentPath, minDiskSpace

# Creates a file for flagging low disk space.
# FIX: the original opened the probe handle and never closed it when the
# file already existed; context managers close deterministically.
try:
    with open('.isdiskfull', 'r'):
        pass
except IOError:
    with open('.isdiskfull', 'w'):
        pass

s = os.statvfs(torrentPath)
spaceLeft = (s.f_bavail * s.f_frsize / 1e9)  # free space in GB
if spaceLeft < minDiskSpace:
    # Only notify when the previous state was healthy ('0'); a fresh,
    # empty flag file reads as '' and skips the notification.
    with open('.isdiskfull', 'r') as f:
        previous_state = f.readline()
    if previous_state == '0':
        pushover('Awesome-DL','WARNING: Low disk space: '+str(spaceLeft)+' GB. Suspending downloads..')
    with open('.isdiskfull', 'w') as f:
        f.write('1')
    sys.exit("FATAL: Disk space too low (< "+str(minDiskSpace)+" GB)!")

# Disk is healthy: reset the flag.
with open('.isdiskfull', 'w') as f:
    f.write('0')

# Run downloader
from imdb_parser import imdbParse
from awesome_downloader import awesomeDownloader
try: logging.debug("copying file: "+filename+" to "+dump_location) # call off the transfer with from and to shutil.copy(mount_location + "/" + folder_to_dump + "/" + filename , dump_location) # copy files files_dumped += 1 except shutil.Error, err_msg: #rutrow something went wrong...this is as good as it gets now, eventually better debugging logging.warning("Unable to copy file: " + dirname) logging.warning(err_msg) except IOError, err_msg: #rutrow something went wrong...this is as good as it gets now, eventually better debugging logging.warning("Unable to copy file: " + dirname) logging.warning(err_msg) logging.info("done transfering files, see you next time") #pushover(True,dirs_dumped,files_dumped) pushover.pushover(message="Successfully dumped "+str(dirs_dumped)+" folders and "+str(files_dumped) + " files to " + dump_location,token = app_token,user = user_token,) if clean_dumptruck: #if the user wants a clean dumptruck move the files, otherwise just copy the files logging.debug("this doesnt do anything yet") if unmount_on_finish: #if user elected to unmount on finish then boot that drive out of the system subprocess.call(["umount",device_file]) logging.info(device_file + " unmounted") else: #if the users wants an unmount on a soft fail then the dude abides logging.info("Found nothing to dump") if unmount_on_fail: subprocess.call(["umount",device_file])
def checkLatestParts(options, verbose=True):
    # Pull the latest JNC "Part" events since the last processed date,
    # merge in previously errored events, process each one, then
    # regenerate epubs for every config that gained a successful part.
    lastprocessed = jncutils.checkinfo.getLastProcessed()
    jncutils.checkinfo.setLastCheckedNow()

    whiteList = []
    blackList = []
    # --nofilter bypasses the configured white/black lists.
    if not options.nofilter:
        whiteList = config.jnc_check_whitelist
        blackList = config.jnc_check_blacklist

    # Get events from API
    networkEvents = jncutils.events.getLatest(
        filterType=jncutils.EventType.Part,
        minDate=lastprocessed,
        requestLimit=int(options.limit),
        whiteList=whiteList,
        blackList=blackList)
    print "Found", len(networkEvents)

    # Read errored events to process
    erroredEvents = jncutils.checkinfo.getErroredEvents()
    if erroredEvents and len(erroredEvents) > 0:
        print "Also processing", len(erroredEvents), "errored events"

    # If we have the same event in both lists, take the one from the errored list,
    # since that one includes error information (like counters)
    erroredEventIds = [e.eventId for e in erroredEvents]
    events = [e for e in networkEvents if e.eventId not in erroredEventIds]
    events += erroredEvents

    configsToGenerate = []
    latestProcessedEvent = None
    for event in events:
        # Small delay between events to avoid hammering the API.
        sleep(5)
        result = event.process(verbose=verbose)

        shouldRegenerateEpub = False
        shouldMarkDateAsProcessed = False
        if result == jncutils.EventProcessResultType.Error:
            # Don't regenerate epub, don't save this date as completed
            pass
        elif result == jncutils.EventProcessResultType.Skipped:
            # Don't regenerate epub, but DO save this date
            shouldMarkDateAsProcessed = True
        elif result == jncutils.EventProcessResultType.AlreadyProcessed:
            # Don't regenerate epub, but DO save this date
            shouldMarkDateAsProcessed = True
        elif result == jncutils.EventProcessResultType.Successful:
            # Good! Regenerate and save date.
            shouldRegenerateEpub = True
            shouldMarkDateAsProcessed = True
        else:
            # What the frick!?
            print "Unknown EventProcessResultType", result
            raise Exception("What the frick!?")

        # Track the newest successfully handled event date.
        # NOTE(review): latestProcessedEvent is accumulated but never
        # persisted within this function -- confirm a later chunk (or a
        # missing call) is supposed to save it.
        if shouldMarkDateAsProcessed:
            if event.date and (not latestProcessedEvent or latestProcessedEvent < event.date):
                latestProcessedEvent = event.date

        if shouldRegenerateEpub:
            cfgid = event.processedCfgid
            if cfgid not in configsToGenerate:
                configsToGenerate.append(cfgid)
        #Done with the loop iteration

    # Regenerate each affected epub; generation errors are reported via
    # Pushover instead of raised.
    for cfgid in configsToGenerate:
        print
        print
        cfg = wepubutils.ConfigFile(cfgid)
        cfgdata = cfg.read(verbose=False)
        try:
            wepubutils.EpubProcessor(cfgdata).make()
        except Exception, ex:
            print ex
            #raise
            pushover("[JNC] Error generating %s: %s" % (cfgid, ex) )
# Pushover function call example: from pushover import pushover pushover('Test message', 'Test title', 1) # Last two parameters are optional. # Telegram function call example: from telegram import telegram telegram('Test message', 1, 1, 'markdownv2') # Last three parameters are optional. # Slack function call example: from slack import slack slack('Test message')