class Geocode:
    """Reverse-geocode (lon, lat) pairs via the Nominatim API, with an
    optional local BlitzDB file cache to avoid repeated lookups."""

    def __init__(self, geocoderCache=True, printStatus=False):
        self.printStatus = printStatus
        self.geocoderCache = geocoderCache
        if self.geocoderCache:
            # Cache of previous lookups, keyed by (lat, lon).
            self.db = FileBackend('./geo-cache')

    def getGeo(self, lon, lat):
        """Return a GeoAssign document for the coordinates, or None when
        Nominatim has no address for them.

        The returned document carries a 'cached' flag: True when it came
        from the local cache, False for a fresh API result.
        """
        if self.geocoderCache:
            try:
                nodeObj = self.db.get(GeoAssign, {'lat': lat, 'lon': lon})
                nodeObj['cached'] = True
                return nodeObj
            except GeoAssign.DoesNotExist:
                pass  # not cached yet - fall through to the API lookup

        if self.printStatus:
            print('lon: ' + str(lon) + ', lat: ' + str(lat) +
                  ' not in cache - start lookup at Nominatim-API')

        geolocator = Nominatim()
        location = geolocator.reverse([lat, lon], timeout=20)
        # BUG FIX: geopy returns None when the service yields no result;
        # the original code crashed on `location.raw` in that case.
        if location is None or 'address' not in location.raw:
            # got no results (i.e. coordinates are incorrect)
            return None

        nodeObj = GeoAssign({
            'lat': lat,
            'lon': lon,
            'payload': location.raw['address']
        })
        # BUG FIX: only persist when caching is enabled; self.db does not
        # exist otherwise (original raised AttributeError here).
        if self.geocoderCache:
            self.db.save(nodeObj)
            self.db.commit()
        nodeObj['cached'] = False
        return nodeObj
def on_data(self, data):
    """Twitter stream callback: persist non-retweet tweets to BlitzDB.

    Always returns True so the stream keeps running, even when an
    individual payload cannot be processed.
    """
    dict_data = json.loads(data)
    db_data = {}
    try:
        # Ignore retweets. Non-tweet payloads (e.g. delete notices) lack
        # the 'retweeted' key and raise KeyError, caught below.
        if dict_data['retweeted'] == False:
            try:
                pp("GET DATA")
                # Get relevant key value pairs only
                for KEY in DESIRED_KEYS:
                    db_data[KEY] = dict_data[KEY]
                db_data['screen_name'] = dict_data['user']['screen_name']
                cards = parse_text(db_data['text'])
                # Add it to the db_data dict to import into BlitzDB
                db_data['cards'] = cards
                pp(db_data)
            except KeyError:
                # BUG FIX: was a bare `except:` that swallowed every error
                # (including KeyboardInterrupt); missing keys are the only
                # expected failure here.
                pp("GET DATA FAILED")
            try:
                # Import into BlitzDB
                backend = FileBackend("./test-db")
                tweet = Tweet(db_data)
                backend.save(tweet)
                # backend.commit()
                pp("DATABASE INSERTION SUCCESSFUL")
            except Exception:
                # BUG FIX: narrowed from a bare `except:`; still best-effort
                # so one bad record does not kill the stream.
                pp("DATA INSERTION FAILED")
    except KeyError:
        # BUG FIX: the original bare `except:` reported *every* error as a
        # retweet; only the missing-'retweeted'-key case belongs here.
        pp("Retweet detected, skipping...")
    return True
class ImageToTumblr(object):
    """irc3 plugin that watches chat for image URLs and cross-posts them to
    a Tumblr blog, recording each URL in BlitzDB to avoid duplicates."""

    def __init__(self, bot):
        self.cfg = PluginConfig(self)
        # Comma-separated list of extensions to watch, e.g. "jpg,png,gif".
        self.image_filetypes = self.cfg.get("image_filetypes").split(",")
        self.db = FileBackend(self.cfg.get("main_db"))
        self.tumblr = pytumblr.TumblrRestClient(
            self.cfg.get("consumer_key"),
            self.cfg.get("consumer_secret"),
            self.cfg.get("oauth_token"),
            self.cfg.get("oauth_secret"),
        )
        irc3.base.logging.log(
            irc3.base.logging.WARN,
            "Tumblr poster ready! Posting all URLs with: %s" % self.image_filetypes
        )

    def post_image(self, text, poster):
        """Post the http URL found in `text` to Tumblr unless it was posted
        before; `poster` is the IRC nick credited in the caption."""
        # Strip everything but the address
        m = re.match(r".*(?P<url>http.*)", text)
        # BUG FIX: a message can contain a watched extension without any
        # http URL, in which case re.match returns None and the original
        # crashed on m.group().
        if m is None:
            return
        url = m.group("url")
        # Make sure we didn't do this one already
        try:
            self.db.get(PostedImage, {"url": url})
        except PostedImage.DoesNotExist:
            try:
                # First we post it to tumblr
                p = self.tumblr.create_photo(
                    "mmerpimages",
                    state="published",
                    source=str(url),
                    caption="Found by %s" % poster
                )
                irc3.base.logging.log(
                    irc3.base.logging.WARN,
                    "Posting image by %s: %s" % (poster, url))
                # And then record the fact that we did.
                self.db.save(PostedImage({"url": url}))
                self.db.commit()
            except Exception:
                # BUG FIX: narrowed from a bare `except:`; posting stays
                # best-effort but no longer swallows SystemExit etc.
                irc3.base.logging.log(
                    irc3.base.logging.WARN,
                    "Could not post to tumblr: %s" % url)
            return
        else:
            irc3.base.logging.log(
                irc3.base.logging.WARN,
                "Not posting duplicate image: %s" % url)
            return

    @irc3.event(irc3.rfc.PRIVMSG)  # Triggered on every message anywhere.
    def parse_image(self, target, mask, data, event):
        """Scan every message for a watched image extension and repost it."""
        for extension in self.image_filetypes:
            if "." + extension.lower() in data:
                self.post_image(data, mask.nick)
class Database:
    """Thin wrapper around a BlitzDB file backend configured via settings."""

    def __init__(self):
        self.backend = FileBackend(settings['database_location'])

    def save(self, entry):
        """Persist `entry` and commit immediately."""
        self.backend.save(entry)
        self.backend.commit()

    def search(self, table, query):
        """Return the first document in `table` matching `query`, or None.

        `table` is 'user' (User documents) or 'subm' (Submission documents).
        Raises ValueError for any other table name.
        """
        if table == 'user':
            doc = User
        elif table == 'subm':
            doc = Submission
        else:
            # BUG FIX: previously `doc` stayed unbound here, producing a
            # confusing UnboundLocalError at the get() call below.
            raise ValueError('unknown table: %r' % (table,))
        try:
            return self.backend.get(doc, query)
        except doc.DoesNotExist:
            return None
class Database(object):
    """Blitzdb database."""

    def __init__(self):
        """Load backend."""
        self.db = FileBackend(expanduser("~/.omesa/db"))

    def _query(self, fn, args):
        """Invoke query callable `fn` with `args`; on a stale-handle
        KeyError, reopen the backend once and retry via its filter."""
        try:
            return fn(*args)
        except KeyError:
            self.db = FileBackend(expanduser("~/.omesa/db"))
            return self.db.filter(*args)

    def save(self, doc):
        """Save document to db."""
        self.db.save(doc)
        self.db.commit()

    def fetch(self, doc, q):
        """Filter and return first entry."""
        try:
            return self._query(self.db.filter, (doc, q))[0]
        except IndexError:
            print(str(doc), str(q))
            print("File does not exist.")

    def get_component(self, doc, name):
        """Fetch the entry named `name` and return it decoded via sr."""
        # FIXME: see if returning non-decoded is relevant for anything
        try:
            hit = self._query(self.db.filter, (doc, {'name': name}))[0]
            return sr.decode(dict(hit))
        except IndexError:
            print(str(doc), {'name': name})
            print("File does not exist.")

    def getall(self, doc):
        """Returns all entries in db."""
        return list(self._query(self.db.filter, (doc, {})))
class Database(object):
    """Blitzdb database wrapper stored under ~/.omesa/db."""

    def __init__(self):
        """Load backend."""
        self.db = FileBackend(expanduser("~/.omesa/db"))

    def _query(self, f, q):
        # Apply query callable f to the argument tuple q. If the backend
        # handle has gone stale (KeyError), reopen it once and retry with
        # the fresh backend's filter.
        try:
            out = f(*q)
        except KeyError:
            self.db = FileBackend(expanduser("~/.omesa/db"))
            f = self.db.filter
            out = f(*q)
        return out

    def save(self, doc):
        """Save document to db."""
        self.db.save(doc)
        self.db.commit()

    def fetch(self, doc, q):
        """Filter and return first entry.

        Prints a message and implicitly returns None when nothing matches.
        """
        try:
            return self._query(self.db.filter, (doc, q))[0]
        except IndexError:
            print(str(doc), str(q))
            print("File does not exist.")

    def get_component(self, doc, name):
        """Return the entry whose 'name' field equals `name`, decoded via
        sr; prints a message and returns None when absent."""
        # FIXME: see if returning non-decoded is relevant for anything
        try:
            return sr.decode(dict(self._query(
                self.db.filter, (doc, {'name': name}))[0]))
        except IndexError:
            print(str(doc), {'name': name})
            print("File does not exist.")

    def getall(self, doc):
        """Returns all entries in db."""
        return [d for d in self._query(self.db.filter, (doc, {}))]
class eventmanager:
    """Drives DESGW trigger processing (Python 2 code).

    On construction it loads/creates the trigger record in a local blitzdb
    store, dumps exposure tables from the DES database, submits
    single-epoch jobs for existing images, then enters a polling loop that
    watches for new mountain-top exposures and submits difference-imaging
    DAGs once a hex's filter strategy is satisfied.
    """

    def __init__(self, trigger_id, jsonfilelist, triggerdir, datadir, real,
                 trigger_path):
        #os.system('kinit -k -t /var/keytab/desgw.keytab desgw/des/[email protected]')
        tstart = time.time()
        # Separate blitzdb stores for real triggers vs test runs.
        if real:
            self.backend = FileBackend("./realdb")
        else:
            self.backend = FileBackend("./testdb")
        try:
            thisevent = self.backend.get(Trigger, {'id': trigger_id})
            print 'Found this event in desgw database...'
        except Trigger.DoesNotExist:
            thisevent = Trigger({
                'id': trigger_id,
                'jsonfilelist': jsonfilelist,
                'triggerpath': triggerdir,
                'mapspath': datadir,
                'jobids': [
                    (0, 'jsonfile_corresponding_to_jobid.json'),
                ],
            })
            print 'Database entry created!'
        self.trigger_id = trigger_id
        self.trigger_path = trigger_path
        self.backend.save(thisevent)
        self.backend.commit()
        # Observing strategy (filters per hex) for this trigger.
        with open(os.path.join(triggerdir, "strategy.yaml"), "r") as f:
            self.config = yaml.safe_load(f)
        self.filterobslist = np.array(self.config['exposure_filter'],
                                      dtype='str')
        # Count of planned exposures per filter band.
        self.strategydict = {}
        for f in np.unique(self.filterobslist):
            self.strategydict[f] = len(
                self.filterobslist[self.filterobslist == f])
        self.connection = ea.connect(DATABASE)
        self.cursor = self.connection.cursor()
        self.jsonfilelist = jsonfilelist
        print self.jsonfilelist
        # Optional hard-coded override of the json list (module globals).
        if hardjson:
            self.jsonfilelist = hj
        self.trigger_id = trigger_id
        self.datadir = datadir
        self.triggerdir = triggerdir
        self.processingdir = os.path.join(self.triggerdir, 'PROCESSING')
        if not os.path.exists(self.processingdir):
            os.makedirs(self.processingdir)
        dire = './processing/' + trigger_id + '/'
        if not os.path.exists(dire):
            os.makedirs(dire)
        # NOTE(review): strategy.yaml is read a second time into
        # self.strategy (same content as self.config) - confirm intended.
        with open(os.path.join(triggerdir, "strategy.yaml"), "r") as f:
            self.strategy = yaml.safe_load(f)
        with open("jobmanager.yaml", "r") as g:
            self.jmconfig = yaml.safe_load(g)
        # Dump Y1 and current-epoch exposure tables, then concatenate them.
        q1 = "select expnum,nite,mjd_obs,telra,teldec,band,exptime,propid,obstype,object from exposure where " \
             "nite>20130828 and nite<20150101 and expnum<300000 and obstype='object' order by expnum"  # y1 images
        self.connection.query_and_save(q1, './processing/exposuresY1.tab')
        q2 = "select expnum,nite,mjd_obs,radeg,decdeg,band,exptime,propid,obstype,object from prod.exposure where " \
             "nite>20150901 and obstype='object' order by expnum"  # y2 and later
        self.connection.query_and_save(q2,
                                       './processing/exposuresCurrent.tab')
        os.system(
            'cat ./processing/exposuresY1.tab ./processing/exposuresCurrent.tab > ./processing/exposures.list'
        )
        self.submit_all_jsons_for_sejobs(
        )  #preps all DES images that already exist
        tfin = time.time()
        print 'TOTAL SE JOBS TIME', tfin - tstart
        #sys.exit()
        self.monitor_images_from_mountain(
        )  #A loop that waits for images off mountain and submits for processing

    def submit_all_jsons_for_sejobs(self):
        """Submit single-epoch (SE) preprocessing jobs for every json file,
        skipping ones already recorded in the preprocessing table."""
        obsStartTime = self.getDatetimeOfFirstJson(
            self.jsonfilelist[0])  # THIS IS A DATETIME OBJ
        currentTime = dt.utcnow()
        print '***** The current time is UTC', currentTime, '*****'
        delt = obsStartTime - currentTime
        # Hours until the first scheduled observation.
        timedelta = td(days=delt.days,
                       seconds=delt.seconds).total_seconds() / 3600.
        print '***** The time delta is ', timedelta, 'hours *****'
        # if timedelta > np.pi:
        sejob_timecushion = self.jmconfig["sejob_timecushion"]
        # Only submit if there is enough lead time before observing starts.
        if timedelta > sejob_timecushion:
            for jsonfile in self.jsonfilelist:
                print 'json', jsonfile
                try:
                    #check if this json file is already in the submitted preprocessing database
                    thisjson = self.backend.get(
                        preprocessing,
                        {'jsonfilename': os.path.join(self.datadir, jsonfile)})
                    print 'Found this json in desgw database...'
                except preprocessing.DoesNotExist:
                    #do submission and then add to database
                    print 'cd diffimg-proc; ./SEMaker_RADEC.sh ' + os.path.join(
                        self.datadir, jsonfile)
                    os.chdir("diffimg-proc")
                    out = os.popen('./SEMaker_RADEC.sh ' +
                                   os.path.join(self.datadir,
                                                jsonfile)).read()
                    #out = os.popen('ls').read()
                    os.chdir("..")
                    print out
                    if 'non-zero exit status' in out:
                        dt.sendEmailSubject(
                            self.trigger_id,
                            'Error in creating dag for .json: ' + out)
                    else:
                        # Pull the dag file path out of the SEMaker output and
                        # copy it into the processing directory.
                        for o in out.split('\n'):
                            if 'file://' in o:
                                dagfile = o.split('/')[-1]
                                self.dagfile = os.path.join(
                                    self.processingdir,
                                    jsonfile.split('/')[-1].split('.')[0] +
                                    '_' + dagfile)
                                os.system('cp diffimg-proc/' + dagfile + ' ' +
                                          self.dagfile)
                                jobsubmitline = copy(o)
                        print self.dagfile
                        print 'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; jobsub_submit_dag -G des --role=DESGW file://' + self.dagfile
                        out = os.popen(
                            'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; '
                            'jobsub_submit_dag -G des --role=DESGW file://' +
                            self.dagfile).read()
                        print out
                        if 'non-zero exit status' in out:
                            dt.sendEmailSubject(
                                self.trigger_id,
                                'Error in submitting .json for preprocessing: '
                                + out)
                        else:
                            # Debug mode: immediately remove the job we just
                            # submitted.
                            if doimmediateremove:
                                for o in out.split('\n'):
                                    if 'Use job id' in o:
                                        jobid = o.split()[3]
                                        out = os.popen(
                                            'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; '
                                            'jobsub_rm --jobid=' + jobid +
                                            ' --group=des --role=DESGW').read()
                                        print out
                            # NOTE(review): 'jobid' is only bound inside the
                            # doimmediateremove loop above - confirm this is
                            # always reachable when used here.
                            thisjson = preprocessing({
                                'jsonfilename':
                                os.path.join(self.datadir, jsonfile),
                                'jobid': jobid,
                                'dagfile': self.dagfile,
                                'status': 'Submitted'
                            })
                            self.backend.save(thisjson)
                            self.backend.commit()
                            print 'saved'
                    #raw_input()
                    #runProcessingIfNotAlready(image, self.backend)
        #sys.exit()
        print 'Finished submitting minidagmaker with all json files'
        #sys.exit()
        #raw_input()

    # Loop queries for images from mountain and submits them
    # Need to add complexity that monitors filter strategy and waits for
    # entire groups of images to be co-added
    def monitor_images_from_mountain(self):
        """Poll the telescope exposure table; once a hex has all exposures
        required by the filter strategy, build and submit its DAG."""
        #NEED TO ADD COADD LOGIC USING STRATEGY FROM CONFIG
        exposure_filter = np.array(self.strategy['exposure_filter'],
                                   dtype='str')
        uniquefilts = np.unique(self.strategy['exposure_filter'])
        # Number of required exposures per filter band.
        filterstrategy = {}
        for f in uniquefilts:
            filterstrategy[f] = len(exposure_filter[exposure_filter == f])
        print 'filter strategy dictionary ', filterstrategy
        starttime = time.time()
        pptime = time.time()
        keepgoing = True
        index = -1
        submission_counter = 0
        maxsub = 10000
        postprocessingtime = 10  #every half hour fire off Tim's code for post-processing
        while keepgoing:
            #os.system('kinit -k -t /var/keytab/desgw.keytab desgw/des/[email protected]')
            index += 1
            newfireds = []
            # Give up after ~14 hours of polling.
            if time.time() - starttime > 50000:
                keepgoing = False
                continue
            # NOTE(review): ofile is never closed in this loop body.
            ofile = open(os.path.join(self.triggerdir, 'latestquery.txt'),
                         'w')
            ofile.write(
                "--------------------------------------------------------------------------------------------------\n"
            )
            ofile.write(
                "EXPNUM\tNITE\tBAND\tEXPTIME\tRADEG\t DECDEG\tPROPID\tOBJECT\n"
            )
            ofile.write(
                "--------------------------------------------------------------------------------------------------\n"
            )
            print "--------------------------------------------------------------------------------------------------"
            print "EXPNUM\tNITE\tBAND\tEXPTIME\tRADEG\t DECDEG\tPROPID\tOBJECT"
            print "--------------------------------------------------------------------------------------------------"
            # NOTE(review): no space before "and" after the propid value -
            # the SQL reads "propid=<id>and obstype..."; confirm the query
            # actually parses as intended.
            query = "SELECT expnum,nite,band,exptime,radeg,decdeg,propid,object FROM prod.exposure@desoper WHERE " \
                    "expnum > 475900 and propid=" + propid + "and obstype='object' ORDER BY expnum"  # latest
            self.cursor.execute(query)
            for s in self.cursor:
                ofile.write(
                    str(s[0]) + "\t" + str(s[1]) + "\t" + str(s[2]) + "\t" +
                    str(s[3]) + "\t" + str(s[4]) + "\t" + str(s[5]) + "\t" +
                    str(s[6]) + "\t" + str(s[7]) + '\n')
                print str(s[0]) + "\t" + str(s[1]) + "\t" + str(
                    s[2]) + "\t" + str(s[3]) + "\t" + str(s[4]) + "\t" + str(
                        s[5]) + "\t" + str(s[6]) + "\t" + str(s[7])
                # Only DESGW exposures longer than ~30s are considered.
                if not 'DESGW' in str(s[7]):
                    continue
                #print 'exptime',float(s[3])
                if not float(s[3]) > 29.:
                    continue  #exposure must be longer than 30 seconds
                expnum = str(s[0])
                nite = str(s[1])
                band = str(s[2])
                exptime = str(s[3])
                #FIRST CHECK HERE THAT THE EXPOSURE NUMBER ISNT ALREADY IN THE DATABASE
                try:
                    exposure = self.backend.get(exposures, {'expnum': expnum})
                    print 'Found this exposure in desgw database...'
                    # print exposure.attributes
                    # if expnum == 506432:
                    #     sys.exit()
                    #     self.backend.delete(exposure)
                    #     self.backend.commit()
                    #     exposure = self.backend.get(exposures, {'expnum': expnum})
                except exposures.DoesNotExist:
                    # add to database
                    #runProcessingIfNotAlready(image,self.backend)
                    print './diffimg-proc/getTiling.sh ' + expnum
                    res = os.popen('./diffimg-proc/getTiling.sh ' +
                                   expnum).readlines()
                    print res
                    #sys.exit()
                    field, tiling = res[-2], res[-1]
                    #print 'field_tiling',field_tiling
                    # Hex key: "<field>_<tiling>_<nite>".
                    hexnite = field.strip() + '_' + tiling.strip() + '_' + str(
                        nite)
                    #print hexnite
                    #sys.exit()
                    #print 'hexnite',hexnite
                    print 'Creating exposure in database...', hexnite
                    #raw_input()
                    if '--' in hexnite:
                        print 'found bad example'
                        #raw_input()
                    exposure = exposures({
                        'expnum': expnum,
                        'nite': nite,
                        'field': field,
                        'tiling': tiling,
                        'hexnite': hexnite,
                        'band': band,
                        'jobid': np.nan,
                        'exptime': exptime,
                        'status': 'Awaiting additional exposures',
                        'triggerid': self.trigger_id,
                        'object': str(s[7])
                    })
                    self.backend.save(exposure)
                    self.backend.commit()
                hexnite = exposure.hexnite
                print 'hexnite', hexnite
                if '--' in hexnite:
                    print exposure.attributes
                    #raw_input()
                #raw_input()
                #sys.exit()
                try:
                    hex = self.backend.get(hexes, {'hexnite': hexnite})
                    #self.backend.delete(hex)
                    #self.backend.commit()
                    #hex = self.backend.get(hexes, {'hexnite': hexnite})
                    #print 'Found this hex in desgw database...'
                except hexes.DoesNotExist:
                    # First exposure seen for this hex: create its record with
                    # per-band targets taken from the strategy.
                    hex = hexes({
                        'hexnite': hexnite,
                        'strategy': self.strategy['exposure_filter'],
                        'num_target_g':
                        len(exposure_filter[exposure_filter == 'g']),
                        'num_target_r':
                        len(exposure_filter[exposure_filter == 'r']),
                        'num_target_i':
                        len(exposure_filter[exposure_filter == 'i']),
                        'num_target_z':
                        len(exposure_filter[exposure_filter == 'z']),
                        'observed_g': [],
                        'observed_r': [],
                        'observed_i': [],
                        'observed_z': [],
                        'exposures': [],
                        'status': 'Awaiting additional exposures',
                        'dagfile': None,
                    })
                    self.backend.save(hex)
                    self.backend.commit()
                    print hex.attributes
                    print 'created new hex'
                    #raw_input()
                if hex.status == 'Submitted for processing':
                    print 'This hex has already been submitted for processing'
                    continue
                # if '--' in hexnite:
                #     print hex.attributes
                #     raw_input()
                # if hex.status == 'Submitted for processing':
                #     print 'Hex ',hexnite,' band',band,'exposure',expnum,'has already been submitted for processing'
                #     #raw_input()
                #     continue
                # Record this exposure under its band (no duplicates).
                if band == 'g':
                    if not expnum in hex.observed_g:
                        hex.observed_g.append(expnum)
                        hex.exposures.append(expnum)
                if band == 'r':
                    if not expnum in hex.observed_r:
                        hex.observed_r.append(expnum)
                        hex.exposures.append(expnum)
                if band == 'i':
                    if not expnum in hex.observed_i:
                        hex.observed_i.append(expnum)
                        hex.exposures.append(expnum)
                if band == 'z':
                    if not expnum in hex.observed_z:
                        hex.observed_z.append(expnum)
                        hex.exposures.append(expnum)
                self.backend.save(hex)
                self.backend.commit()
                print hex.attributes
                didwork = False
                # All per-band targets met -> build and submit the DAG.
                if len(hex.observed_g) == hex.num_target_g:
                    if len(hex.observed_r) == hex.num_target_r:
                        if len(hex.observed_i) == hex.num_target_i:
                            if len(hex.observed_z) == hex.num_target_z:
                                print 'All exposures in strategy satisfied! '
                                #raw_input()
                                submissionPassed = True
                                for target, exps in zip([
                                        hex.num_target_g, hex.num_target_r,
                                        hex.num_target_i, hex.num_target_z
                                ], [
                                        hex.observed_g, hex.observed_r,
                                        hex.observed_i, hex.observed_z
                                ]):
                                    if target == 0:
                                        continue
                                    exposurestring = ''
                                    logstring = ''
                                    for ex in exps:
                                        exposurestring += ex + ' '
                                        logstring += ex + '_'
                                    print 'cd diffimg-proc; source DAGMaker.sh ' + exposurestring
                                    os.chdir("diffimg-proc")
                                    #out = os.popen('ls').read()
                                    out = os.popen('./DAGMaker.sh ' +
                                                   exposurestring).read()
                                    os.chdir("..")
                                    print out
                                    f = open(
                                        os.path.join(
                                            self.processingdir,
                                            logstring + hexnite + '.log'),
                                        'w')
                                    f.write(out)
                                    f.close()
                                    tt = time.time()
                                    if not 'To submit this DAG do' in out:
                                        dt.sendEmailSubject(
                                            self.trigger_id,
                                            'Error in creating dag for desgw hex: '
                                            + out)
                                        submissionPassed = False
                                    else:
                                        for o in out.split('\n'):
                                            if 'file://' in o:
                                                dagfile = o.split('/')[-1]
                                                self.dagfile = os.path.join(
                                                    self.processingdir,
                                                    logstring + 'job.dag')
                                                os.system('cp diffimg-proc/' +
                                                          dagfile + ' ' +
                                                          self.dagfile)
                                        print self.dagfile
                                        print 'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; jobsub_submit_dag -G des --role=DESGW file://' + self.dagfile
                                        out = os.popen(
                                            'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; '
                                            'jobsub_submit_dag -G des --role=DESGW file://'
                                            + self.dagfile).read()
                                        print out
                                        if 'non-zero exit status' in out:
                                            dt.sendEmailSubject(
                                                self.trigger_id,
                                                'Error in submitting hex dag for processing: '
                                                + out)
                                            submissionPassed = False
                                        else:
                                            if doimmediateremove:
                                                for o in out.split('\n'):
                                                    if 'Use job id' in o:
                                                        jobid = o.split()[3]
                                                        out = os.popen(
                                                            'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; '
                                                            'jobsub_rm --jobid='
                                                            + jobid +
                                                            ' --group=des --role=DESGW'
                                                        ).read()
                                                        print out
                                        ttt = time.time()
                                        #print 'TOTAL JOBSUB FOR A SINGLE DESGW IMAGE',ttt-tt
                                        #sys.exit()
                                        #raw_input()
                                if submissionPassed:
                                    # Mark the hex and all of its exposures as
                                    # submitted so they are not resubmitted.
                                    hex.status = 'Submitted for processing'
                                    hex.dagfile = self.dagfile
                                    self.backend.save(hex)
                                    self.backend.commit()
                                    for expn in hex.exposures:
                                        print expn, 'updated in database to Submitted For Processing'
                                        exp = self.backend.get(
                                            exposures, {'expnum': expn})
                                        exp.status = 'Submitted for processing'
                                        self.backend.save(exp)
                                        self.backend.commit()
                                    didwork = True
                                    print 'didwork', didwork
                                    print 'dagfile', self.dagfile
                                    #raw_input()
                if not didwork:
                    print 'Could not find all images in strategy for this hex... Added hex', hexnite, ' to database ' \
                        'and will continue waiting...'
                    #raw_input()
                #HERE YOU NEED TO ADD TO HEXSTRATEGYDICT DATABASE
            if time.time(
            ) - pptime > postprocessingtime:  #happens every 30 minutes or so...
                pptime = time.time()
                print '***** Firing post processing script *****'
                #sys.exit()
                self.submit_post_processing()
            #sys.exit()
            print 'Waiting 10s to check from mountain...'
            #sys.exit()
            time.sleep(10)  #looping over checking the mountain top
        # cfiles = os.listdir(os.path.join(trigger_path,trigger_id,'candidates'))
        # for f in cfiles:
        #     if f.split('.')[-1] == 'npz':
        #         cp.makeNewPage(f)

    def submit_post_processing(self):
        """Kick off the candidate post-processing script for the fired
        exposures. Currently only prints the commands (the os.system /
        subprocess calls are commented out)."""
        firedlist = open('./processing/firedlist.txt', 'r')
        fl = firedlist.readlines()
        firedlist.close()
        print fl
        # NOTE(review): the list read from firedlist.txt is immediately
        # overwritten by this hard-coded exposure list - confirm intended.
        fl = ['475914', '475915', '475916', '482859', '482860', '482861']
        expnumlist = ''
        for f in fl:
            expnumlist += f.strip() + ' '
        print 'FIRING TIMs CODE'
        gwpostdir = os.environ['GWPOST_DIR']
        print 'source ' + os.path.join(gwpostdir, 'diffimg_setup.sh') + '; \
python ' + os.path.join(gwpostdir, 'postproc.py') \
            + ' --expnums ' + expnumlist \
            + ' --outputdir ' + os.path.join(self.trigger_path,
                                             self.trigger_id, 'candidates') \
            + ' --triggerid ' + self.trigger_id + ' --season 46 --ups True'
        # os.system('source ' + os.path.join(gwpostdir, 'diffimg_setup.sh') + '; \
        # python '+os.path.join(gwpostdir,'postproc.py')\
        #     +' --expnums ' + expnumlist\
        #     + ' --outputdir ' + os.path.join(trigger_path,trigger_id,'candidates')\
        #     + ' --triggerid '+trigger_id+' --season 46 --ups True' )
        #pid = os.spawnlp(os.P_WAIT, "source", os.path.join(gwpostdir, 'diffimg_setup.sh'))
        args = [
            'ssh -t [email protected] "source ' +
            os.path.join(gwpostdir, 'mi_setup.sh') + '; ' +
            'yes | python ' + os.path.join(gwpostdir, 'postproc.py')
            + ' --expnums ' + expnumlist
            + ' --outputdir ' + os.path.join(self.trigger_path,
                                             self.trigger_id, 'candidates')
            + ' --triggerid ' + self.trigger_id + ' --season 46 --ups True"'
        ]
        print args
        #p = subprocess.Popen(args,stdout=PIPE, stderr=PIPE,shell=True)
        #print p.communicate()
        #p = subprocess.Popen(args,stdin=None, stdout=None, stderr=None, close_fds=True,shell=True)
        return

    def getDatetimeOfFirstJson(self, jsonstring):
        """Parse the observation start time out of a json filename of the
        form '...UTC-YYYY-MM-DD-H_M_S-test.json'."""
        js = jsonstring.split('UTC')[1]  #-2015-12-27-3:2:00.json
        #date_object = dt.strptime(js, '-%Y-%m-%d-%H_%M_%S.json')
        date_object = dt.strptime(js, '-%Y-%m-%d-%H_%M_%S-test.json')
        print '***** Datetime of first observation UTC', date_object, '*****'
        return date_object

    def sortHexes(self):
        # Placeholder - not implemented.
        pass
class TwitchNotifier(commands.Cog):
    """Discord cog that announces Twitch go-live events.

    Watched users are stored in a BlitzDB file backend; Twitch webhook
    callbacks drive the notifications posted to configured channels.
    """

    def __init__(self, bot):
        self.bot: 'PixlBot' = bot
        self.config = bot.config['TwitchNotifier']
        self.backend = FileBackend('db')
        self.backend.autocommit = True  # persist every save immediately
        self.bot.logger.info("Twitch notifier plugin ready")
        self.uuids = []  # webhook subscription UUIDs we registered
        self.online_uuids = []  # UUIDs currently live (duplicate-event guard)
        self.sslcontext = ssl.SSLContext()
        self.sslcontext.load_cert_chain(self.config['cert_path'],
                                        self.config['key_path'])
        self._twitch_init_()

    def _twitch_init_(self):
        """Authenticate with Twitch, stand up the webhook endpoint, clear
        stale subscriptions, and re-register all watched users."""
        self.bot.logger.info("Registering with Twitch...")
        self.twitch = Twitch(self.config['id'], self.config['secret'])
        self.twitch.authenticate_app([])
        self.bot.logger.info(
            f"Registering webhook endpoint {self.config['myurl']} ...")
        self.hook = TwitchWebHook(self.config['myurl'],
                                  self.config['id'],
                                  self.config['port'],
                                  ssl_context=self.sslcontext)
        self.hook.authenticate(self.twitch)
        self.bot.logger.info("Clearing all hook subscriptions...")
        self.hook.unsubscribe_all(self.twitch)  # Clear all subs on startup
        self.hook.start()
        self._register_all()

    def _login_to_id(self, name: str) -> Optional[str]:
        """Returns the twitch ID for a given login name, or None if the name
        couldn't be resolved."""
        try:
            res: dict = self.twitch.get_users(logins=[name])
        except TwitchBackendException as e:
            self.bot.logger.error(f"Backend error fetching user! {e}")
            return None
        if len(res) == 0:
            return None
        else:
            return res['data'][0]['id']

    def _register_all(self):
        """Attempts to register stream_changed callbacks for all configured
        users."""
        self.bot.logger.info("Registering callbacks for all watched users..")
        users = self.backend.filter(TwitchWatchedUser,
                                    {'twitch_name': {
                                        "$exists": True
                                    }})
        if not users:
            self.bot.logger.info("No users to watch. No callbacks registered.")
        else:
            for u in users:
                self.bot.logger.info(f"Registering: {u['twitch_name']}")
                success, uuid = self.hook.subscribe_stream_changed(
                    u['twitch_id'], self._cb_stream_changed)
                if success and uuid:
                    self.uuids.append(uuid)
                    self.bot.logger.info(
                        f"{success}: registered subscription UUID: {uuid}")
                else:
                    self.bot.logger.error(
                        f"{success}: failed registering subscription: {uuid}")

    def _cb_stream_changed(self, uuid, data):
        """Callback for Twitch webhooks, fires on stream change event"""
        self.bot.logger.debug(f"Callback data for {uuid}: {data}")
        if data["type"] == "offline":
            # Offline events only update bookkeeping; no notification sent.
            if uuid in self.online_uuids:
                self.online_uuids.remove(
                    uuid
                )  # Stupid twitch sending the same damn webhook multiple times...
                return
            else:
                self.bot.logger.debug(
                    f"Ignoring duplicate offline callback for {uuid}")
                return
        elif data["type"] == "live":
            if uuid in self.online_uuids:
                self.bot.logger.debug(
                    f"Ignoring duplicate live callback for {uuid}")
                return
            else:
                self.online_uuids.append(uuid)
        else:
            self.bot.logger.error(
                f"Got a callback type we can't handle: {data['type']}")
            return
        if uuid not in self.uuids:
            self.bot.logger.error(
                f"Got a callback for a UUID we're not tracking: {uuid}, my UUIDs: {self.uuids}"
            )
            return
        try:
            item = self.backend.get(TwitchWatchedUser,
                                    {"twitch_id": data["user_id"]})
        except TwitchWatchedUser.DoesNotExist:
            self.bot.logger.error(
                f"Got a callback for a USER we're not tracking: {data['user_id']} -> {data['user_name']}"
            )
            return
        channel: discord.TextChannel = self.bot.get_channel(
            item['notify_channel'])
        # Thumbnail size requested via Twitch's templated thumbnail URL.
        width = 640
        height = 360
        url = data['thumbnail_url'].format(width=width, height=height)
        tu = self.twitch.get_users(data['user_id'])['data'][0]
        self.bot.logger.debug(tu)
        embed = discord.Embed(
            title=f"Now streaming {data['game_name']}",
            description=data['title'],
            color=discord.Color.green(),
        )
        embed.set_image(url=url)
        embed.set_thumbnail(url=tu["profile_image_url"])
        embed.set_author(name=item["twitch_name"],
                         url=f"https://twitch.tv/{data['user_name']}")
        embed.add_field(name="Watch live at",
                        value=f"https://twitch.tv/{data['user_name']}")
        self.bot.loop.create_task(
            channel.send(  # This isn't an async function, so enqueue it manually
                embed=embed))
        self.bot.logger.info(
            f"Successfully sent online notification for {data['user_id']}")

    @cog_ext.cog_subcommand(
        base="Twitchwatch",
        name="add_notification",
        description="Add a go live notification for Twitch",
        options=[twitch_name, notify_channel, notify_text],
        guild_ids=util.guilds)
    async def add_notification(self, ctx: SlashContext, twitch_name: str,
                               notify_channel: discord.TextChannel,
                               notify_text: str):
        """Slash command: watch `twitch_name` and announce go-lives in
        `notify_channel`."""
        twitch_id = self._login_to_id(twitch_name)
        # NOTE(review): when a watch already exists (get succeeds without
        # raising), execution falls through and registers a duplicate -
        # confirm this is intended.
        try:
            self.backend.get(TwitchWatchedUser, {'twitch_name': twitch_name})
        except TwitchWatchedUser.DoesNotExist:
            pass
        except TwitchWatchedUser.MultipleDocumentsReturned:
            self.bot.logger.error(
                "Multiple users returned - database inconsistent???")
            return
        if not twitch_id:
            await ctx.send(embed=mkembed(
                'error',
                f"Unable to get the Twitch ID for the name {twitch_name}"))
            return
        await ctx.defer()  # This bit can take a minute.
        success, uuid = self.hook.subscribe_stream_changed(
            twitch_id, self._cb_stream_changed)
        if success and uuid:
            self.uuids.append(uuid)
            self.bot.logger.info(
                f"{success}: registered subscription UUID: {uuid}")
        else:
            self.bot.logger.error(
                f"{success}: failed registering subscription: {uuid}")
            await ctx.send("Bluh, couldn't register the webhook with twitch :("
                           )
            return
        item = TwitchWatchedUser({
            'twitch_name': twitch_name,
            'twitch_id': twitch_id,
            'discord_name': ctx.author.id,
            'notify_channel': notify_channel.id,
            'notify_text': notify_text,
            'uuid': str(uuid)
        })
        self.bot.logger.debug(f"DB object dump: {item.__dict__}")
        self.backend.save(item)
        await ctx.send(embed=mkembed("done",
                                     f"Notification added for {twitch_name}",
                                     channel=notify_channel.name))

    @cog_ext.cog_subcommand(
        base="Twitchwatch",
        name="del_notification",
        description="Remove a go live notification for Twitch",
        options=[twitch_name],
        guild_ids=util.guilds)
    async def del_notification(self, ctx: SlashContext, twitch_name: str):
        """Slash command: stop watching `twitch_name` and drop its webhook
        subscription."""
        try:
            item = self.backend.get(TwitchWatchedUser,
                                    {'twitch_name': twitch_name})
        except TwitchWatchedUser.DoesNotExist:
            await ctx.send(embed=mkembed(
                "error", f"No notification exists for {twitch_name}"))
            return
        self.hook.unsubscribe(item['uuid'])
        self.bot.logger.info(f"Removing watch {item['uuid']}: {twitch_name}")
        self.backend.delete(item)
        if item['uuid'] in self.uuids:
            self.uuids.remove(item['uuid'])
        await ctx.send(
            embed=mkembed("done", f"Notification for {twitch_name} removed."))
class BlitzDB(Database):
    """Database implementation backed by blitzdb's FileBackend.

    Each logical table name maps to a nested blitzdb Document subclass;
    records are keyed by 'pk', which mirrors the record's 'uuid' field.
    """

    key = 'pk'
    _database_type = 'blitzdb'

    class Data(Document):
        pass

    class Cutout(Document):
        pass

    class Fingerprint(Document):
        pass

    class Similarity(Document):
        pass

    def _get_table(self, table_name):
        """Return the Document class for `table_name`.

        Raises ValueError for an unknown name. (BUG FIX: previously this
        only logged and implicitly returned None, deferring the failure to
        a confusing crash at the call site.)
        """
        if table_name == 'data':
            return BlitzDB.Data
        elif table_name == 'cutout':
            return BlitzDB.Cutout
        elif table_name == 'fingerprint':
            return BlitzDB.Fingerprint
        elif table_name == 'similarity':
            return BlitzDB.Similarity
        else:
            log.error('BAD TABLE NAME {}'.format(table_name))
            raise ValueError('BAD TABLE NAME {}'.format(table_name))

    def __init__(self, filename):
        """Open (or create) the file-backed database at `filename`."""
        self._filename = filename
        self._backend = FileBackend(self._filename)

    def save(self, table, data):
        """Insert `data` (a dict, or an object exposing .save()) into
        `table` and return the stored record's primary key."""
        # Convert to dict if not one already
        if not isinstance(data, dict):
            data = data.save()
        else:
            # BUG FIX: copy so the caller's dict is not mutated by the
            # 'pk' insertion below.
            data = dict(data)
        blitz_table = self._get_table(table)
        data.update({'pk': data['uuid']})
        save_id = self._backend.save(blitz_table(data))
        self._backend.commit()
        return save_id['pk']

    def find(self, table, key=None):
        """Look up records in `table`.

        key=None -> list of all records; key=list -> records whose pk is
        in the list; otherwise the single record with that pk.
        """
        blitz_table = self._get_table(table)
        factory = get_factory(table)
        if key is None:
            return [
                factory(dict(x), db=self)
                for x in self._backend.filter(blitz_table, {})
            ]
        elif isinstance(key, list):
            return [
                factory(dict(x), db=self)
                for x in self._backend.filter(blitz_table,
                                              {'pk': {'$in': key}})
            ]
        else:
            return factory(dict(self._backend.get(blitz_table,
                                                  {'pk': key})),
                           db=self)

    def count(self, table):
        """Return the number of records in `table`."""
        blitz_table = self._get_table(table)
        return len(self._backend.filter(blitz_table, {}))

    def update(self, table, key, data):
        """Set the attributes in `data` on the record with pk `key`."""
        blitz_table = self._get_table(table)
        entry = self._backend.get(blitz_table, {'pk': key})
        for k, v in data.items():
            setattr(entry, k, v)
        entry.save()
        self._backend.commit()

    def close(self):
        """No-op: FileBackend needs no explicit close."""
        pass

    def delete_database(self):
        """Remove the entire on-disk database directory."""
        shutil.rmtree(self._filename)
class Profiles(object):
    """IRC bot plugin storing named "profiles" (lists of text lines) in
    BlitzDB, exposed through IRC commands and a small Flask web editor."""

    def __init__(self, bot):
        self.bot = bot
        self.cfg = PluginConfig(self)
        self.db = FileBackend(self.cfg.get('main_db'))
        mtt = MessageRetargeter(bot)
        self.msg = mtt.msg
        web = Flask(__name__, template_folder=tmpl_dir)
        mako = MakoTemplates()
        mako.init_app(web)
        # Add routes here
        web.add_url_rule('/edit_web/<args>', 'edit_web', self.edit_web, methods=['GET', 'POST'])
        _thread.start_new_thread(web.run, (), {'host': '0.0.0.0'})

    @command
    def learn(self, mask, target, args):
        """
        Stores information allowing for later retrieval. Names are downcased for sanity.

        Usage:
            %%learn <name> <information>...
        """
        name = args['<name>'].lower()
        info = ' '.join(args['<information>'])
        try:
            profile = self.db.get(Profile, {'name': name})
        except Profile.DoesNotExist:
            # First time this name is seen: create it owned by the speaker.
            profile = Profile(
                {
                    'name': name,
                    'owner': mask.nick.lower(),
                    'lines': [info],
                    'random': False,
                    'public': False
                }
            )
            profile.save(self.db)
            self.db.commit()
            self.msg(mask, target, 'Your data "%s" has been stored.' % name)
            return
        except Profile.MultipleDocumentsReturned:
            self.msg(mask, target, "Found more than one %s. This is bad! Please notify the bot owner." % name)
            return
        if is_allowed_to(Action.edit, mask.nick, profile):
            lines_to_append = profile.lines
            lines_to_append.append(info)
            profile.save(self.db)
            self.db.commit()
            self.msg(mask, target, 'Your data "%s" has been updated.' % name)
            return
        else:
            self.msg(mask, target, 'You are not authorized to edit "%s". Ask %s instead.' % (name, profile.owner))
            return

    @command
    def query(self, mask, target, args):
        """
        Retrieve the information associated with <name>. If the item is marked random, then one random item
        will be returned.

        Usage:
            %%query <name>
            ?? <name>
        """
        name = args['<name>'].lower()
        try:
            profile = self.db.get(Profile, {'name': name})
        except Profile.DoesNotExist:
            self.msg(mask, target, 'I cannot find "%s" in the records.' % name)
            return
        if profile.random:
            self.msg(mask, target, get_flags(profile) + random.choice(profile.lines))
        else:
            for line in profile.lines:
                self.msg(mask, target, get_flags(profile) + line)
                # Throttle output for long profiles to avoid flooding the
                # channel.  NOTE(review): sleep placement inside the loop
                # assumed from the flattened original — confirm.
                if len(profile.lines) >= int(self.cfg.get('throttle_max')):
                    sleep(int(self.cfg.get('throttle_time')))

    @command
    def forget(self, mask, target, args):
        """
        Delete <name> from the records. Only the person who created the item can remove it.

        Usage:
            %%forget <name>
        """
        name = args['<name>'].lower()
        try:
            profile = self.db.get(Profile, {'name': name})
        except Profile.DoesNotExist:
            self.msg(mask, target, 'I cannot find "%s" in the records.' % name)
            return
        if is_allowed_to(Action.delete, mask.nick, profile):
            self.db.delete(profile)
            self.db.commit()
            self.msg(mask, target, "%s has been deleted." % name)
        else:
            self.msg(mask, target, 'You are not authorized to delete "%s". Ask %s instead.' % (name, profile.owner))

    @command(permission='admin', show_in_help_list=False)
    def rmf(self, mask, target, args):
        """
        Delete <name> from the records without checking permissions.

        Usage:
            %%rmf <name>
        """
        name = args['<name>'].lower()
        try:
            profile = self.db.get(Profile, {'name': name})
        except Profile.DoesNotExist:
            self.msg(mask, target, 'I cannot find "%s" in the records.' % name)
            return
        self.db.delete(profile)
        self.db.commit()
        self.msg(mask, target, "%s has been deleted." % name)

    @command(permission='admin', show_in_help_list=False)
    def chown(self, mask, target, args):
        """
        Change the owner of <name> to <newowner>.

        Usage:
            %%chown <name> <newowner>
        """
        name = args['<name>'].lower()
        newowner = args['<newowner>'].lower()
        try:
            profile = self.db.get(Profile, {'name': name})
        except Profile.DoesNotExist:
            self.msg(mask, target, 'I cannot find "%s" in the records.' % name)
            return
        profile.owner = newowner
        self.db.save(profile)
        self.db.commit()
        self.msg(mask, target, "%s is now owned by %s." % (name, newowner))

    @command
    def toggle_public(self, mask, target, args):
        """
        Changes whether <name> is publicly editable or not

        Usage:
            %%toggle_public <name>
        """
        # BUGFIX: the original reused one variable for both the name string
        # and the fetched Profile document, so every message after the fetch
        # rendered the document's repr instead of the profile's name.
        name = args['<name>'].lower()
        try:
            profile = self.db.get(Profile, {'name': name})
        except Profile.DoesNotExist:
            self.msg(mask, target, 'I cannot find "%s" in the records.' % name)
            return
        if is_allowed_to(Action.edit, mask.nick, profile):
            if profile.public:
                profile.public = False
                self.msg(mask, target, '"%s" is no longer publicly editable.' % name)
            else:
                profile.public = True
                self.msg(mask, target, '"%s" is now publicly editable.' % name)
            self.db.save(profile)
            self.db.commit()
            return
        else:
            self.msg(mask, target, 'You are not authorized to edit "%s". Ask %s instead.' % (name, profile.owner))
            return

    @command
    def toggle_random(self, mask, target, args):
        """
        Toggle the randomness of an item, so that it shows a single random line instead of all lines when queried.

        Usage:
            %%toggle_random <name>
        """
        name = args['<name>'].lower()
        try:
            profile = self.db.get(Profile, {'name': name})
        except Profile.DoesNotExist:
            self.msg(mask, target, 'I cannot find "%s" in the records.' % name)
            return
        if is_allowed_to(Action.edit, mask.nick, profile):
            profile.random = not profile.random
            self.msg(mask, target, 'Random mode for %s is set to: %s' % (profile.name, profile.random))
            profile.save(self.db)
            self.db.commit()
        else:
            self.msg(mask, target, 'You are not authorized to edit "%s". Ask %s instead.' % (name, profile.owner))
            irc3.base.logging.log(irc3.base.logging.WARN,
                                  "%s tried to edit %s, but can't since it's owned by %s"
                                  % (mask.nick, profile.name, profile.owner)
                                  )

    # Raw string avoids invalid-escape-sequence warnings for \S and \? .
    @event(r"(@(?P<tags>\S+) )?:(?P<mask>\S+) PRIVMSG (?P<target>\S+) :\?\? (?P<data>.*)")
    def easy_query(self, mask, target, data):
        # "?? foo" is shorthand for the query command.
        self.bot.get_plugin(Commands).on_command(cmd='query', mask=mask, target=target, data=data)

    ####
    # All web stuff below this point
    #

    @command
    def edit(self, mask, target, args):
        """
        Sends you a webpage link to edit <name>. Great for longer profiles. Make sure to keep the URL you are given
        secure, as with it, anyone can edit your profiles.

        Usage:
            %%edit <name>
        """
        # TODO: Clear any existing sessions the user has
        data = {
            'id': str(uuid.uuid4()),
            'name': mask.nick,
            'profile': args['<name>']
        }
        name = args['<name>'].lower()
        try:
            profile = self.db.get(Profile, {'name': name})
        except Profile.DoesNotExist:
            self.msg(mask, target, 'I cannot find "%s" in the records.' % name)
            return
        if is_allowed_to(Action.fulledit, mask.nick, profile):
            newses = Session(data)
            self.db.save(newses)
            self.db.commit()
            self.bot.privmsg(mask.nick,
                             "An editor has been set up for you at http://skaianet.tkware.us:5000/edit_web/%s" % str(
                                 data['id']))
            self.bot.privmsg(mask.nick,
                             "Be very careful not to expose this address - with it, anyone can edit your stuff")
        else:
            self.msg(mask, target, 'You are not authorized to webedit "%s". Ask %s instead.' % (name, profile.owner))

    def edit_web(self, args):
        # Web endpoint: /edit_web/<args>
        if request.method == 'GET':
            # Does the session exist?
            try:
                edit_session = self.db.get(Session, {'id': args})
            except Session.DoesNotExist:
                return render_template('youfail.html',
                                       bot=self.bot,
                                       failreason='Invalid Session',
                                       userfail=True)
            # Does the profile exist?
            name = edit_session.profile
            try:
                profile = self.db.get(Profile, {'name': name.lower()})
            except Profile.DoesNotExist:
                return render_template('youfail.html',
                                       bot=self.bot,
                                       failreason='I cannot find "%s" in the records.' % name
                                       )
            # Kick off to the edit page!
            return render_template('edit.html',
                                   bot=self.bot,
                                   profile=profile,
                                   username=edit_session.name,
                                   sessionid=edit_session.id
                                   )
        elif request.method == 'POST':
            # We have to look up the session ID one more time. Something could have happened to the profile
            # since we created the session.
            try:
                edit_session = self.db.get(Session, {'id': request.form['ID']})
            except Session.DoesNotExist:
                return render_template('youfail.html',
                                       bot=self.bot,
                                       failreason='Invalid Session',
                                       userfail=True)
            name = request.form['profile']
            try:
                profile = self.db.get(Profile, {'name': request.form['profile']})
            except Profile.DoesNotExist:
                return render_template('youfail.html',
                                       bot=self.bot,
                                       failreason='I cannot find "%s" in the records.' % name,
                                       userfail=True
                                       )
            # Now with the profile in hand, blank the lines field and rebuild it from the form.
            # Grab all numeric items from the submission, sort them, and refill the DB object.
            lines = [item for item in request.form if item.isdigit()]
            # BUGFIX: sort numerically — the plain (lexicographic) sort put
            # line "10" before line "2", scrambling profiles over 9 lines.
            lines.sort(key=int)
            profile.lines = []
            for item in lines:
                profile.lines.append(request.form[item])
            self.db.save(profile)
            self.db.delete(edit_session)
            self.db.commit()
            return render_template('done.html',
                                   bot=self.bot,
                                   profile=profile.name
                                   )
class Responder(commands.Cog):
    """Auto-reply cog (slash-command flavor): stores trigger->reply pairs in
    BlitzDB and answers any message whose full text matches a trigger."""

    def __init__(self, bot):
        self.bot = bot
        self.backend = FileBackend('db')
        self.backend.autocommit = True
        bot.logger.info("Responder plugin ready")

    def _find_one(self, name: str) -> Union[ResponseCommand, None]:
        """Searches for a response in the DB, returning it if found, or None if it doesn't exist
        or there are multiples. This exists to tie up the Blitzdb boilerplate in one place."""
        try:
            comm = self.backend.get(ResponseCommand, {'command': name})
        except ResponseCommand.DoesNotExist:
            return None
        except ResponseCommand.MultipleDocumentsReturned:
            self.bot.logger.error(f"_find_one discarding multiple results returned for '{name}'")
            return None
        else:
            return comm

    def _reply_allowed(self, comm: ResponseCommand, message: discord.Message) -> bool:
        """Determine whether a message can be replied to based on its attributes

        In general, if a user or channel restriction is set on a command, it can only be used
        when called in the listed channel or by the listed user.
        """
        self.bot.logger.debug(f"Restriction dump: {comm.get('restrictions')}")
        if not comm.get("restrictions"):
            # No restrictions on this command, we can respond without doing anything else.
            return True
        # Channel restrictions take precedence over user restrictions.
        if comm["restrictions"].get("channels"):
            return message.channel.id in comm["restrictions"]["channels"]
        if comm["restrictions"].get("users"):
            return message.author.id in comm["restrictions"]["users"]
        return True

    @cog_ext.cog_subcommand(base="Autoresponder", name="addresponse",
                            description="Adds an automatic response to certain text",
                            options=[respond_to, response],
                            guild_ids=util.guilds)
    async def addresponse(self, ctx: SlashContext, respond_to: str, response: str):
        """Adds an automatic response to (name) as (response)

        The first word (name) is the text that will be replied to. Everything else is what it
        will be replied to with. If you want to reply to an entire phrase, enclose name in quotes."""
        if self._find_one(respond_to):
            await ctx.send(embed=mkembed('error', f"'{respond_to}' already exists."))
            return
        else:
            comm = ResponseCommand(
                {'command': respond_to, 'reply': response, 'creator_str': str(ctx.author),
                 'creator_id': ctx.author.id}
            )
            self.backend.save(comm)
            # BUGFIX: log the trigger, not the reply text.
            self.bot.logger.info(f"'{respond_to}' was added by {ctx.author.display_name}")
            await ctx.send(embed=mkembed('done', "Autoresponse saved.", reply_to=respond_to, reply_with=response))

    @cog_ext.cog_subcommand(base="Autoresponder", name="delresponse",
                            description="Removes an automatic reponse from certain text",
                            options=[respond_to],
                            guild_ids=util.guilds)
    async def delresponse(self, ctx: SlashContext, respond_to: str):
        """Removes an autoresponse. Only the initial creator of a response can remove it."""
        comm = self._find_one(respond_to)
        if not comm:
            await ctx.send(embed=mkembed('error', f"{respond_to} is not defined."))
            return
        elif not ctx.author.id == comm['creator_id']:
            await ctx.send(
                embed=mkembed('error', f"You are not the creator of {respond_to}. Ask {comm['creator_str']}"))
        else:
            self.backend.delete(comm)
            self.bot.logger.info(f"'{respond_to}' was deleted by {ctx.author.display_name}")
            await ctx.send(embed=mkembed('info', f"{respond_to} has been removed."))

    # @commands.command()
    # @cog_ext.cog_subcommand(base="Autoresponder", name="limit_user",
    #                         description="Limit a response to triggering on a certain user. Leave users blank to remove.",
    #                         options=[respond_to, restrict_user],
    #                         guild_ids=util.guilds)
    async def limitchannel(self, ctx: SlashContext, respond_to: str, **kwargs):
        """Add or clear user/channel restrictions on a response.

        Calling with no kwargs removes all restrictions.  BUGFIX: the user
        branch referenced the undefined name ``restrict_user`` (NameError),
        used ``elif`` so "users" stayed missing when "restrictions" had just
        been created (KeyError), and indexed kwargs directly (KeyError when
        a key was absent).
        """
        comm = self._find_one(respond_to)
        if not comm:
            await ctx.send(embed=mkembed('error', f"'{respond_to}' does not exist."))
            return
        if not ctx.author.id == comm['creator_id']:
            await ctx.send(
                embed=mkembed('error', f"You are not the creator of '{respond_to}'. Ask {comm['creator_str']}"))
            return
        if len(kwargs) == 0:
            comm["restrictions"] = {}
            self.backend.save(comm)
            await ctx.send(embed=mkembed('done', f"All restrictions removed from {respond_to}"))
            return
        if kwargs.get('restrict_user'):
            if not comm.get("restrictions"):
                comm["restrictions"] = {}
            if not comm["restrictions"].get("users"):
                comm["restrictions"]["users"] = []
            comm["restrictions"]["users"] = list(set(
                comm["restrictions"]["users"] + [u.id for u in kwargs['restrict_user']]
            ))
            self.backend.save(comm)
            display_users = [self.bot.get_user(u).display_name for u in comm["restrictions"]["users"]]
            await ctx.send(
                embed=mkembed('done', 'User restriction updated:', command=comm['command'], users=display_users)
            )
        if kwargs.get('restrict_channel'):
            if not comm.get("restrictions"):
                comm["restrictions"] = {}
            if not comm["restrictions"].get("channels"):
                comm["restrictions"]["channels"] = []
            comm["restrictions"]["channels"] = list(set(
                comm["restrictions"]["channels"] + [c.id for c in ctx.message.channel_mentions]
            ))
            display_channels = [self.bot.get_channel(c).name for c in comm["restrictions"]["channels"]]
            self.backend.save(comm)
            await ctx.send(
                embed=mkembed('done', 'Channel restriction updated:',
                              Command=comm['command'],
                              Channels=display_channels
                              )
            )

    @commands.command()
    async def responserestrictions(self, ctx: context, name: str):
        """Show the restriction list for a given command"""
        comm = self._find_one(name)
        if not comm:
            await ctx.send(embed=mkembed('error', f"{name} does not exist."))
            return
        await ctx.send(
            embed=mkembed('info', f"Information for `{name}`",
                          Reply=comm['reply'],
                          Restrictions=comm.get('restrictions', 'None'),
                          Creator=comm['creator_str']
                          )
        )

    @commands.Cog.listener()
    async def on_message(self, message: discord.message):
        # Reply whenever a whole message exactly matches a stored trigger.
        comm = self._find_one(message.content)
        if comm and self._reply_allowed(comm, message):
            await message.channel.send(comm['reply'])
def create_app(configfile=None):
    """Build the Wake-on-LAN Flask app: login-protected device list with
    ping / WoL / add / delete actions backed by a BlitzDB file store."""
    app = Flask(__name__)
    AppConfig(app, configfile)  # Flask-Appconfig is not necessary, but
    # highly recommend =)
    # https://github.com/mbr/flask-appconfig
    Bootstrap(app)

    login_manager = LoginManager()
    login_manager.init_app(app)
    login_manager.login_view = 'login'

    #NoSQL Backend
    backend = FileBackend("/tmp/wakeonlan.db")
    backend.create_index(Device, fields={'id': 1}, unique=True)

    #TEST Devices — seed the store once, when it is empty.
    alldevices = backend.filter(Device, {})
    if len(alldevices) == 0:
        try:
            pc1 = Device({
                "id": "001122334411",
                "name": "PC 1",
                "mac": "00:11:22:33:44:11",
                "ip": "192.168.222.111",
                'status': ''
            })
            backend.save(pc1)
            pc2 = Device({
                "id": "001122334422",
                "name": "PC 2",
                "mac": "00:11:22:33:44:22",
                "ip": "192.168.222.112",
                'status': ''
            })
            backend.save(pc2)
            pc3 = Device({
                "id": "001122334433",
                "name": "Router",
                "mac": "00:11:22:33:44:33",
                "ip": "192.168.222.1",
                'status': ''
            })
            backend.save(pc3)
            backend.commit()
        except Exception:
            # Seeding is best-effort; roll back the partial transaction.
            # (Narrowed from a bare except, which also swallowed SystemExit
            # and KeyboardInterrupt.)
            backend.revert()

    # in a real app, these should be configured through Flask-Appconfig
    app.config['SECRET_KEY'] = 'devkey'
    # app.config['RECAPTCHA_PUBLIC_KEY'] = \
    # '6Lfol9cSAAAAADAkodaYl9wvQCwBMr3qGR_PPHcw'

    def getDeviceById(id):
        # Return the Device with this id, or None when the lookup fails.
        device = None
        try:
            device = backend.get(Device, {'id': id})
        except Exception:
            pass
        return device

    def pingDeviceById(id):
        #Get Device
        device = backend.get(Device, {'id': id})
        if device:
            #Get Device's IP
            ip = device['ip']
            result = pingDeviceByIp(ip)
            #Update Status UP/Down/''
            if result == 0:
                device['status'] = 'UP'
            else:
                device['status'] = 'DOWN'
            backend.save(device)
            return result
        return None

    def wolDeviceById(id):
        #Get Device
        device = backend.get(Device, {'id': id})
        if device:
            #WoL for Device MAC
            mac = device['mac']
            wolDeviceByMac(mac)
        return None

    @login_manager.user_loader
    def user_loader(user_id):
        """Given *user_id*, return the associated User object.

        :param unicode user_id: user_id (email) user to retrieve
        """
        user_entry = User.getById(user_id)
        if user_entry is not None:
            user = User(user_entry[0], user_entry[1])
            return user
        else:
            return None

    @app.route('/', methods=('GET', 'POST'))
    @login_required
    def index():
        form = ExampleForm()
        form.validate_on_submit()  # to get error messages to the browser
        # flash('critical message', 'critical')
        # flash('error message', 'error')
        # flash('warning message', 'warning')
        # flash('info message', 'info')
        # flash('debug message', 'debug')
        # flash('different message', 'different')
        # flash('uncategorized message')
        alldevices = backend.filter(Device, {}).sort('name')
        #app.logger.info('Devices: %s' % (len(alldevices) ) )
        return render_template('index.html', form=form, devices=alldevices)

    @app.route('/login', methods=('GET', 'POST'))
    def login():
        if request.method == 'GET':
            form = LoginForm()
            form.validate_on_submit()  # to get error messages to the browser
            return render_template('login.html', form=form)
        username = request.form['username']
        password = request.form['password']
        user_entry = User.get(username, password)
        if user_entry is None:
            # (Typo fix: was "Passord".)
            flash('Username or Password is invalid', 'error')
            return redirect(url_for('login'))
        user = User(user_entry[0], user_entry[1])
        login_user(user, remember=True)
        return redirect(request.args.get('next') or url_for('index'))

    @app.route("/logout", methods=["GET"])
    @login_required
    def logout():
        """Logout the current user."""
        user = current_user
        user.authenticated = False
        logout_user()
        return redirect(url_for('login'))

    @app.route('/addDevice', methods=('GET', 'POST'))
    @login_required
    def addDevice():
        if request.method == 'GET':
            form = AddDeviceForm()
            form.validate_on_submit()  # to get error messages to the browser
            return render_template('add_device.html', form=form)
        name = request.form['name']
        mac = request.form['mac']
        ip = request.form['ip']
        id = mac.replace(':', '')  # device id is the MAC without separators
        try:
            newDevice = Device({
                "id": id,
                "name": name,
                "mac": mac,
                "ip": ip,
                'status': ''
            })
            backend.save(newDevice)
            backend.commit()
        except Exception:
            flash('Error creating new Device', 'error')
        return redirect(url_for('index'))

    @app.route('/editListDevice', methods=('GET', 'POST'))
    @login_required
    def editListDevice():
        alldevices = backend.filter(Device, {}).sort('name')
        return render_template('list_device.html', devices=alldevices)

    @app.route('/pingDevice/<deviceId>', methods=('GET', 'POST'))
    @login_required
    def pingDevice(deviceId):
        app.logger.info('pingDevice: %s' % (deviceId))
        device = getDeviceById(deviceId)
        result = pingDeviceById(deviceId)
        app.logger.info('pingDevice: %s' % (result))
        if result is None:
            flash('Ping - Error on device %s' % (device['name']), 'error')
        elif result == 0:
            flash('Device %s is UP' % (device['name']), 'info')
        else:
            flash('Device %s is DOWN' % (device['name']), 'error')
        return redirect(url_for('index'))

    @app.route('/wolDevice/<deviceId>', methods=('GET', 'POST'))
    @login_required
    def wolDevice(deviceId):
        app.logger.info('wolDevice: %s' % (deviceId))
        device = getDeviceById(deviceId)
        result = wolDeviceById(deviceId)
        if device:
            flash('WoL sent to %s' % (device['name']), 'info')
        else:
            flash('WoL error', 'error')
        return redirect(url_for('index'))

    @app.route('/deleteDevice/<deviceId>', methods=('GET', 'POST'))
    @login_required
    def deleteDevice(deviceId):
        app.logger.info('wolDevice: %s' % (deviceId))
        device = getDeviceById(deviceId)
        try:
            backend.delete(device)
            backend.commit()
            flash('%s Deleted' % (device['name']), 'info')
        except Exception:
            flash('Delete error', 'error')
        return redirect(url_for('editListDevice'))

    return app
def create_app(configfile=None):
    """Build the Wake-on-LAN Flask app: login-protected device list with
    ping / WoL / add / delete actions backed by a BlitzDB file store."""
    app = Flask(__name__)
    AppConfig(app, configfile)  # Flask-Appconfig is not necessary, but
    # highly recommend =)
    # https://github.com/mbr/flask-appconfig
    Bootstrap(app)

    login_manager = LoginManager()
    login_manager.init_app(app)
    login_manager.login_view = 'login'

    #NoSQL Backend
    backend = FileBackend("/tmp/wakeonlan.db")
    backend.create_index(Device, fields={'id': 1}, unique=True)

    #TEST Devices — seed the store once, when it is empty.
    alldevices = backend.filter(Device, {})
    if len(alldevices) == 0:
        try:
            pc1 = Device({"id": "001122334411", "name": "PC 1",
                          "mac": "00:11:22:33:44:11", "ip": "192.168.222.111",
                          'status': ''})
            backend.save(pc1)
            pc2 = Device({"id": "001122334422", "name": "PC 2",
                          "mac": "00:11:22:33:44:22", "ip": "192.168.222.112",
                          'status': ''})
            backend.save(pc2)
            pc3 = Device({"id": "001122334433", "name": "Router",
                          "mac": "00:11:22:33:44:33", "ip": "192.168.222.1",
                          'status': ''})
            backend.save(pc3)
            backend.commit()
        except Exception:
            # Seeding is best-effort; roll back the partial transaction.
            # (Narrowed from a bare except, which also swallowed SystemExit
            # and KeyboardInterrupt.)
            backend.revert()

    # in a real app, these should be configured through Flask-Appconfig
    app.config['SECRET_KEY'] = 'devkey'
    # app.config['RECAPTCHA_PUBLIC_KEY'] = \
    # '6Lfol9cSAAAAADAkodaYl9wvQCwBMr3qGR_PPHcw'

    def getDeviceById(id):
        # Return the Device with this id, or None when the lookup fails.
        device = None
        try:
            device = backend.get(Device, {'id': id})
        except Exception:
            pass
        return device

    def pingDeviceById(id):
        #Get Device
        device = backend.get(Device, {'id': id})
        if device:
            #Get Device's IP
            ip = device['ip']
            result = pingDeviceByIp(ip)
            #Update Status UP/Down/''
            if result == 0:
                device['status'] = 'UP'
            else:
                device['status'] = 'DOWN'
            backend.save(device)
            return result
        return None

    def wolDeviceById(id):
        #Get Device
        device = backend.get(Device, {'id': id})
        if device:
            #WoL for Device MAC
            mac = device['mac']
            wolDeviceByMac(mac)
        return None

    @login_manager.user_loader
    def user_loader(user_id):
        """Given *user_id*, return the associated User object.

        :param unicode user_id: user_id (email) user to retrieve
        """
        user_entry = User.getById(user_id)
        if user_entry is not None:
            user = User(user_entry[0], user_entry[1])
            return user
        else:
            return None

    @app.route('/', methods=('GET', 'POST'))
    @login_required
    def index():
        form = ExampleForm()
        form.validate_on_submit()  # to get error messages to the browser
        # flash('critical message', 'critical')
        # flash('error message', 'error')
        # flash('warning message', 'warning')
        # flash('info message', 'info')
        # flash('debug message', 'debug')
        # flash('different message', 'different')
        # flash('uncategorized message')
        alldevices = backend.filter(Device, {}).sort('name')
        #app.logger.info('Devices: %s' % (len(alldevices) ) )
        return render_template('index.html', form=form, devices=alldevices)

    @app.route('/login', methods=('GET', 'POST'))
    def login():
        if request.method == 'GET':
            form = LoginForm()
            form.validate_on_submit()  # to get error messages to the browser
            return render_template('login.html', form=form)
        username = request.form['username']
        password = request.form['password']
        user_entry = User.get(username, password)
        if user_entry is None:
            # (Typo fix: was "Passord".)
            flash('Username or Password is invalid', 'error')
            return redirect(url_for('login'))
        user = User(user_entry[0], user_entry[1])
        login_user(user, remember=True)
        return redirect(request.args.get('next') or url_for('index'))

    @app.route("/logout", methods=["GET"])
    @login_required
    def logout():
        """Logout the current user."""
        user = current_user
        user.authenticated = False
        logout_user()
        return redirect(url_for('login'))

    @app.route('/addDevice', methods=('GET', 'POST'))
    @login_required
    def addDevice():
        if request.method == 'GET':
            form = AddDeviceForm()
            form.validate_on_submit()  # to get error messages to the browser
            return render_template('add_device.html', form=form)
        name = request.form['name']
        mac = request.form['mac']
        ip = request.form['ip']
        id = mac.replace(':', '')  # device id is the MAC without separators
        try:
            newDevice = Device({"id": id, "name": name, "mac": mac, "ip": ip,
                                'status': ''})
            backend.save(newDevice)
            backend.commit()
        except Exception:
            flash('Error creating new Device', 'error')
        return redirect(url_for('index'))

    @app.route('/editListDevice', methods=('GET', 'POST'))
    @login_required
    def editListDevice():
        alldevices = backend.filter(Device, {}).sort('name')
        return render_template('list_device.html', devices=alldevices)

    @app.route('/pingDevice/<deviceId>', methods=('GET', 'POST'))
    @login_required
    def pingDevice(deviceId):
        app.logger.info('pingDevice: %s' % (deviceId))
        device = getDeviceById(deviceId)
        result = pingDeviceById(deviceId)
        app.logger.info('pingDevice: %s' % (result))
        if result is None:
            flash('Ping - Error on device %s' % (device['name']), 'error')
        elif result == 0:
            flash('Device %s is UP' % (device['name']), 'info')
        else:
            flash('Device %s is DOWN' % (device['name']), 'error')
        return redirect(url_for('index'))

    @app.route('/wolDevice/<deviceId>', methods=('GET', 'POST'))
    @login_required
    def wolDevice(deviceId):
        app.logger.info('wolDevice: %s' % (deviceId))
        device = getDeviceById(deviceId)
        result = wolDeviceById(deviceId)
        if device:
            flash('WoL sent to %s' % (device['name']), 'info')
        else:
            flash('WoL error', 'error')
        return redirect(url_for('index'))

    @app.route('/deleteDevice/<deviceId>', methods=('GET', 'POST'))
    @login_required
    def deleteDevice(deviceId):
        app.logger.info('wolDevice: %s' % (deviceId))
        device = getDeviceById(deviceId)
        try:
            backend.delete(device)
            backend.commit()
            flash('%s Deleted' % (device['name']), 'info')
        except Exception:
            flash('Delete error', 'error')
        return redirect(url_for('editListDevice'))

    return app
class Responder(commands.Cog):
    """Auto-reply cog (prefix-command flavor): stores trigger->reply pairs in
    BlitzDB and answers any message whose full text matches a trigger."""

    def __init__(self, bot):
        self.bot = bot
        self.backend = FileBackend('./responder-db')
        self.backend.autocommit = True
        bot.logger.info("Responder plugin ready")

    def _find_one(self, name: str) -> Union[ResponseCommand, None]:
        """Searches for a response in the DB, returning it if found, or None if it
        doesn't exist or there are multiples. This exists to tie up the Blitzdb
        boilerplate in one place."""
        try:
            comm = self.backend.get(ResponseCommand, {'command': name})
        except ResponseCommand.DoesNotExist:
            return None
        except ResponseCommand.MultipleDocumentsReturned:
            self.bot.logger.error(
                f"_find_one discarding multiple results returned for '{name}'")
            return None
        else:
            return comm

    def _reply_allowed(self, comm: ResponseCommand,
                       message: discord.Message) -> bool:
        """Determine whether a message can be replied to based on its attributes

        In general, if a user or channel restriction is set on a command, it
        can only be used when called in the listed channel or by the listed
        user.
        """
        self.bot.logger.debug(f"Restriction dump: {comm.get('restrictions')}")
        if not comm.get("restrictions"):
            # No restrictions on this command, we can respond without doing anything else.
            return True
        # Channel restrictions take precedence over user restrictions.
        if comm["restrictions"].get("channels"):
            return message.channel.id in comm["restrictions"]["channels"]
        if comm["restrictions"].get("users"):
            return message.author.id in comm["restrictions"]["users"]
        return True

    @commands.command()
    async def addresponse(self, ctx: context, name: str, *responsen):
        """Adds an automatic response to (name) as (response)

        The first word (name) is the text that will be replied to. Everything
        else is what it will be replied to with. If you want to reply to an
        entire phrase, enclose name in quotes."""
        arg2 = " ".join(responsen)
        if self._find_one(name):
            await ctx.send(embed=mkembed('error', f"'{name}' already exists."))
            return
        else:
            comm = ResponseCommand({
                'command': name,
                'reply': arg2,
                'creator_str': str(ctx.author),
                'creator_id': ctx.author.id
            })
            self.backend.save(comm)
            self.bot.logger.info(
                f"'{name}' was added by {ctx.author.display_name}")
            await ctx.send(embed=mkembed('done', f"Saved: {name} => {arg2}"))

    @commands.command()
    async def delresponse(self, ctx: context, name: str):
        """Removes an autoresponse. Only the initial creator of a response can remove it."""
        comm = self._find_one(name)
        if not comm:
            # BUGFIX: the coroutine was never awaited, so this error message
            # was silently dropped.
            await ctx.send(embed=mkembed('error', f"{name} is not defined."))
            return
        elif not ctx.author.id == comm['creator_id']:
            await ctx.send(embed=mkembed(
                'error',
                f"You are not the creator of {name}. Ask {comm['creator_str']}"
            ))
        else:
            self.backend.delete(comm)
            self.bot.logger.info(
                f"'{name}' was deleted by {ctx.author.display_name}")
            # Consistency: the slash-command version of this cog reports the
            # removal as 'info', not 'error'.
            await ctx.send(embed=mkembed('info', f"{name} has been removed."))

    @commands.command()
    async def responselimit(self, ctx: context, name: str, *tags: str):
        """Adds a restriction mode on a response so it only triggers in certain circumstances.

        Tags is a space-separated list of users and channels that this
        responder will apply to. These must be discord tags or pings, i.e.
        discord must show them as links, not just text.

        If the word NONE is used, all restrictions are removed.
        """
        comm = self._find_one(name)
        if not comm:
            await ctx.send(embed=mkembed('error', f"{name} does not exist."))
            return
        if not ctx.author.id == comm['creator_id']:
            await ctx.send(embed=mkembed(
                'error',
                f"You are not the creator of {name}. Ask {comm['creator_str']}"
            ))
            return
        # BUGFIX: guard the empty-tags case; tags[0] raised IndexError when
        # the command was invoked with no tags and no mentions.
        if tags and tags[0] == "NONE":
            comm["restrictions"] = {}
            self.backend.save(comm)
            await ctx.send(
                embed=mkembed('done', f"All restrictions removed from {name}"))
            return
        if ctx.message.mentions:
            # BUGFIX: both initializations must run; the original 'elif' left
            # "users" missing right after "restrictions" was created, raising
            # KeyError below.
            if not comm.get("restrictions"):
                comm["restrictions"] = {}
            if not comm["restrictions"].get("users"):
                comm["restrictions"]["users"] = []
            comm["restrictions"]["users"] = list(
                set(comm["restrictions"]["users"] +
                    [u.id for u in ctx.message.mentions]))
            self.backend.save(comm)
            display_users = [
                self.bot.get_user(u).display_name
                for u in comm["restrictions"]["users"]
            ]
            await ctx.send(embed=mkembed('done',
                                         'User restriction updated:',
                                         command=comm['command'],
                                         users=display_users))
        if ctx.message.channel_mentions:
            if not comm.get("restrictions"):
                comm["restrictions"] = {}
            if not comm["restrictions"].get("channels"):
                comm["restrictions"]["channels"] = []
            comm["restrictions"]["channels"] = list(
                set(comm["restrictions"]["channels"] +
                    [c.id for c in ctx.message.channel_mentions]))
            display_channels = [
                self.bot.get_channel(c).name
                for c in comm["restrictions"]["channels"]
            ]
            self.backend.save(comm)
            await ctx.send(embed=mkembed('done',
                                         'Channel restriction updated:',
                                         Command=comm['command'],
                                         Channels=display_channels))

    @commands.command()
    async def responserestrictions(self, ctx: context, name: str):
        """Show the restriction list for a given command"""
        comm = self._find_one(name)
        if not comm:
            await ctx.send(embed=mkembed('error', f"{name} does not exist."))
            return
        await ctx.send(
            embed=mkembed('info',
                          f"Information for `{name}`",
                          Reply=comm['reply'],
                          Restrictions=comm.get('restrictions', 'None'),
                          Creator=comm['creator_str']))

    @commands.Cog.listener()
    async def on_message(self, message: discord.message):
        # Reply whenever a whole message exactly matches a stored trigger.
        comm = self._find_one(message.content)
        if comm and self._reply_allowed(comm, message):
            await message.channel.send(comm['reply'])
class eventmanager:
    """Manages one DESGW trigger event end to end (Python 2 code).

    On construction it: opens/creates the trigger record in a local BlitzDB
    file backend, loads the observing strategy, submits SE (pre-processing)
    jobs for the observation .json files, then enters a polling loop that
    watches for new exposures off the mountain, groups them into hexes, and
    submits diffimg DAGs plus periodic post-processing.

    NOTE(review): depends on many module-level names not visible in this
    chunk (FileBackend, Trigger, preprocessing, exposures, hexes, ea,
    DATABASE, propid, hardjson, hj, doimmediateremove, dt, td, copy, np,
    yaml, subprocess) -- confirm against the file header. `dt` is used both
    like a datetime class (dt.utcnow/dt.strptime) and as a helper module
    (dt.sendEmailSubject); verify what it actually is.
    """

    def __init__(self, trigger_id, jsonfilelist, triggerdir, datadir, real, trigger_path):
        # Set up DB state, load strategy/config, dump exposure tables, then
        # kick off SE job submission and the (blocking) mountain-watch loop.
        #os.system('kinit -k -t /var/keytab/desgw.keytab desgw/des/[email protected]')
        tstart = time.time()
        # Real triggers and test triggers go to separate local databases.
        if real:
            self.backend = FileBackend("./realdb")
        else:
            self.backend = FileBackend("./testdb")
        try:
            self.thisevent = self.backend.get(Trigger, {'id': trigger_id})
            print 'Found this event in desgw database...'
        except Trigger.DoesNotExist:
            # First time we see this trigger: create its database record.
            self.thisevent = Trigger({
                'id':trigger_id,
                'jsonfilelist':jsonfilelist,
                'triggerpath':triggerdir,
                'mapspath':datadir,
                'jobids':[
                    (0,'jsonfile_corresponding_to_jobid.json'),
                ],
                'postprocint': 0
            })
            print 'Database entry created!'
        self.trigger_id = trigger_id
        self.trigger_path = trigger_path
        self.backend.save(self.thisevent)
        self.backend.commit()
        with open(os.path.join(triggerdir,"strategy.yaml"), "r") as f:
            self.config = yaml.safe_load(f);
        # Per-filter exposure counts required by the strategy.
        self.filterobslist = np.array(self.config['exposure_filter'],dtype='str')
        self.strategydict = {}
        for f in np.unique(self.filterobslist):
            self.strategydict[f] = len(self.filterobslist[self.filterobslist == f])
        self.connection = ea.connect(DATABASE)
        self.cursor = self.connection.cursor()
        self.jsonfilelist = jsonfilelist
        print self.jsonfilelist
        # Optional hard-coded json list override (module-level switch).
        if hardjson:
            self.jsonfilelist = hj
        #self.pp = subprocess.Popen('echo starting',stdout=PIPE, stderr=PIPE,shell=True)
        self.trigger_id = trigger_id  # NOTE(review): duplicate assignment, already set above
        self.datadir = datadir
        self.triggerdir = triggerdir
        self.processingdir = os.path.join(self.triggerdir,'PROCESSING')
        if not os.path.exists(self.processingdir):
            os.makedirs(self.processingdir)
        dire = './processing/' + trigger_id + '/'
        if not os.path.exists(dire):
            os.makedirs(dire)
        # NOTE(review): strategy.yaml is read a second time here (also loaded
        # into self.config above).
        with open(os.path.join(triggerdir, "strategy.yaml"), "r") as f:
            self.strategy = yaml.safe_load(f)
        with open("jobmanager.yaml", "r") as g:
            self.jmconfig = yaml.safe_load(g);
        # Dump Y1 and current-epoch exposure tables, then concatenate them
        # into a single exposures.list for downstream tools.
        q1 = "select expnum,nite,mjd_obs,telra,teldec,band,exptime,propid,obstype,object from exposure where " \
             "nite>20130828 and nite<20150101 and expnum<300000 and obstype='object' order by expnum" # y1 images
        self.connection.query_and_save(q1, './processing/exposuresY1.tab')
        q2 = "select expnum,nite,mjd_obs,radeg,decdeg,band,exptime,propid,obstype,object from prod.exposure where " \
             "nite>20150901 and obstype='object' order by expnum" # y2 and later
        self.connection.query_and_save(q2, './processing/exposuresCurrent.tab')
        os.system('cat ./processing/exposuresY1.tab ./processing/exposuresCurrent.tab > ./processing/exposures.list')
        #self.submit_post_processing()
        self.submit_all_jsons_for_sejobs()#preps all DES images that already exist
        tfin = time.time()
        print 'TOTAL SE JOBS TIME', tfin - tstart
        #sys.exit()
        self.monitor_images_from_mountain()#A loop that waits for images off mountain and submits for processing

    def submit_all_jsons_for_sejobs(self):
        """Submit an SEMaker DAG for every observation json not yet in the
        preprocessing database, provided the first observation is still far
        enough in the future (sejob_timecushion hours)."""
        obsStartTime = self.getDatetimeOfFirstJson(self.jsonfilelist[0]) # THIS IS A DATETIME OBJ
        currentTime = dt.utcnow()
        print '***** The current time is UTC', currentTime, '*****'
        delt = obsStartTime - currentTime
        # Time until first observation, in hours.
        timedelta = td(days=delt.days, seconds=delt.seconds).total_seconds() / 3600.
        print '***** The time delta is ', timedelta, 'hours *****'
        # if timedelta > np.pi:
        sejob_timecushion = self.jmconfig["sejob_timecushion"]
        if timedelta > sejob_timecushion:
            for jsonfile in self.jsonfilelist:
                print 'json',jsonfile
                try: #check if this json file is already in the submitted preprocessing database
                    thisjson = self.backend.get(preprocessing, {'jsonfilename': os.path.join(self.datadir, jsonfile)})
                    print 'Found this json in desgw database...'
                except preprocessing.DoesNotExist: #do submission and then add to database
                    print 'cd diffimg-proc; ./SEMaker_RADEC.sh '+os.path.join(self.datadir, jsonfile)
                    # Run the DAG-maker from inside its own directory and log its output.
                    os.chdir("diffimg-proc")
                    out = os.popen('./SEMaker_RADEC.sh '+os.path.join(self.datadir, jsonfile)).read()
                    of = open(os.path.join(self.processingdir,jsonfile.split('/')[-1].split('.')[0]+'.SEMakerlog'),'w')
                    of.write(out)
                    of.close()
                    #out = os.popen('ls').read()
                    os.chdir("..")
                    print out
                    if 'non-zero exit status' in out:
                        dt.sendEmailSubject(self.trigger_id,'Error in creating SEMaker dag for .json: '+out)
                    else:
                        # Scrape the generated dag file path out of the tool's stdout.
                        for o in out.split('\n'):
                            if 'file://' in o:
                                dagfile = o.split('/')[-1]
                                self.dagfile = os.path.join(self.processingdir,jsonfile.split('/')[-1].split('.')[0]+'_'+dagfile)
                                os.system('cp diffimg-proc/'+dagfile+' '+self.dagfile)
                                jobsubmitline = copy(o)
                        print self.dagfile
                        print 'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; jobsub_submit_dag -G des --role=DESGW file://'+self.dagfile
                        out = os.popen(
                            'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; '
                            'jobsub_submit_dag -G des --role=DESGW file://'+self.dagfile).read()
                        print out
                        of = open(os.path.join(self.processingdir, jsonfile.split('/')[-1].split('.')[0] + '.SEdagsubmitlog'), 'w')
                        of.write(out)
                        of.close()
                        if 'non-zero exit status' in out:
                            dt.sendEmailSubject(self.trigger_id, 'Error in submitting .json for preprocessing: ' + out)
                        else:
                            # Scrape the jobsub job id from the submission output.
                            for o in out.split('\n'):
                                if 'Use job id' in o:
                                    jobid = o.split()[3]
                            # Debug switch: immediately remove the job we just submitted.
                            if doimmediateremove:
                                out = os.popen(
                                    'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; '
                                    'jobsub_rm --jobid=' + jobid + ' --group=des --role=DESGW').read()
                                print out
                            # Record the submission so this json is skipped next time.
                            thisjson = preprocessing({
                                'jsonfilename': os.path.join(self.datadir, jsonfile),
                                'jobid': jobid,
                                'dagfile': self.dagfile,
                                'status' : 'Submitted'
                            })
                            self.backend.save(thisjson)
                            self.backend.commit()
                            print 'saved'
                            #sys.exit()
                #raw_input()
                #runProcessingIfNotAlready(image, self.backend)
        #sys.exit()
        print 'Finished submitting minidagmaker with all json files'
        #sys.exit()
        #raw_input()

    # Loop queries for images from mountain and submits them
    # Need to add complexity that monitors filter strategy and waits for entire groups of images to be co-added
    def monitor_images_from_mountain(self):
        """Poll the exposure table (~every 10 s, up to ~50000 s total),
        register new DESGW exposures, group them into hexes, and when a hex
        satisfies the full filter strategy submit its diffimg DAG. Also fires
        the post-processing script roughly every `postprocessingtime` seconds."""
        #NEED TO ADD COADD LOGIC USING STRATEGY FROM CONFIG
        exposure_filter = np.array(self.strategy['exposure_filter'],dtype='str')
        uniquefilts = np.unique(self.strategy['exposure_filter'])
        # Number of exposures required per filter, from the strategy.
        filterstrategy = {}
        for f in uniquefilts:
            filterstrategy[f] = len(exposure_filter[exposure_filter == f])
        print 'filter strategy dictionary ', filterstrategy
        starttime = time.time()
        pptime = time.time()
        keepgoing = True
        index = -1
        submission_counter = 0
        maxsub = 10000
        postprocessingtime = 2000 #every half hour fire off Tim's code for post-processing
        while keepgoing:
            #os.system('kinit -k -t /var/keytab/desgw.keytab desgw/des/[email protected]')
            index += 1
            newfireds = []
            # Hard stop after ~14 hours of polling.
            if time.time() - starttime > 50000:
                keepgoing = False
                continue
            # NOTE(review): ofile is re-opened every iteration and never
            # explicitly closed -- relies on GC; confirm intended.
            ofile = open(os.path.join(self.triggerdir , 'latestquery.txt'), 'w')
            ofile.write(
                "--------------------------------------------------------------------------------------------------\n")
            ofile.write("EXPNUM\tNITE\tBAND\tEXPTIME\tRADEG\t DECDEG\tPROPID\tOBJECT\n")
            ofile.write(
                "--------------------------------------------------------------------------------------------------\n")
            print "--------------------------------------------------------------------------------------------------"
            print "EXPNUM\tNITE\tBAND\tEXPTIME\tRADEG\t DECDEG\tPROPID\tOBJECT"
            print "--------------------------------------------------------------------------------------------------"
            # NOTE(review): string-built SQL; `propid` is a module-level value,
            # and there is no space before "and obstype" -- works only because
            # propid's trailing content makes the SQL parse; verify upstream.
            query = "SELECT expnum,nite,band,exptime,radeg,decdeg,propid,object FROM prod.exposure@desoper WHERE " \
                    "expnum > 475900 and propid=" + propid + "and obstype='object' ORDER BY expnum" # latest
            self.cursor.execute(query)
            for s in self.cursor:
                ofile.write(
                    str(s[0]) + "\t" + str(s[1]) + "\t" + str(s[2]) + "\t" + str(s[3]) + "\t" + str(s[4]) + "\t" + str(
                        s[5]) + "\t" + str(s[6]) + "\t" + str(s[7]) + '\n')
                print str(s[0]) + "\t" + str(s[1]) + "\t" + str(s[2]) + "\t" + str(s[3]) + "\t" + str(
                    s[4]) + "\t" + str(s[5]) + "\t" + str(s[6]) + "\t" + str(s[7])
                # Only DESGW-tagged objects with exposures longer than ~30 s.
                if not 'DESGW' in str(s[7]): continue
                #print 'exptime',float(s[3])
                if not float(s[3]) > 29.: continue #exposure must be longer than 30 seconds
                expnum = str(s[0])
                nite = str(s[1])
                band = str(s[2])
                exptime = str(s[3])
                #FIRST CHECK HERE THAT THE EXPOSURE NUMBER ISNT ALREADY IN THE DATABASE
                try:
                    exposure = self.backend.get(exposures, {'expnum': expnum})
                    print 'Found this exposure in desgw database...'
                    # print exposure.attributes
                    # if expnum == 506432:
                    #     sys.exit()
                    # self.backend.delete(exposure)
                    # self.backend.commit()
                    # exposure = self.backend.get(exposures, {'expnum': expnum})
                except exposures.DoesNotExist:
                    # add to database
                    #runProcessingIfNotAlready(image,self.backend)
                    # Look up the field/tiling for this exposure via helper script.
                    print './diffimg-proc/getTiling.sh '+expnum
                    res = os.popen('./diffimg-proc/getTiling.sh '+expnum).readlines()
                    print res
                    #sys.exit()
                    field,tiling =res[-2],res[-1]
                    #print 'field_tiling',field_tiling
                    # A "hexnite" uniquely keys field+tiling+night.
                    hexnite = field.strip()+'_'+tiling.strip()+'_'+str(nite)
                    #print hexnite
                    #sys.exit()
                    #print 'hexnite',hexnite
                    print 'Creating exposure in database...',hexnite
                    #raw_input()
                    # '--' in the hexnite indicates the tiling lookup failed.
                    if '--' in hexnite:
                        print 'found bad example'
                        #raw_input()
                    exposure = exposures({
                        'expnum':expnum,
                        'nite':nite,
                        'field':field,
                        'tiling':tiling,
                        'hexnite':hexnite,
                        'band':band,
                        'jobid':np.nan,
                        'exptime':exptime,
                        'status':'Awaiting additional exposures',
                        'triggerid': self.trigger_id,
                        'object':str(s[7])
                    })
                    self.backend.save(exposure)
                    self.backend.commit()
                hexnite = exposure.hexnite
                print 'hexnite',hexnite
                if '--' in hexnite:
                    print exposure.attributes
                    #raw_input()
                #raw_input()
                #sys.exit()
                try:
                    hex = self.backend.get(hexes, {'hexnite': hexnite})
                    #self.backend.delete(hex)
                    #self.backend.commit()
                    #hex = self.backend.get(hexes, {'hexnite': hexnite})
                    #print 'Found this hex in desgw database...'
                except hexes.DoesNotExist:
                    # New hex: record how many exposures each filter needs.
                    hex = hexes({
                        'hexnite': hexnite,
                        'strategy': self.strategy['exposure_filter'],
                        'num_target_g': len(exposure_filter[exposure_filter == 'g']),
                        'num_target_r': len(exposure_filter[exposure_filter == 'r']),
                        'num_target_i': len(exposure_filter[exposure_filter == 'i']),
                        'num_target_z': len(exposure_filter[exposure_filter == 'z']),
                        'observed_g': [],
                        'observed_r': [],
                        'observed_i': [],
                        'observed_z': [],
                        'exposures': [],
                        'status': 'Awaiting additional exposures',
                        'dagfile' : None,
                    })
                    self.backend.save(hex)
                    self.backend.commit()
                    print hex.attributes
                    print 'created new hex'
                    #raw_input()
                if hex.status == 'Submitted for processing':
                    print 'This hex has already been submitted for processing'
                    continue
                # if '--' in hexnite:
                #     print hex.attributes
                #     raw_input()
                # if hex.status == 'Submitted for processing':
                #     print 'Hex ',hexnite,' band',band,'exposure',expnum,'has already been submitted for processing'
                #     #raw_input()
                #     continue
                # Attach this exposure to the hex's per-band observed lists.
                if band == 'g':
                    if not expnum in hex.observed_g:
                        hex.observed_g.append(expnum)
                        hex.exposures.append(expnum)
                if band == 'r':
                    if not expnum in hex.observed_r:
                        hex.observed_r.append(expnum)
                        hex.exposures.append(expnum)
                if band == 'i':
                    if not expnum in hex.observed_i:
                        hex.observed_i.append(expnum)
                        hex.exposures.append(expnum)
                if band == 'z':
                    if not expnum in hex.observed_z:
                        hex.observed_z.append(expnum)
                        hex.exposures.append(expnum)
                self.backend.save(hex)
                self.backend.commit()
                print hex.attributes
                didwork = False
                # All four per-band targets met -> submit this hex for processing.
                if len(hex.observed_g) == hex.num_target_g:
                    if len(hex.observed_r) == hex.num_target_r:
                        if len(hex.observed_i) == hex.num_target_i:
                            if len(hex.observed_z) == hex.num_target_z:
                                print 'All exposures in strategy satisfied!'
                                #raw_input()
                                submissionPassed = True
                                for target, exps in zip([hex.num_target_g,hex.num_target_r,hex.num_target_i,hex.num_target_z],
                                                        [hex.observed_g,hex.observed_r,hex.observed_i,hex.observed_z]):
                                    if target == 0: continue
                                    # Build "expnum expnum ..." for the DAG maker and
                                    # "expnum_expnum_..." for log file names.
                                    exposurestring = ''
                                    logstring = ''
                                    for ex in exps:
                                        exposurestring += ex+' '
                                        logstring += ex+'_'
                                    print 'cd diffimg-proc; source DAGMaker.sh ' + exposurestring
                                    os.chdir("diffimg-proc")
                                    #out = os.popen('ls').read()
                                    out = os.popen('./DAGMaker.sh ' + exposurestring ).read()
                                    os.chdir("..")
                                    print out
                                    f = open(os.path.join(self.processingdir,logstring+hexnite+'.dagmakerlog'),'w')
                                    f.write(out)
                                    f.close()
                                    tt = time.time()
                                    if not 'To submit this DAG do' in out:
                                        dt.sendEmailSubject(self.trigger_id, 'Error in creating dag for desgw hex: ' + out)
                                        submissionPassed = False
                                    else:
                                        # Scrape dag file path, copy it locally, submit it.
                                        for o in out.split('\n'):
                                            if 'file://' in o:
                                                dagfile = o.split('/')[-1]
                                                self.dagfile = os.path.join(self.processingdir,logstring+'job.dag')
                                                os.system('cp diffimg-proc/' + dagfile + ' ' + self.dagfile)
                                        print self.dagfile
                                        print 'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; jobsub_submit_dag -G des --role=DESGW file://' + self.dagfile
                                        out = os.popen(
                                            'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; '
                                            'jobsub_submit_dag -G des --role=DESGW file://' + self.dagfile).read()
                                        print out
                                        f = open(os.path.join(self.processingdir, logstring + hexnite + '.dagsumbitlog'), 'w')
                                        f.write(out)
                                        f.close()
                                        if 'non-zero exit status' in out:
                                            dt.sendEmailSubject(self.trigger_id, 'Error in submitting hex dag for processing: ' + out)
                                            submissionPassed = False
                                        else:
                                            # Debug switch: immediately remove the submitted job.
                                            if doimmediateremove:
                                                for o in out.split('\n'):
                                                    if 'Use job id' in o:
                                                        jobid = o.split()[3]
                                                        out = os.popen(
                                                            'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; '
                                                            'jobsub_rm --jobid=' + jobid + ' --group=des --role=DESGW').read()
                                                        print out
                                    ttt = time.time()
                                    #print 'TOTAL JOBSUB FOR A SINGLE DESGW IMAGE',ttt-tt
                                    #sys.exit()
                                    #raw_input()
                                if submissionPassed:
                                    # Mark the hex and all of its exposures as submitted.
                                    hex.status = 'Submitted for processing'
                                    hex.dagfile = self.dagfile
                                    self.backend.save(hex)
                                    self.backend.commit()
                                    for expn in hex.exposures:
                                        print expn, 'updated in database to Submitted For Processing'
                                        exp = self.backend.get(exposures, {'expnum': expn})
                                        exp.status = 'Submitted for processing'
                                        self.backend.save(exp)
                                        self.backend.commit()
                                    didwork = True
                                    print 'didwork',didwork
                                    print 'dagfile',self.dagfile
                                    #sys.exit()
                                    #raw_input()
                if not didwork:
                    print 'Could not find all images in strategy for this hex... Added hex', hexnite,' to database ' \
                          'and will continue waiting...'
                    #raw_input()
            # Periodically fire Tim's post-processing code.
            if time.time() - pptime > postprocessingtime: #happens every 30 minutes or so...
                pptime = time.time()
                print '***** Firing post processing script *****'
                #sys.exit()
                #ppout = self.pp.communicate()
                # if self.thisevent.postprocint > 0:
                #     print ppout
                #     f = open(os.path.join(self.processingdir,'postproc_attempt'+str(int(self.thisevent.postprocint))+'.log'),'w')
                #     f.write(ppout)
                #     f.close()
                self.thisevent.postprocint += 1
                self.backend.save(self.thisevent)
                self.backend.commit()
                self.submit_post_processing()
                #sys.exit()
            #print 'Waiting 10s to check from mountain...'
            #sys.exit()
            time.sleep(10)#looping over checking the mountain top
        # cfiles = os.listdir(os.path.join(trigger_path,trigger_id,'candidates'))
        # for f in cfiles:
        #     if f.split('.')[-1] == 'npz':
        #         cp.makeNewPage(f)

    def submit_post_processing(self):
        """Launch the post-processing (candidate) script asynchronously over
        all exposures recorded for this trigger, logging to a per-attempt file.
        Stores the Popen handle on self.pp and returns immediately."""
        #firedlist = open('./processing/firedlist.txt', 'r')
        #fl = firedlist.readlines()
        #firedlist.close()
        #print fl
        #fl = ['475914','475915','475916','482859','482860','482861']
        fl = self.backend.filter(exposures, {'triggerid': self.trigger_id})
        expnumlist = ''
        for f in fl:
            expnumlist += f.expnum.strip()+' '
        print expnumlist
        #expnumlist = '475905 475909 475913 475917 475921 475925 475929 475933 475937 475941 475945 475949 475953 475957 475961'
        print 'FIRING TIMs CODE'
        try:
            os.mkdir(os.path.join(self.trigger_path,self.trigger_id,'candidates'))
        except:
            print 'Candidates directory exists,',os.path.join(self.trigger_path,self.trigger_id,'candidates')
            pass
        #sys.exit()
        gwpostdir = os.environ['GWPOST_DIR']
        print 'source ' + os.path.join(gwpostdir, 'diffimg_setup.sh') + '; \
python '+os.path.join(gwpostdir,'postproc.py')\
              +' --expnums ' + expnumlist\
              + ' --outputdir ' + os.path.join(self.trigger_path,self.trigger_id,'candidates')\
              + ' --triggerid '+self.trigger_id+' --season 70 --ups True'
        # os.system('source ' + os.path.join(gwpostdir, 'diffimg_setup.sh') + '; \
        # python '+os.path.join(gwpostdir,'postproc.py')\
        # +' --expnums ' + expnumlist\
        # + ' --outputdir ' + os.path.join(trigger_path,trigger_id,'candidates')\
        # + ' --triggerid '+trigger_id+' --season 46 --ups True' )
        #pid = os.spawnlp(os.P_WAIT, "source", os.path.join(gwpostdir, 'diffimg_setup.sh'))
        # args = ['yes | ssh -t [email protected] "source '+ os.path.join(gwpostdir, 'mi_setup.sh')+'; '+
        #         'yes | python '+os.path.join(gwpostdir,'postproc.py')\
        #         +' --expnums ' + expnumlist\
        #         + ' --outputdir ' + os.path.join(self.trigger_path,self.trigger_id,'candidates')\
        #         + ' --triggerid '+self.trigger_id+' --season 46 --ups True"'
        #         ]
        args = ['yes | python ' + os.path.join(gwpostdir, 'postproc.py') \
                + ' --expnums ' + expnumlist \
                + ' --outputdir ' + os.path.join(self.trigger_path, self.trigger_id, 'candidates') \
                + ' --triggerid ' + self.trigger_id + ' --season 70 --ups True']
        print args
        #sys.exit()
        # NOTE(review): '.log' is concatenated outside os.path.join's closing
        # paren; the result is still a valid path string, but confirm intended.
        # The log file handle is deliberately left open for the child process.
        f = open(os.path.join(self.processingdir,'postproc_attempt'+str(int(self.thisevent.postprocint)))+'.log','w')
        self.pp = subprocess.Popen(args,stdout=f, stderr=f,shell=True)
        #p = subprocess.Popen(args, stdin=None, stdout=None, stderr=None, close_fds=True, shell=True)
        #print 'going'*1000
        #print self.pp.communicate()
        #print 'gone'*1000
        #p = subprocess.Popen(args,stdin=None, stdout=None, stderr=None, close_fds=True,shell=True)
        #p.communicate()
        #sys.exit()
        return

    def getDatetimeOfFirstJson(self,jsonstring):
        """Parse the UTC observation datetime out of a json filename of the
        form 'M263920-30-UTC-2016-12-1-0:44:00.json'; returns a datetime."""
        #'M263920-30-UTC-2016-12-1-0:44:00.json'
        js = jsonstring.split('UTC')[1]#-2015-12-27-3:2:00.json
        #date_object = dt.strptime(js, '-%Y-%m-%d-%H_%M_%S.json')
        date_object = dt.strptime(js, '-%Y-%m-%d-%H:%M:%S.json')
        print '***** Datetime of first observation UTC',date_object,'*****'
        return date_object

    def sortHexes(self):
        # Placeholder -- not yet implemented.
        pass