class DataProvider:
    def __init__(self, db_path):
        self.db = FileBackend(db_path)

    def last(self, user):
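        # Return the WeightRecord flagged as this user's latest entry (via get_first_func).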
        return get_first_func(self.db.filter(WeightRecord, {'user': user, 'last': True}))

    def all_mornings(self, user):
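        # All of the user's morning records, sorted oldest to newest by time.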
        db_filter = self.db.filter(WeightRecord, {'user': user, 'morning': True})
        #if db_filter:
        #    for i in db_filter:
        #        logging.debug("T0 {}".format(i.time))
        sor = sorted(db_filter, key=lambda x: x.time, reverse=False)
        #if sor:
        #    for i in sor:
        #       logging.debug("T1 {}".format(i.time))
        return sor

    def last_morning(self, data):
        return get_first_func(self.db.filter(WeightRecord, {
            'last': True, 'morning': True, 'user': data.user}))

    def today_morning(self, data):
        return get_first_func(self.db.filter(WeightRecord, {
            'year': data.year, 'month': data.month, 'day': data.day, 'user': data.user, 'morning': True}))

    def save(self, record):
        record.save(self.db)

    def commit(self):
        self.db.commit()
Example #2
class JavManagerDB:
    def __init__(self):
        self.jav_db = FileBackend('jav_manager.db')

    def create_indexes(self):
        print('creating index for stat')
        self.jav_db.create_index(JavObj, 'stat')

    def rebuild_index(self):
        self.jav_db.rebuild_index(self.jav_db.get_collection_for_cls(JavObj),
                                  'stat')

    def bulk_list(self):
        return self.jav_db.filter(JavObj, {})

    def partial_search(self, search_string: str):
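        # Regex search on the upper-cased primary key, capped at the first 20 matches.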
        rt = self.jav_db.filter(JavObj,
                                {'pk': {
                                    '$regex': search_string.upper()
                                }})[:20]
        return rt

    def query_on_filter(self, filter_on: dict, page=1, limit=8):
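        # Return one page of results as plain dicts, plus the total number of pages.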
        rt = self.jav_db.filter(JavObj, filter_on)
        rt_max_page = ceil(len(rt) / limit)
        rt_list = rt[(page - 1) * limit:(page) * limit]

        return [dict(x) for x in rt_list], rt_max_page

    def upcreate_jav(self, jav_obj: dict):
        # normalize 'car' to upper case
        jav_obj['car'] = str(jav_obj['car']).upper()
        # set pk to car
        jav_obj['pk'] = jav_obj['car']

        # pull existing data since this is an update function
        try:
            current_jav_obj = dict(self.get_by_pk(jav_obj['car']))
            # overwrite current db dict with input dict
            current_jav_obj.update(jav_obj)
        except DoesNotExist:
            # set default to no opinion
            #0-want, 1-viewed, 2-no opinion 3-local 4-downloading
            jav_obj.setdefault('stat', 2)

        _jav_doc = JavObj(jav_obj)
        _jav_doc.save(self.jav_db)
        self.jav_db.commit()
        print('wrote', jav_obj)

    def get_by_pk(self, pk: str):
        return self.jav_db.get(JavObj, {'pk': pk.upper()})

    def pk_exist(self, pk: str):
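        # True if a record with this primary key exists, False otherwise.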
        try:
            self.jav_db.get(JavObj, {'pk': pk.upper()})
            return True
        except DoesNotExist:
            return False
Example #3
class DataProvider:
    def __init__(self, db_path):
        self.db = FileBackend(db_path)

    def last(self, user):
        return get_first_func(
            self.db.filter(WeightRecord, {
                'user': user,
                'last': True
            }))

    def all_mornings(self, user):
        db_filter = self.db.filter(WeightRecord, {
            'user': user,
            'morning': True
        })
        #if db_filter:
        #    for i in db_filter:
        #        logging.debug("T0 {}".format(i.time))
        sor = sorted(db_filter, key=lambda x: x.time, reverse=False)
        #if sor:
        #    for i in sor:
        #       logging.debug("T1 {}".format(i.time))
        return sor

    def last_morning(self, data):
        return get_first_func(
            self.db.filter(WeightRecord, {
                'last': True,
                'morning': True,
                'user': data.user
            }))

    def today_morning(self, data):
        return get_first_func(
            self.db.filter(
                WeightRecord, {
                    'year': data.year,
                    'month': data.month,
                    'day': data.day,
                    'user': data.user,
                    'morning': True
                }))

    def save(self, record):
        record.save(self.db)

    def commit(self):
        self.db.commit()
Example #4
def removeHexFromProcessingDB(hexnite,real):
    if real:
        backend = FileBackend("./realdb")
    else:
        backend = FileBackend("./testdb")

    hexen = backend.filter(hexes, {'hexnite': hexnite})
    hexen.delete()
    backend.commit()
    print 'Hexnite', hexnite, 'removed from database'
Example #5
def removeHexFromProcessingDB(hexnite, real):
    if real:
        backend = FileBackend("./realdb")
    else:
        backend = FileBackend("./testdb")

    hexen = backend.filter(hexes, {'hexnite': hexnite})
    hexen.delete()
    backend.commit()
    print 'Hexnite', hexnite, 'removed from database'
Example #6
def cleardb(real=False):
    password = '******'
    user_input = raw_input('Please Enter Password: ')
    if user_input != password:
        sys.exit('Incorrect Password, terminating... \n')

    if real:
        backend = FileBackend("./realdb")
    else:
        backend = FileBackend("./testdb")

    images = backend.filter(preprocessing, {'status': 'Submitted'})
    images.delete()
    hexen = backend.filter(hexes,{'num_target_g':0})
    hexen.delete()
    exposuren = backend.filter(exposures,{'status':'Awaiting additional exposures'})
    exposuren.delete()
    backend.commit()
Example #7
def cleardb(real=False):
    password = '******'
    user_input = raw_input('Please Enter Password: ')
    if user_input != password:
        sys.exit('Incorrect Password, terminating... \n')

    if real:
        backend = FileBackend("./realdb")
    else:
        backend = FileBackend("./testdb")

    images = backend.filter(preprocessing, {'status': 'Submitted'})
    images.delete()
    hexen = backend.filter(hexes, {'num_target_g': 0})
    hexen.delete()
    exposuren = backend.filter(exposures,
                               {'status': 'Awaiting additional exposures'})
    exposuren.delete()
    backend.commit()
Example #8
def main():
	backend = FileBackend("./my-db") #locate the backend

	fin = file("Cu_material_db.yaml","r") #read the yaml file
	in_param = yaml.load(fin) #load it
	name = in_param.get("Name") #extract out the parameters
	cond = in_param.get("Conductivity") 
	n = in_param.get("Carrier_Density") 
	supercond = in_param.get("Superconductivity")
	res = in_param.get("Resistance")

	right_input_flag = True #check the superconducting type matches the number of critical fields
	if supercond.get("Superconducting"): #create a new entry for a superconductor
		right_input_flag = False if len(supercond.get("Critical_Fields")) != supercond.get("Type")  else True
		if right_input_flag:
			new = Material({"name": name, "cond": cond, "n": n,  
					"sc": supercond.get("Superconducting"),
					"sctype" : supercond.get("Type"),
					"Tc" : supercond.get("Critical_Temperature"),
					"nc" : supercond.get("Critical_Current_Density"),
					"Hc" : supercond.get("Critical_Fields"),
					"L" : supercond.get("Penetration_Depth"),
					"Tref" : res.get("Ref_Temperature"),
					"res" : res.get("Resistivity"),
					"alpha" : res.get("Temperature_Coefficient")
					})
		else:
			print "Superconducting type and number of critical fields don't match!"
	else: #create a new entry for a non-superconductor
		new = Material({"name": name, "cond": cond, "n": n,  
				"sc": supercond.get("Superconducting"),
				"Tref" : res.get("Ref_Temperature"),
				"res" : res.get("Resistivity"),
				"alpha" : res.get("Temperature_Coefficient")
				})
		
	if right_input_flag:
		#always update the database: delete any old entry of this material before saving the new one
		mtrl = backend.filter(Material, {"name": name})
		mtrl.delete()
		new.save(backend) #save the entry in the database
		backend.commit()
		print "Input succeeded!"
Example #9
#!/usr/bin/python
# -*- coding: utf-8 -*-

import tweepy, secrets, time, random, json
from blitzdb import Document, FileBackend

backend = FileBackend(".db")

class User(Document):
    pass

with open('phrases.json') as f:
    phrases = json.load(f)

users = backend.filter(User,{'twitted':False})

auth = tweepy.OAuthHandler(secrets.CONSUMER_KEY, secrets.CONSUMER_SECRET)
auth.set_access_token(secrets.ACCES_TOKEN, secrets.ACCES_TOKEN_SECRET)

api = tweepy.API(auth)

for startup in users:
    user = startup.get('user').split('?')[0]
    print user
    if len(user) > 1:
        phrase = random.choice(phrases)
        print phrase % user
        api.update_status(phrase % user)
        startup.twitted = True
        startup.save(backend)
    else:
        pass
Example #10
class BlitzDB(Database):

    key = 'pk'
    _database_type = 'blitzdb'

    class Data(Document):
        pass

    class Cutout(Document):
        pass

    class Fingerprint(Document):
        pass

    class Similarity(Document):
        pass

    def _get_table(self, table_name):
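        # Map a table name string to the corresponding Document subclass.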
        if table_name == 'data':
            return BlitzDB.Data
        elif table_name == 'cutout':
            return BlitzDB.Cutout
        elif table_name == 'fingerprint':
            return BlitzDB.Fingerprint
        elif table_name == 'similarity':
            return BlitzDB.Similarity
        else:
            log.error('BAD TABLE NAME {}'.format(table_name))

    def __init__(self, filename):
        self._filename = filename
        self._backend = FileBackend(self._filename)

    def save(self, table, data):

        # Convert to dict if not one already
        if not isinstance(data, dict):
            data = data.save()

        blitz_table = self._get_table(table)
        data.update({'pk': data['uuid']})
        save_id = self._backend.save(blitz_table(data))
        self._backend.commit()
        return save_id['pk']

    def find(self, table, key=None):
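        # key=None returns every row, a list of keys does a bulk $in lookup, and a scalar fetches a single record.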
        blitz_table = self._get_table(table)
        factory = get_factory(table)

        if key is None:
            return [
                factory(dict(x), db=self)
                for x in self._backend.filter(blitz_table, {})
            ]
        elif isinstance(key, list):
            return [
                factory(dict(x), db=self) for x in self._backend.filter(
                    blitz_table, {'pk': {
                        '$in': key
                    }})
            ]
        else:
            return factory(dict(self._backend.get(blitz_table, {'pk': key})),
                           db=self)

    def count(self, table):
        blitz_table = self._get_table(table)
        return len(self._backend.filter(blitz_table, {}))

    def update(self, table, key, data):
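        # Fetch the document by primary key, overwrite the given attributes, and commit.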
        blitz_table = self._get_table(table)
        entry = self._backend.get(blitz_table, {'pk': key})
        for k, v in data.items():
            setattr(entry, k, v)
        entry.save()
        self._backend.commit()

    def close(self):
        pass

    def delete_database(self):
        shutil.rmtree(self._filename)
Example #11
print("Backend Saved and committed:", os.path.realpath(os.curdir))


# print(backend.get(Movie,{'pk':1}))
# or...
the_godfather = backend.get(Movie,{'name' : 'The Godfather'})
print("the_godfather",the_godfather)

the_godfather.cast = {'Don Vito Corleone' : marlon_brando, 'Michael Corleone' : al_pacino}

#Documents stored within other objects will be automatically converted to database references.

marlon_brando.performances = [the_godfather]
al_pacino.performances = [the_godfather]

marlon_brando.save(backend)
al_pacino.save(backend)
the_godfather.save(backend)



backend.create_index(Actor,'performances')
#Will create an index on the 'performances' field, for fast querying

godfather_cast = backend.filter(Actor,{'performances' : the_godfather})
print("godfather_cast",godfather_cast)
#Will return 'Al Pacino' and 'Marlon Brando'


print(backend.get(Movie,{'name' : 'The Godfather'}))
Example #12
class eventmanager:
    def __init__(self, trigger_id, jsonfilelist, triggerdir, datadir, real, trigger_path):

        #os.system('kinit -k -t /var/keytab/desgw.keytab desgw/des/[email protected]')
        tstart = time.time()

        if real:
            self.backend = FileBackend("./realdb")
        else:
            self.backend = FileBackend("./testdb")

        try:
            self.thisevent = self.backend.get(Trigger, {'id': trigger_id})
            print 'Found this event in desgw database...'
        except Trigger.DoesNotExist:
            self.thisevent = Trigger({
                'id':trigger_id,
                'jsonfilelist':jsonfilelist,
                'triggerpath':triggerdir,
                'mapspath':datadir,
                'jobids':[
                    (0,'jsonfile_corresponding_to_jobid.json'),
                ],
                'postprocint': 0
            })
            print 'Database entry created!'


        self.trigger_id = trigger_id
        self.trigger_path = trigger_path

        self.backend.save(self.thisevent)
        self.backend.commit()

        with open(os.path.join(triggerdir,"strategy.yaml"), "r") as f:
            self.config = yaml.safe_load(f)
        self.filterobslist = np.array(self.config['exposure_filter'],dtype='str')
        self.strategydict = {}

        for f in np.unique(self.filterobslist):
            self.strategydict[f] = len(self.filterobslist[self.filterobslist == f])

        self.connection = ea.connect(DATABASE)
        self.cursor = self.connection.cursor()

        self.jsonfilelist = jsonfilelist

        print self.jsonfilelist
        if hardjson:
            self.jsonfilelist = hj

        #self.pp = subprocess.Popen('echo starting',stdout=PIPE, stderr=PIPE,shell=True)

        self.trigger_id = trigger_id
        self.datadir = datadir
        self.triggerdir = triggerdir
        self.processingdir = os.path.join(self.triggerdir,'PROCESSING')
        if not os.path.exists(self.processingdir):
            os.makedirs(self.processingdir)

        dire = './processing/' + trigger_id + '/'
        if not os.path.exists(dire):
            os.makedirs(dire)

        with open(os.path.join(triggerdir, "strategy.yaml"), "r") as f:
            self.strategy = yaml.safe_load(f)

        with open("jobmanager.yaml", "r") as g:
            self.jmconfig = yaml.safe_load(g)


        q1 = "select expnum,nite,mjd_obs,telra,teldec,band,exptime,propid,obstype,object from exposure where " \
             "nite>20130828 and nite<20150101 and expnum<300000 and obstype='object' order by expnum"  # y1 images
        self.connection.query_and_save(q1, './processing/exposuresY1.tab')

        q2 = "select expnum,nite,mjd_obs,radeg,decdeg,band,exptime,propid,obstype,object from prod.exposure where " \
             "nite>20150901 and obstype='object' order by expnum"  # y2 and later
        self.connection.query_and_save(q2, './processing/exposuresCurrent.tab')

        os.system('cat ./processing/exposuresY1.tab ./processing/exposuresCurrent.tab > ./processing/exposures.list')

        #self.submit_post_processing()

        self.submit_all_jsons_for_sejobs()#preps all DES images that already exist
        tfin = time.time()
        print 'TOTAL SE JOBS TIME', tfin - tstart
        #sys.exit()
        self.monitor_images_from_mountain()#A loop that waits for images off mountain and submits for processing

    def submit_all_jsons_for_sejobs(self):
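        # If there is enough lead time before the first observation, build and submit an SEMaker dag
        # for every json file not already tracked in the preprocessing database.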
        obsStartTime = self.getDatetimeOfFirstJson(self.jsonfilelist[0])  # THIS IS A DATETIME OBJ
        currentTime = dt.utcnow()
        print '***** The current time is UTC', currentTime, '*****'
        delt = obsStartTime - currentTime

        timedelta = td(days=delt.days, seconds=delt.seconds).total_seconds() / 3600.
        print '***** The time delta is ', timedelta, 'hours *****'
        # if timedelta > np.pi:

        sejob_timecushion = self.jmconfig["sejob_timecushion"]

        if timedelta > sejob_timecushion:
            for jsonfile in self.jsonfilelist:
                print 'json',jsonfile
                try: #check if this json file is already in the submitted preprocessing database
                    thisjson = self.backend.get(preprocessing, {'jsonfilename': os.path.join(self.datadir, jsonfile)})
                    print 'Found this json in desgw database...'
                except preprocessing.DoesNotExist: #do submission and then add to database

                    print 'cd diffimg-proc; ./SEMaker_RADEC.sh '+os.path.join(self.datadir, jsonfile)
                    os.chdir("diffimg-proc")
                    out = os.popen('./SEMaker_RADEC.sh '+os.path.join(self.datadir, jsonfile)).read()
                    of = open(os.path.join(self.processingdir,jsonfile.split('/')[-1].split('.')[0]+'.SEMakerlog'),'w')
                    of.write(out)
                    of.close()
                    #out = os.popen('ls').read()
                    os.chdir("..")
                    print out
                    if 'non-zero exit status' in out:
                        dt.sendEmailSubject(self.trigger_id,'Error in creating SEMaker dag for .json: '+out)
                    else:
                        for o in out.split('\n'):
                            if 'file://' in o:
                                dagfile = o.split('/')[-1]
                                self.dagfile = os.path.join(self.processingdir,jsonfile.split('/')[-1].split('.')[0]+'_'+dagfile)
                                os.system('cp diffimg-proc/'+dagfile+' '+self.dagfile)
                                jobsubmitline = copy(o)
                        print self.dagfile

                    print 'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; jobsub_submit_dag -G des --role=DESGW file://'+self.dagfile

                    out = os.popen(
                        'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; '
                        'jobsub_submit_dag -G des --role=DESGW file://'+self.dagfile).read()
                    print out

                    of = open(os.path.join(self.processingdir,
                                           jsonfile.split('/')[-1].split('.')[0] + '.SEdagsubmitlog'), 'w')
                    of.write(out)
                    of.close()

                    if 'non-zero exit status' in out:
                        dt.sendEmailSubject(self.trigger_id, 'Error in submitting .json for preprocessing: ' + out)
                    else:
                        for o in out.split('\n'):
                            if 'Use job id' in o:
                                jobid = o.split()[3]
                        if doimmediateremove:
                            out = os.popen(
                                'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; '
                                'jobsub_rm --jobid=' + jobid + ' --group=des --role=DESGW').read()
                            print out

                    thisjson = preprocessing({
                        'jsonfilename': os.path.join(self.datadir, jsonfile),
                        'jobid': jobid,
                        'dagfile': self.dagfile,
                        'status' : 'Submitted'
                    })

                    self.backend.save(thisjson)
                    self.backend.commit()
                    print 'saved'
                    #sys.exit()
                #raw_input()
                #runProcessingIfNotAlready(image, self.backend)

                #sys.exit()

        print 'Finished submitting minidagmaker with all json files'
        #sys.exit()
        #raw_input()

    # Loop queries for images from mountain and submits them
    # Need to add complexity that monitors filter strategy and waits for entire groups of images to be co-added
    def monitor_images_from_mountain(self):
        #NEED TO ADD COADD LOGIC USING STRATEGY FROM CONFIG

        exposure_filter = np.array(self.strategy['exposure_filter'],dtype='str')
        uniquefilts = np.unique(self.strategy['exposure_filter'])

        filterstrategy = {}
        for f in uniquefilts:
            filterstrategy[f] = len(exposure_filter[exposure_filter == f])

        print 'filter strategy dictionary  ', filterstrategy

        starttime = time.time()
        pptime = time.time()
        keepgoing = True
        index = -1
        submission_counter = 0
        maxsub = 10000
        postprocessingtime = 2000 #every half hour fire off Tim's code for post-processing
        while keepgoing:
            #os.system('kinit -k -t /var/keytab/desgw.keytab desgw/des/[email protected]')
            index += 1
            newfireds = []
            if time.time() - starttime > 50000:
                keepgoing = False
                continue

            ofile = open(os.path.join(self.triggerdir , 'latestquery.txt'), 'w')

            ofile.write(
                "--------------------------------------------------------------------------------------------------\n")
            ofile.write("EXPNUM\tNITE\tBAND\tEXPTIME\tRADEG\t DECDEG\tPROPID\tOBJECT\n")
            ofile.write(
                "--------------------------------------------------------------------------------------------------\n")

            print "--------------------------------------------------------------------------------------------------"
            print "EXPNUM\tNITE\tBAND\tEXPTIME\tRADEG\t DECDEG\tPROPID\tOBJECT"
            print "--------------------------------------------------------------------------------------------------"

            query = "SELECT expnum,nite,band,exptime,radeg,decdeg,propid,object FROM prod.exposure@desoper WHERE " \
                    "expnum > 475900 and propid=" + propid + "and obstype='object' ORDER BY expnum"  # latest


            self.cursor.execute(query)

            for s in self.cursor:
                ofile.write(
                    str(s[0]) + "\t" + str(s[1]) + "\t" + str(s[2]) + "\t" + str(s[3]) + "\t" + str(s[4]) + "\t" + str(
                        s[5]) + "\t" + str(s[6]) + "\t" + str(s[7]) + '\n')
                print str(s[0]) + "\t" + str(s[1]) + "\t" + str(s[2]) + "\t" + str(s[3]) + "\t" + str(
                    s[4]) + "\t" + str(s[5]) + "\t" + str(s[6]) + "\t" + str(s[7])

                if not 'DESGW' in str(s[7]): continue
                #print 'exptime',float(s[3])
                if not float(s[3]) > 29.: continue #exposure must be longer than 30 seconds

                expnum = str(s[0])
                nite = str(s[1])
                band = str(s[2])
                exptime = str(s[3])


                #FIRST CHECK HERE THAT THE EXPOSURE NUMBER ISNT ALREADY IN THE DATABASE
                try:
                    exposure = self.backend.get(exposures, {'expnum': expnum})
                    print 'Found this exposure in desgw database...'
                    # print exposure.attributes
                    # if expnum == 506432:
                    #     sys.exit()
                    # self.backend.delete(exposure)
                    # self.backend.commit()
                    # exposure = self.backend.get(exposures, {'expnum': expnum})

                except exposures.DoesNotExist:  # add to database
                    #runProcessingIfNotAlready(image,self.backend)

                    print './diffimg-proc/getTiling.sh '+expnum

                    res = os.popen('./diffimg-proc/getTiling.sh '+expnum).readlines()
                    print res
                    #sys.exit()
                    field, tiling = res[-2], res[-1]
                    #print 'field_tiling',field_tiling
                    hexnite = field.strip()+'_'+tiling.strip()+'_'+str(nite)
                    #print hexnite
                    #sys.exit()
                    #print 'hexnite',hexnite
                    print 'Creating exposure in database...',hexnite
                    #raw_input()
                    if '--' in hexnite:
                        print 'found bad example'
                        #raw_input()

                    exposure = exposures({
                        'expnum':expnum,
                        'nite':nite,
                        'field':field,
                        'tiling':tiling,
                        'hexnite':hexnite,
                        'band':band,
                        'jobid':np.nan,
                        'exptime':exptime,
                        'status':'Awaiting additional exposures',
                        'triggerid': self.trigger_id,
                        'object':str(s[7])
                    })

                    self.backend.save(exposure)
                    self.backend.commit()

                hexnite = exposure.hexnite
                print 'hexnite',hexnite
                if '--' in hexnite:
                    print exposure.attributes
                    #raw_input()
                #raw_input()
                #sys.exit()
                try:
                    hex = self.backend.get(hexes, {'hexnite': hexnite})
                    #self.backend.delete(hex)
                    #self.backend.commit()
                    #hex = self.backend.get(hexes, {'hexnite': hexnite})

                    #print 'Found this hex in desgw database...'
                except hexes.DoesNotExist:
                    hex = hexes({
                        'hexnite': hexnite,
                        'strategy': self.strategy['exposure_filter'],
                        'num_target_g': len(exposure_filter[exposure_filter == 'g']),
                        'num_target_r': len(exposure_filter[exposure_filter == 'r']),
                        'num_target_i': len(exposure_filter[exposure_filter == 'i']),
                        'num_target_z': len(exposure_filter[exposure_filter == 'z']),
                        'observed_g': [],
                        'observed_r': [],
                        'observed_i': [],
                        'observed_z': [],
                        'exposures': [],
                        'status': 'Awaiting additional exposures',
                        'dagfile' : None,
                    })

                    self.backend.save(hex)
                    self.backend.commit()
                    print hex.attributes
                    print 'created new hex'
                    #raw_input()

                if hex.status == 'Submitted for processing':
                    print 'This hex has already been submitted for processing'
                    continue

                # if '--' in hexnite:
                #     print hex.attributes
                #     raw_input()
                # if hex.status == 'Submitted for processing':
                #     print 'Hex ',hexnite,' band',band,'exposure',expnum,'has already been submitted for processing'
                #     #raw_input()
                #     continue

                if band == 'g':
                    if not expnum in hex.observed_g:
                        hex.observed_g.append(expnum)
                        hex.exposures.append(expnum)
                if band == 'r':
                    if not expnum in hex.observed_r:
                        hex.observed_r.append(expnum)
                        hex.exposures.append(expnum)
                if band == 'i':
                    if not expnum in hex.observed_i:
                        hex.observed_i.append(expnum)
                        hex.exposures.append(expnum)
                if band == 'z':
                    if not expnum in hex.observed_z:
                        hex.observed_z.append(expnum)
                        hex.exposures.append(expnum)

                self.backend.save(hex)
                self.backend.commit()

                print hex.attributes

                didwork = False
                if len(hex.observed_g) == hex.num_target_g:
                    if len(hex.observed_r) == hex.num_target_r:
                        if len(hex.observed_i) == hex.num_target_i:
                            if len(hex.observed_z) == hex.num_target_z:
                                print 'All exposures in strategy satisfied! '
                                #raw_input()
                                submissionPassed = True

                                for target, exps in zip([hex.num_target_g,hex.num_target_r,hex.num_target_i,hex.num_target_z],
                                                        [hex.observed_g,hex.observed_r,hex.observed_i,hex.observed_z]):

                                    if target == 0: continue
                                    exposurestring = ''
                                    logstring = ''
                                    for ex in exps:
                                        exposurestring += ex+' '
                                        logstring += ex+'_'

                                    print 'cd diffimg-proc; source DAGMaker.sh ' + exposurestring
                                    os.chdir("diffimg-proc")
                                    #out = os.popen('ls').read()
                                    out = os.popen('./DAGMaker.sh ' + exposurestring ).read()


                                    os.chdir("..")
                                    print out
                                    f = open(os.path.join(self.processingdir,logstring+hexnite+'.dagmakerlog'),'w')
                                    f.write(out)
                                    f.close()
                                    tt = time.time()
                                    if not 'To submit this DAG do' in out:
                                        dt.sendEmailSubject(self.trigger_id, 'Error in creating dag for desgw hex: ' + out)
                                        submissionPassed = False
                                    else:
                                        for o in out.split('\n'):
                                            if 'file://' in o:
                                                dagfile = o.split('/')[-1]
                                                self.dagfile = os.path.join(self.processingdir,logstring+'job.dag')
                                                os.system('cp diffimg-proc/' + dagfile + ' ' + self.dagfile)
                                        print self.dagfile

                                    print 'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; jobsub_submit_dag -G des --role=DESGW file://' + self.dagfile

                                    out = os.popen(
                                        'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; '
                                        'jobsub_submit_dag -G des --role=DESGW file://' + self.dagfile).read()
                                    print out
                                    f = open(os.path.join(self.processingdir, logstring + hexnite + '.dagsumbitlog'), 'w')
                                    f.write(out)
                                    f.close()

                                    if 'non-zero exit status' in out:
                                        dt.sendEmailSubject(self.trigger_id,
                                                            'Error in submitting hex dag for processing: ' + out)
                                        submissionPassed = False
                                    else:
                                        if doimmediateremove:
                                            for o in out.split('\n'):
                                                if 'Use job id' in o:
                                                    jobid = o.split()[3]
                                            out = os.popen(
                                                'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; '
                                                'jobsub_rm --jobid=' + jobid + ' --group=des --role=DESGW').read()
                                            print out
                                    ttt = time.time()
                                    #print 'TOTAL JOBSUB FOR A SINGLE DESGW IMAGE',ttt-tt

                                    #sys.exit()
                                #raw_input()
                                if submissionPassed:

                                    hex.status = 'Submitted for processing'
                                    hex.dagfile = self.dagfile
                                    self.backend.save(hex)
                                    self.backend.commit()

                                    for expn in hex.exposures:
                                        print expn, 'updated in database to Submitted For Processing'
                                        exp = self.backend.get(exposures, {'expnum': expn})
                                        exp.status = 'Submitted for processing'
                                        self.backend.save(exp)
                                        self.backend.commit()
                                    didwork = True
                                print 'didwork',didwork
                                print 'dagfile',self.dagfile
                                #sys.exit()
                                #raw_input()

                if not didwork:
                    print 'Could not find all images in strategy for this hex... Added hex', hexnite,' to database ' \
                        'and will continue waiting...'
                    #raw_input()


                if time.time() - pptime > postprocessingtime: #happens every 30 minutes or so...
                    pptime = time.time()
                    print '***** Firing post processing script *****'
                    #sys.exit()

                    #ppout = self.pp.communicate()

                    # if self.thisevent.postprocint > 0:
                    #     print ppout
                    #     f = open(os.path.join(self.processingdir,'postproc_attempt'+str(int(self.thisevent.postprocint))+'.log'),'w')
                    #     f.write(ppout)
                    #     f.close()
                    self.thisevent.postprocint += 1
                    self.backend.save(self.thisevent)
                    self.backend.commit()

                    self.submit_post_processing()
                #sys.exit()
                #print 'Waiting 10s to check from mountain...'
                #sys.exit()
                time.sleep(10)#looping over checking the mountain top

            # cfiles = os.listdir(os.path.join(trigger_path,trigger_id,'candidates'))
            # for f in cfiles:
            #     if f.split('.')[-1] == 'npz':
            #         cp.makeNewPage(f)

    def submit_post_processing(self):
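        # Gather every exposure tied to this trigger and launch the post-processing script (postproc.py)
        # on them in the background via subprocess.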
        #firedlist = open('./processing/firedlist.txt', 'r')
        #fl = firedlist.readlines()
        #firedlist.close()
        #print fl
        #fl = ['475914','475915','475916','482859','482860','482861']

        fl = self.backend.filter(exposures, {'triggerid': self.trigger_id})

        expnumlist = ''
        for f in fl:
            expnumlist += f.expnum.strip()+' '
        print expnumlist
        #expnumlist = '475905  475909  475913  475917  475921  475925  475929  475933  475937  475941  475945  475949  475953  475957  475961'
        print 'FIRING TIMs CODE'
        try:
            os.mkdir(os.path.join(self.trigger_path,self.trigger_id,'candidates'))
        except:
            print 'Candidates directory exists,',os.path.join(self.trigger_path,self.trigger_id,'candidates')
            pass
        #sys.exit()
        gwpostdir = os.environ['GWPOST_DIR']
        print 'source ' + os.path.join(gwpostdir, 'diffimg_setup.sh') + '; \
                        python '+os.path.join(gwpostdir,'postproc.py')\
                         +' --expnums ' + expnumlist\
                         + ' --outputdir ' + os.path.join(self.trigger_path,self.trigger_id,'candidates')\
                         + ' --triggerid '+self.trigger_id+' --season 70 --ups True'
        # os.system('source ' + os.path.join(gwpostdir, 'diffimg_setup.sh') + '; \
        #                  python '+os.path.join(gwpostdir,'postproc.py')\
        #                  +' --expnums ' + expnumlist\
        #                  + ' --outputdir ' + os.path.join(trigger_path,trigger_id,'candidates')\
        #                  + ' --triggerid '+trigger_id+' --season 46 --ups True' )

        #pid = os.spawnlp(os.P_WAIT, "source", os.path.join(gwpostdir, 'diffimg_setup.sh'))
        # args = ['yes | ssh -t [email protected] "source '+ os.path.join(gwpostdir, 'mi_setup.sh')+'; '+
        #                 'yes | python '+os.path.join(gwpostdir,'postproc.py')\
        #                  +' --expnums ' + expnumlist\
        #                  + ' --outputdir ' + os.path.join(self.trigger_path,self.trigger_id,'candidates')\
        #                  + ' --triggerid '+self.trigger_id+' --season 46 --ups True"'
        #         ]

        args = ['yes | python ' + os.path.join(gwpostdir, 'postproc.py') \
                + ' --expnums ' + expnumlist \
                + ' --outputdir ' + os.path.join(self.trigger_path, self.trigger_id, 'candidates') \
                + ' --triggerid ' + self.trigger_id + ' --season 70 --ups True']
        print args

        #sys.exit()
        f = open(os.path.join(self.processingdir,'postproc_attempt'+str(int(self.thisevent.postprocint)))+'.log','w')
        self.pp = subprocess.Popen(args,stdout=f, stderr=f,shell=True)
        #p = subprocess.Popen(args, stdin=None, stdout=None, stderr=None, close_fds=True, shell=True)
        #print 'going'*1000
        #print self.pp.communicate()
        #print 'gone'*1000
        #p = subprocess.Popen(args,stdin=None, stdout=None, stderr=None, close_fds=True,shell=True)
        #p.communicate()
        #sys.exit()
        return



    def getDatetimeOfFirstJson(self,jsonstring):
        #'M263920-30-UTC-2016-12-1-0:44:00.json'
        js = jsonstring.split('UTC')[1]#-2015-12-27-3:2:00.json
        #date_object = dt.strptime(js, '-%Y-%m-%d-%H_%M_%S.json')
        date_object = dt.strptime(js, '-%Y-%m-%d-%H:%M:%S.json')
        print '***** Datetime of first observation UTC',date_object,'*****'
        return date_object

    def sortHexes(self):
        pass
Example #13
def create_app(configfile=None):
    app = Flask(__name__)
    AppConfig(app, configfile)  # Flask-Appconfig is not necessary, but
                                # highly recommended =)
                                # https://github.com/mbr/flask-appconfig
    Bootstrap(app)

    login_manager = LoginManager()
    login_manager.init_app(app)
    login_manager.login_view = 'login'


    #NoSQL Backend
    backend = FileBackend("/tmp/wakeonlan.db")
    backend.create_index(Device, fields={'id':1}, unique=True)
    
    #TEST Devices
    alldevices = backend.filter(Device, {})
    if len(alldevices) == 0:
        try:
            pc1 = Device({"id" : "001122334411", "name" : "PC 1", "mac" : "00:11:22:33:44:11", "ip":"192.168.222.111", 'status' : ''})
            backend.save(pc1)
            pc2 = Device({"id" : "001122334422","name" : "PC 2", "mac" : "00:11:22:33:44:22", "ip":"192.168.222.112", 'status' : ''})
            backend.save(pc2)
            pc3 = Device({"id" : "001122334433","name" : "Router", "mac" : "00:11:22:33:44:33", "ip":"192.168.222.1", 'status' : ''})
            backend.save(pc3)
            backend.commit()
        except: 
            backend.revert()
            pass

    # in a real app, these should be configured through Flask-Appconfig
    app.config['SECRET_KEY'] = 'devkey'
    # app.config['RECAPTCHA_PUBLIC_KEY'] = \
    #     '6Lfol9cSAAAAADAkodaYl9wvQCwBMr3qGR_PPHcw'

    def getDeviceById(id):
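        # Return the Device with this id, or None if it is not in the backend.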
        device = None
        try:
            device = backend.get(Device, {'id':id})
        except:
            pass

        return device

    def pingDeviceById(id):
        #Get Device
        device = backend.get(Device, {'id':id})

        if device:
            #Get Device's IP
            ip = device['ip']
            result = pingDeviceByIp(ip)

            #Update Status   UP/Down/''
            if result==0:
                device['status'] = 'UP'
            else:
                device['status'] = 'DOWN'

            backend.save(device)
            return result

        return None

    def wolDeviceById(id):
        #Get Device
        device = backend.get(Device, {'id':id})

        if device:
            #WoL for Device MAC
            mac = device['mac']
            wolDeviceByMac(mac)

        return None

    @login_manager.user_loader
    def user_loader(user_id):
        """Given *user_id*, return the associated User object.
        :param unicode user_id: user_id (email) user to retrieve
        """
        user_entry = User.getById(user_id)
        if user_entry is not None:
            user = User(user_entry[0], user_entry[1])
            return user
        else:
            return None

    @app.route('/', methods=('GET', 'POST'))
    @login_required
    def index():
        form = ExampleForm()
        form.validate_on_submit()  # to get error messages to the browser
        # flash('critical message', 'critical')
        # flash('error message', 'error')
        # flash('warning message', 'warning')
        # flash('info message', 'info')
        # flash('debug message', 'debug')
        # flash('different message', 'different')
        # flash('uncategorized message')
        alldevices = None
        alldevices = backend.filter(Device, {}).sort('name')

        #app.logger.info('Devices: %s' % (len(alldevices) ) )

        return render_template('index.html', form=form, devices = alldevices)

    @app.route('/login', methods=('GET', 'POST'))
    def login():
        if request.method == 'GET':
            form = LoginForm()
            form.validate_on_submit()  # to get error messages to the browser
            return render_template('login.html', form=form)

        username = request.form['username']
        password = request.form['password']

        user_entry = User.get(username, password)
        if user_entry is None:
            flash('Username or Password is invalid', 'error')
            return redirect(url_for('login'))

        user = User(user_entry[0], user_entry[1])
        login_user(user, remember=True)
        return redirect(request.args.get('next') or url_for('index'))


    @app.route("/logout", methods=["GET"])
    @login_required
    def logout():
        """Logout the current user."""
        user = current_user
        user.authenticated = False
        logout_user()
        return redirect(url_for('login'))



    @app.route('/addDevice', methods=('GET', 'POST'))
    @login_required
    def addDevice():
        if request.method == 'GET':
            form = AddDeviceForm()
            form.validate_on_submit()  # to get error messages to the browser
            return render_template('add_device.html', form=form)

        name = request.form['name']
        mac = request.form['mac']
        ip = request.form['ip']
        id = mac.replace(':','')

        try:
            newDevice = Device({"id" : id, "name" : name, "mac" : mac, "ip":ip, 'status' : ''})
            backend.save(newDevice)
            backend.commit()
        except:
            flash('Error creating new Device', 'error')
            pass

        return redirect(url_for('index'))


    @app.route('/editListDevice', methods=('GET', 'POST'))
    @login_required
    def editListDevice():
        alldevices = None
        alldevices = backend.filter(Device, {}).sort('name')

        return render_template('list_device.html', devices = alldevices)



    @app.route('/pingDevice/<deviceId>', methods=('GET', 'POST'))
    @login_required
    def pingDevice(deviceId):
        app.logger.info('pingDevice: %s' % (deviceId ) )
        device = getDeviceById(deviceId)
        result = pingDeviceById(deviceId)

        app.logger.info('pingDevice: %s' % (result ) )

        if result is None:
            flash('Ping - Error on device %s' % (device['name']), 'error')
        elif result == 0:
            flash('Device %s is UP' % (device['name']), 'info')
        else:
            flash('Device %s is DOWN' % (device['name']), 'error')

        return redirect(url_for('index'))


    @app.route('/wolDevice/<deviceId>', methods=('GET', 'POST'))
    @login_required
    def wolDevice(deviceId):
        app.logger.info('wolDevice: %s' % (deviceId ) )
        device = getDeviceById(deviceId)
        result = wolDeviceById(deviceId)

        if device:
            flash('WoL sent to %s' % (device['name']), 'info')
        else:
            flash('WoL error', 'error')

        return redirect(url_for('index'))


    @app.route('/deleteDevice/<deviceId>', methods=('GET', 'POST'))
    @login_required
    def deleteDevice(deviceId):
        app.logger.info('wolDevice: %s' % (deviceId ) )
        device = getDeviceById(deviceId)

        try:
            backend.delete(device)
            backend.commit()
            flash('%s Deleted' % (device['name']), 'info')
        except:
            flash('Delete error', 'error')
            pass

        return redirect(url_for('editListDevice'))


    return app
Example #14
class VisinumDatabase:
    def __init__(self,
                 dbpath,
                 name=VisinumDB_name,
                 DbDocClass=DbDocFileMetadata):  # , attrs={}, **kwargs
        self.DbDocClass = DbDocClass
        self.name = name
        self.open_database(
            dbpath)  # sets self.dbpath and self.db as reference to database

    def open_database(self, dbpath):  # (self, dbpath, attrs={}, **kwargs)
        if os.path.basename(dbpath) == self.name:
            self.dbpath = dbpath  # opening existing Visinum database
        elif os.path.isdir(os.path.join(dbpath, self.name)):
            self.dbpath = os.path.join(dbpath,
                                       self.name)  # opening existing database
            logger.info("Found Visinum database %s in directory %s" %
                        (self.name, dbpath))
        elif os.path.isdir(dbpath):
            self.dbpath = os.path.join(dbpath, self.name)
            logger.info("Creating new Visinum database %s in directory %s" %
                        (self.name, dbpath))
        else:
            logger.error("Database path (dbpath) incorrectly specified %s" %
                         (dbpath, ))
            raise ValueError(
                "Database path (dbpath) incorrectly specified %s" % (dbpath, ))
        self.db = FileBackend(self.dbpath, {'serializer_class': 'json'})
        logger.info("Opening Visinum database %s" % (self.dbpath, ))
        config_attrs = self.get_config()
        """try:
            #config_attrs = self.db.get(self.DbDocClass, {'visinum_type' : 'db_config'})
            config_attrs = self.get_config()
        except self.DbDocClass.DoesNotExist:
            self.set_config( {'visinum_type' : 'db_config',
                            'title' : os.path.basename( os.path.dirname(self.dbpath) ),
                            'path_orig' : self.dbpath,
                            'UUID': make_uuid(version=3, namespace='uuid.NAMESPACE_URL', name=self.dbpath)
                            } )
            #config_attrs.update(attrs)
            #self.create_new_item(config_attrs, config_attrs, **kwargs )"""
        self.config_attrs = config_attrs

    def get_config(self, attr=None):
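        # Look up the db_config document, falling back to its deterministic UUID;
        # rebuild the configuration if neither lookup succeeds.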
        try:
            dbitem = self.db.get(self.DbDocClass,
                                 {'visinum_type': 'db_config'})
            config_attrs = dbitem.attributes
        except self.DbDocClass.DoesNotExist:
            try:
                config_UUID = make_uuid(version=3,
                                        namespace='uuid.NAMESPACE_URL',
                                        name=self.dbpath)
                dbitem = self.db.get(self.DbDocClass, {'UUID': config_UUID})
                config_attrs = dbitem.attributes
            except self.DbDocClass.DoesNotExist:
                # cannot find db configuration, setup (reset) new configuration
                config_attrs = {
                    'visinum_type': 'db_config',
                    'title': os.path.basename(os.path.dirname(self.dbpath)),
                    'UUID': make_uuid(version=3,
                                      namespace='uuid.NAMESPACE_URL',
                                      name=self.dbpath),
                    'path_orig': self.dbpath
                }
                self.set_config(config_attrs, reset=True)
                logger.warning(
                    "Cannot find db configuration, re-setting configuration %s"
                    % (config_attrs['UUID'], ))
        if attr:  # if attr and attr in config_attrs:
            return config_attrs[attr]
        return config_attrs

    def set_config(self, attrs={}, reset=False, **kwargs):
        if reset:
            config_attrs = {}
        else:
            config_attrs = self.get_config()
        config_attrs.update(attrs)
        for k, v in kwargs.items():
            config_attrs[k] = v
        UUID = self.set_dbitem(config_attrs)
        self.config_attrs = config_attrs
        return UUID

    def extract_file_metadata(self, dirpath, update=True):
        rootNode = make_file_system_tree(dirpath, excludedirs=[self.name])
        """UUID = self.save_item(rootNode.to_vndict(), {'visinum_type':'datatree',
                               'name':'datatree ' + rootNode.name,
                               'visinum_datatree':'file_system',
                               'visinum_node':rootNode.__class__.__name__,
                               'fpath':dirpath})"""
        rootNode.__class__.metadata_to_dict = node_metadata_to_dict
        for node in list(rootNode):
            mdata = {}
            if update:
                try:
                    dbdoc = self.get_dbitem(node._UUID)
                    mdata.update(dbdoc.attributes)
                    del dbdoc
                except:
                    pass
            mdata.update(node.metadata_to_dict(self.db, dirpath))
            dbdoc = self.DbDocClass(mdata)
            dbdoc.save(self.db)
            logger.info('Metadata extracted from %s' %
                        (node.get_path(dirpath), ))
        logger.info("Completed processing data tree %s " % (rootNode.name, ))
        self.db.commit()
        for node in list(rootNode):
            dbdoc = self.db.get(self.DbDocClass, {'UUID': node._UUID})
            if node is rootNode:
                dbdoc.parent = None
                #dbdoc.visinum_type = 'datatree'
                dbdoc.visinum_datatree = 'file_system'
                #self.set_config(filesystemtreeroot=rootNode._UUID)
                self.set_config(filesystemtreeroot=rootNode._UUID)  # filesystemtreeroot=dbdoc
                ##self.datatreelist.append( ('file system', rootNode._UUID) )
            else:
                #dbdoc.parent = node.parent.UUID
                dbdoc.visinum_parent = self.db.get(self.DbDocClass,
                                                   {'UUID': node.parent._UUID})
            dbdoc.visinum_node = node.__class__.__name__
            dbdoc.visinum_childs = []  # database will not accept _childs as a doc attribute
            for child in node._childs:
                #_childs.append(child.UUID)
                dbdoc.visinum_childs.append(
                    self.db.get(self.DbDocClass, {'UUID': child._UUID}))
            dbdoc.visinum_nchilds = len(node._childs)
            dbdoc.save(self.db)
        self.db.commit()
        logger.info("Metadata committed to DB %s" % (rootNode.name, ))

    def set_dbitem(self, attrs={}, commit=True, **kwargs):
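        # Merge kwargs into attrs, ensure a UUID is present, save the document, and optionally commit.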
        for k, v in kwargs.items():
            attrs[k] = v
        if not 'UUID' in attrs:
            attrs['UUID'] = make_uuid()
        dbItem = self.DbDocClass(attrs)
        dbItem.save(self.db)
        if commit:
            self.db.commit()
        return attrs['UUID']

    def get_dbitem(self, attrs={}, DbDocClass=None):
        if not DbDocClass:
            DbDocClass = self.DbDocClass
        if isinstance(attrs, str):
            attrs = {'UUID': attrs}
        return self.db.get(DbDocClass, attrs)

    def filter(self, query, DbDocClass=None):
        if not DbDocClass:
            DbDocClass = self.DbDocClass
        return self.db.filter(DbDocClass, query)

    def tree_from_db(self, dbitem, root_node=None, parent=None):
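        # Recursively rebuild an FsNode tree from a stored document by following its visinum_childs references.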
        #dbitem = self.get_dbitem(rootUUID)
        metadata = dbitem.attributes  # {k:v for k, v in dict_.items() if k != "_childs"}
        node = FsNode(metadata['path'], parent=parent, metadata=metadata)
        if not root_node:
            root_node = node
        if 'visinum_childs' in metadata:
            for child in metadata['visinum_childs']:
                self.tree_from_db(child, root_node, node)
        return root_node
Example #15
class Movie(Document):
    pass

class Actor(Document):
    pass

the_godfather = Movie({'name': 'The Godfather','year':1972,'pk':1L})

marlon_brando = Actor({'name':'Marlon Brando','pk' : 1L})
al_pacino = Actor({'name' : 'Al Pacino','pk' : 2L})

from blitzdb import FileBackend

backend = FileBackend("/tmp/movies")

backend.register(Movie,{'collection':'movies'})
backend.register(Actor,{'collection':'actors'})

backend.filter(Movie,{}).delete()
backend.filter(Actor,{}).delete()

the_godfather.save(backend)
marlon_brando.save(backend)
al_pacino.save(backend)

the_godfather = backend.get(Movie,{'pk':1L})
#or...
the_godfather = backend.get(Movie,{'name' : 'The Godfather'})

print the_godfather

movies_from_1972 = backend.filter(Movie,{'year' : 1972})

the_godfather.cast = {'Don Vito Corleone' : marlon_brando, 'Michael Corleone' : al_pacino}
Example #16
class TwitchNotifier(commands.Cog):
    def __init__(self, bot):
        self.bot: 'PixlBot' = bot
        self.config = bot.config['TwitchNotifier']
        self.backend = FileBackend('db')
        self.backend.autocommit = True
        self.bot.logger.info("Twitch notifier plugin ready")
        self.uuids = []
        self.online_uuids = []
        self.sslcontext = ssl.SSLContext()
        self.sslcontext.load_cert_chain(self.config['cert_path'],
                                        self.config['key_path'])
        self._twitch_init_()

    def _twitch_init_(self):
        self.bot.logger.info("Registering with Twitch...")
        self.twitch = Twitch(self.config['id'], self.config['secret'])
        self.twitch.authenticate_app([])
        self.bot.logger.info(
            f"Registering webhook endpoint {self.config['myurl']} ...")
        self.hook = TwitchWebHook(self.config['myurl'],
                                  self.config['id'],
                                  self.config['port'],
                                  ssl_context=self.sslcontext)
        self.hook.authenticate(self.twitch)
        self.bot.logger.info("Clearing all hook subscriptions...")
        self.hook.unsubscribe_all(self.twitch)  # Clear all subs on startup
        self.hook.start()
        self._register_all()

    def _login_to_id(self, name: str) -> Optional[str]:
        """Returns the twitch ID for a given login name, or None if the name couldn't be resolved."""
        try:
            res: dict = self.twitch.get_users(logins=[name])
        except TwitchBackendException as e:
            self.bot.logger.error(f"Backend error fetching user! {e}")
            return None
        if len(res) == 0:
            return None
        else:
            return res['data'][0]['id']

    def _register_all(self):
        """Attempts to register stream_changed callbacks for all configured users."""
        self.bot.logger.info("Registering callbacks for all watched users..")
        users = self.backend.filter(TwitchWatchedUser,
                                    {'twitch_name': {
                                        "$exists": True
                                    }})
        if not users:
            self.bot.logger.info("No users to watch. No callbacks registered.")
        else:
            for u in users:
                self.bot.logger.info(f"Registering: {u['twitch_name']}")
                success, uuid = self.hook.subscribe_stream_changed(
                    u['twitch_id'], self._cb_stream_changed)
                if success and uuid:
                    self.uuids.append(uuid)
                    self.bot.logger.info(
                        f"{success}: registered subscription UUID: {uuid}")
                else:
                    self.bot.logger.error(
                        f"{success}: failed registering subscription: {uuid}")

    def _cb_stream_changed(self, uuid, data):
        """Callback for Twitch webhooks, fires on stream change event"""
        self.bot.logger.debug(f"Callback data for {uuid}: {data}")
        if data["type"] == "offline":
            if uuid in self.online_uuids:
                self.online_uuids.remove(
                    uuid
                )  # Stupid twitch sending the same damn webhook multiple times...
                return
            else:
                self.bot.logger.debug(
                    f"Ignoring duplicate offline callback for {uuid}")
                return
        elif data["type"] == "live":
            if uuid in self.online_uuids:
                self.bot.logger.debug(
                    f"Ignoring duplicate live callback for {uuid}")
                return
            else:
                self.online_uuids.append(uuid)
        else:
            self.bot.logger.error(
                f"Got a callback type we can't handle: {data['type']}")
            return

        if uuid not in self.uuids:
            self.bot.logger.error(
                f"Got a callback for a UUID we're not tracking: {uuid}, my UUIDs: {self.uuids}"
            )
            return

        try:
            item = self.backend.get(TwitchWatchedUser,
                                    {"twitch_id": data["user_id"]})
        except TwitchWatchedUser.DoesNotExist:
            self.bot.logger.error(
                f"Got a callback for a USER we're not tracking: {data['user_id']} -> {data['user_name']}"
            )
            return
        channel: discord.TextChannel = self.bot.get_channel(
            item['notify_channel'])

        width = 640
        height = 360
        url = data['thumbnail_url'].format(width=width, height=height)

        tu = self.twitch.get_users(data['user_id'])['data'][0]
        self.bot.logger.debug(tu)

        embed = discord.Embed(
            title=f"Now streaming {data['game_name']}",
            description=data['title'],
            color=discord.Color.green(),
        )
        embed.set_image(url=url)
        embed.set_thumbnail(url=tu["profile_image_url"])
        embed.set_author(name=item["twitch_name"],
                         url=f"https://twitch.tv/{data['user_name']}")
        embed.add_field(name="Watch live at",
                        value=f"https://twitch.tv/{data['user_name']}")
        # This callback isn't async, so enqueue the send on the bot's event loop
        self.bot.loop.create_task(channel.send(embed=embed))
        self.bot.logger.info(
            f"Successfully sent online notification for {data['user_id']}")

    @cog_ext.cog_subcommand(
        base="Twitchwatch",
        name="add_notification",
        description="Add a go live notification for Twitch",
        options=[twitch_name, notify_channel, notify_text],
        guild_ids=util.guilds)
    async def add_notification(self, ctx: SlashContext, twitch_name: str,
                               notify_channel: discord.TextChannel,
                               notify_text: str):
        twitch_id = self._login_to_id(twitch_name)
        try:
            self.backend.get(TwitchWatchedUser, {'twitch_name': twitch_name})
        except TwitchWatchedUser.DoesNotExist:
            pass
        except TwitchWatchedUser.MultipleDocumentsReturned:
            self.bot.logger.error(
                "Multiple users returned - database inconsistent???")
            return
        if not twitch_id:
            await ctx.send(embed=mkembed(
                'error',
                f"Unable to get the Twitch ID for the name {twitch_name}"))
            return
        await ctx.defer()  # This bit can take a minute.
        success, uuid = self.hook.subscribe_stream_changed(
            twitch_id, self._cb_stream_changed)
        if success and uuid:
            self.uuids.append(uuid)
            self.bot.logger.info(
                f"{success}: registered subscription UUID: {uuid}")
        else:
            self.bot.logger.error(
                f"{success}: failed registering subscription: {uuid}")
            await ctx.send("Bluh, couldn't register the webhook with twitch :("
                           )
            return
        item = TwitchWatchedUser({
            'twitch_name': twitch_name,
            'twitch_id': twitch_id,
            'discord_name': ctx.author.id,
            'notify_channel': notify_channel.id,
            'notify_text': notify_text,
            'uuid': str(uuid)
        })
        self.bot.logger.debug(f"DB object dump: {item.__dict__}")
        self.backend.save(item)
        await ctx.send(embed=mkembed("done",
                                     f"Notification added for {twitch_name}",
                                     channel=notify_channel.name))

    @cog_ext.cog_subcommand(
        base="Twitchwatch",
        name="del_notification",
        description="Remove a go live notification for Twitch",
        options=[twitch_name],
        guild_ids=util.guilds)
    async def del_notification(self, ctx: SlashContext, twitch_name: str):
        try:
            item = self.backend.get(TwitchWatchedUser,
                                    {'twitch_name': twitch_name})
        except TwitchWatchedUser.DoesNotExist:
            await ctx.send(embed=mkembed(
                "error", f"No notification exists for {twitch_name}"))
            return
        self.hook.unsubscribe(item['uuid'])
        self.bot.logger.info(f"Removing watch {item['uuid']}: {twitch_name}")
        self.backend.delete(item)
        if item['uuid'] in self.uuids:
            self.uuids.remove(item['uuid'])
        await ctx.send(
            embed=mkembed("done", f"Notification for {twitch_name} removed."))
Example #17
def create_app(configfile=None):
    app = Flask(__name__)
    AppConfig(app, configfile)  # Flask-Appconfig is not necessary, but
    # highly recommended =)
    # https://github.com/mbr/flask-appconfig
    Bootstrap(app)

    login_manager = LoginManager()
    login_manager.init_app(app)
    login_manager.login_view = 'login'

    #NoSQL Backend
    backend = FileBackend("/tmp/wakeonlan.db")
    backend.create_index(Device, fields={'id': 1}, unique=True)

    #TEST Devices
    alldevices = backend.filter(Device, {})
    if len(alldevices) == 0:
        try:
            pc1 = Device({
                "id": "001122334411",
                "name": "PC 1",
                "mac": "00:11:22:33:44:11",
                "ip": "192.168.222.111",
                'status': ''
            })
            backend.save(pc1)
            pc2 = Device({
                "id": "001122334422",
                "name": "PC 2",
                "mac": "00:11:22:33:44:22",
                "ip": "192.168.222.112",
                'status': ''
            })
            backend.save(pc2)
            pc3 = Device({
                "id": "001122334433",
                "name": "Router",
                "mac": "00:11:22:33:44:33",
                "ip": "192.168.222.1",
                'status': ''
            })
            backend.save(pc3)
            backend.commit()
        except Exception:
            backend.revert()

    # in a real app, these should be configured through Flask-Appconfig
    app.config['SECRET_KEY'] = 'devkey'

    # app.config['RECAPTCHA_PUBLIC_KEY'] = \
    #     '6Lfol9cSAAAAADAkodaYl9wvQCwBMr3qGR_PPHcw'

    def getDeviceById(id):
        device = None
        try:
            device = backend.get(Device, {'id': id})
        except Exception:
            pass

        return device

    def pingDeviceById(id):
        #Get Device
        device = backend.get(Device, {'id': id})

        if device:
            #Get Device's IP
            ip = device['ip']
            result = pingDeviceByIp(ip)

            #Update Status   UP/Down/''
            if result == 0:
                device['status'] = 'UP'
            else:
                device['status'] = 'DOWN'

            backend.save(device)
            return result

        return None

    def wolDeviceById(id):
        #Get Device
        device = backend.get(Device, {'id': id})

        if device:
            #WoL for Device MAC
            mac = device['mac']
            wolDeviceByMac(mac)

        return None

    @login_manager.user_loader
    def user_loader(user_id):
        """Given *user_id*, return the associated User object.
        :param unicode user_id: user_id (email) user to retrieve
        """
        user_entry = User.getById(user_id)
        if user_entry is not None:
            user = User(user_entry[0], user_entry[1])
            return user
        else:
            return None

    @app.route('/', methods=('GET', 'POST'))
    @login_required
    def index():
        form = ExampleForm()
        form.validate_on_submit()  # to get error messages to the browser
        # flash('critical message', 'critical')
        # flash('error message', 'error')
        # flash('warning message', 'warning')
        # flash('info message', 'info')
        # flash('debug message', 'debug')
        # flash('different message', 'different')
        # flash('uncategorized message')
        alldevices = backend.filter(Device, {}).sort('name')

        #app.logger.info('Devices: %s' % (len(alldevices) ) )

        return render_template('index.html', form=form, devices=alldevices)

    @app.route('/login', methods=('GET', 'POST'))
    def login():
        if request.method == 'GET':
            form = LoginForm()
            form.validate_on_submit()  # to get error messages to the browser
            return render_template('login.html', form=form)

        username = request.form['username']
        password = request.form['password']

        user_entry = User.get(username, password)
        if user_entry is None:
            flash('Username or Password is invalid', 'error')
            return redirect(url_for('login'))

        user = User(user_entry[0], user_entry[1])
        login_user(user, remember=True)
        return redirect(request.args.get('next') or url_for('index'))

    @app.route("/logout", methods=["GET"])
    @login_required
    def logout():
        """Logout the current user."""
        user = current_user
        user.authenticated = False
        logout_user()
        return redirect(url_for('login'))

    @app.route('/addDevice', methods=('GET', 'POST'))
    @login_required
    def addDevice():
        if request.method == 'GET':
            form = AddDeviceForm()
            form.validate_on_submit()  # to get error messages to the browser
            return render_template('add_device.html', form=form)

        name = request.form['name']
        mac = request.form['mac']
        ip = request.form['ip']
        id = mac.replace(':', '')

        try:
            newDevice = Device({
                "id": id,
                "name": name,
                "mac": mac,
                "ip": ip,
                'status': ''
            })
            backend.save(newDevice)
            backend.commit()
        except Exception:
            flash('Error creating new Device', 'error')

        return redirect(url_for('index'))

    @app.route('/editListDevice', methods=('GET', 'POST'))
    @login_required
    def editListDevice():
        alldevices = backend.filter(Device, {}).sort('name')

        return render_template('list_device.html', devices=alldevices)

    @app.route('/pingDevice/<deviceId>', methods=('GET', 'POST'))
    @login_required
    def pingDevice(deviceId):
        app.logger.info('pingDevice: %s' % (deviceId))
        device = getDeviceById(deviceId)
        result = pingDeviceById(deviceId)

        app.logger.info('pingDevice: %s' % (result))

        if result is None:
            flash('Ping - Error on device %s' % (device['name']), 'error')
        elif result == 0:
            flash('Device %s is UP' % (device['name']), 'info')
        else:
            flash('Device %s is DOWN' % (device['name']), 'error')

        return redirect(url_for('index'))

    @app.route('/wolDevice/<deviceId>', methods=('GET', 'POST'))
    @login_required
    def wolDevice(deviceId):
        app.logger.info('wolDevice: %s' % (deviceId))
        device = getDeviceById(deviceId)
        result = wolDeviceById(deviceId)

        if device:
            flash('WoL sent to %s' % (device['name']), 'info')
        else:
            flash('WoL error', 'error')

        return redirect(url_for('index'))

    @app.route('/deleteDevice/<deviceId>', methods=('GET', 'POST'))
    @login_required
    def deleteDevice(deviceId):
        app.logger.info('deleteDevice: %s' % (deviceId))
        device = getDeviceById(deviceId)

        try:
            backend.delete(device)
            backend.commit()
            flash('%s Deleted' % (device['name']), 'info')
        except Exception:
            flash('Delete error', 'error')

        return redirect(url_for('editListDevice'))

    return app
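
# The view functions above assume two helpers that are not shown in this
# snippet: pingDeviceByIp(ip), which must return 0 when the host answers
# (that is what pingDeviceById() checks), and wolDeviceByMac(mac), which
# sends the Wake-on-LAN magic packet. Minimal sketches under those
# assumptions (Unix `ping` on PATH), not the original implementations:
import binascii
import os
import socket
import subprocess


def pingDeviceByIp(ip):
    # Send one ICMP echo request; ping exits with 0 on success.
    with open(os.devnull, 'w') as devnull:
        return subprocess.call(['ping', '-c', '1', ip],
                               stdout=devnull, stderr=devnull)


def wolDeviceByMac(mac):
    # Magic packet: 6 x 0xFF followed by the MAC address repeated 16 times,
    # broadcast over UDP to port 9.
    raw = binascii.unhexlify(mac.replace(':', ''))
    packet = b'\xff' * 6 + raw * 16
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
    sock.sendto(packet, ('<broadcast>', 9))
    sock.close()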
Example #18
class eventmanager:
    def __init__(self, trigger_id, jsonfilelist, triggerdir, datadir, real,
                 trigger_path):

        #os.system('kinit -k -t /var/keytab/desgw.keytab desgw/des/[email protected]')
        tstart = time.time()

        if real:
            self.backend = FileBackend("./realdb")
        else:
            self.backend = FileBackend("./testdb")

        try:
            self.thisevent = self.backend.get(Trigger, {'id': trigger_id})
            print 'Found this event in desgw database...'
        except Trigger.DoesNotExist:
            self.thisevent = Trigger({
                'id': trigger_id,
                'jsonfilelist': jsonfilelist,
                'triggerpath': triggerdir,
                'mapspath': datadir,
                'jobids': [(0, 'jsonfile_corresponding_to_jobid.json')],
                'postprocint': 0
            })
            print 'Database entry created!'

        self.trigger_id = trigger_id
        self.trigger_path = trigger_path

        self.backend.save(self.thisevent)
        self.backend.commit()

        with open(os.path.join(triggerdir, "strategy.yaml"), "r") as f:
            self.config = yaml.safe_load(f)
        self.filterobslist = np.array(self.config['exposure_filter'],
                                      dtype='str')
        self.strategydict = {}

        for f in np.unique(self.filterobslist):
            self.strategydict[f] = len(
                self.filterobslist[self.filterobslist == f])

        self.connection = ea.connect(DATABASE)
        self.cursor = self.connection.cursor()

        self.jsonfilelist = jsonfilelist

        print self.jsonfilelist
        if hardjson:
            self.jsonfilelist = hj

        #self.pp = subprocess.Popen('echo starting',stdout=PIPE, stderr=PIPE,shell=True)

        self.trigger_id = trigger_id
        self.datadir = datadir
        self.triggerdir = triggerdir
        self.processingdir = os.path.join(self.triggerdir, 'PROCESSING')
        if not os.path.exists(self.processingdir):
            os.makedirs(self.processingdir)

        dire = './processing/' + trigger_id + '/'
        if not os.path.exists(dire):
            os.makedirs(dire)

        with open(os.path.join(triggerdir, "strategy.yaml"), "r") as f:
            self.strategy = yaml.safe_load(f)

        with open("jobmanager.yaml", "r") as g:
            self.jmconfig = yaml.safe_load(g)


        q1 = "select expnum,nite,mjd_obs,telra,teldec,band,exptime,propid,obstype,object from exposure where " \
             "nite>20130828 and nite<20150101 and expnum<300000 and obstype='object' order by expnum"  # y1 images
        self.connection.query_and_save(q1, './processing/exposuresY1.tab')

        q2 = "select expnum,nite,mjd_obs,radeg,decdeg,band,exptime,propid,obstype,object from prod.exposure where " \
             "nite>20150901 and obstype='object' order by expnum"  # y2 and later
        self.connection.query_and_save(q2, './processing/exposuresCurrent.tab')

        os.system(
            'cat ./processing/exposuresY1.tab ./processing/exposuresCurrent.tab > ./processing/exposures.list'
        )

        #self.submit_post_processing()

        # Preps all DES images that already exist
        self.submit_all_jsons_for_sejobs()
        tfin = time.time()
        print 'TOTAL SE JOBS TIME', tfin - tstart
        #sys.exit()
        # A loop that waits for images off the mountain and submits them for processing
        self.monitor_images_from_mountain()

    def submit_all_jsons_for_sejobs(self):
        obsStartTime = self.getDatetimeOfFirstJson(
            self.jsonfilelist[0])  # THIS IS A DATETIME OBJ
        currentTime = dt.utcnow()
        print '***** The current time is UTC', currentTime, '*****'
        delt = obsStartTime - currentTime

        timedelta = td(days=delt.days,
                       seconds=delt.seconds).total_seconds() / 3600.
        print '***** The time delta is ', timedelta, 'hours *****'
        # if timedelta > np.pi:

        sejob_timecushion = self.jmconfig["sejob_timecushion"]

        if timedelta > sejob_timecushion:
            for jsonfile in self.jsonfilelist:
                print 'json', jsonfile
                try:  #check if this json file is already in the submitted preprocessing database
                    thisjson = self.backend.get(
                        preprocessing,
                        {'jsonfilename': os.path.join(self.datadir, jsonfile)})
                    print 'Found this json in desgw database...'
                except preprocessing.DoesNotExist:  #do submission and then add to database

                    print 'cd diffimg-proc; ./SEMaker_RADEC.sh ' + os.path.join(
                        self.datadir, jsonfile)
                    os.chdir("diffimg-proc")
                    out = os.popen(
                        './SEMaker_RADEC.sh ' +
                        os.path.join(self.datadir, jsonfile)).read()
                    of = open(
                        os.path.join(
                            self.processingdir,
                            jsonfile.split('/')[-1].split('.')[0] +
                            '.SEMakerlog'), 'w')
                    of.write(out)
                    of.close()
                    #out = os.popen('ls').read()
                    os.chdir("..")
                    print out
                    if 'non-zero exit status' in out:
                        dt.sendEmailSubject(
                            self.trigger_id,
                            'Error in creating SEMaker dag for .json: ' + out)
                    else:
                        for o in out.split('\n'):
                            if 'file://' in o:
                                dagfile = o.split('/')[-1]
                                self.dagfile = os.path.join(
                                    self.processingdir,
                                    jsonfile.split('/')[-1].split('.')[0] +
                                    '_' + dagfile)
                                os.system('cp diffimg-proc/' + dagfile + ' ' +
                                          self.dagfile)
                                jobsubmitline = copy(o)
                        print self.dagfile

                    print 'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; jobsub_submit_dag -G des --role=DESGW file://' + self.dagfile

                    out = os.popen(
                        'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; '
                        'jobsub_submit_dag -G des --role=DESGW file://' +
                        self.dagfile).read()
                    print out

                    of = open(
                        os.path.join(
                            self.processingdir,
                            jsonfile.split('/')[-1].split('.')[0] +
                            '.SEdagsubmitlog'), 'w')
                    of.write(out)
                    of.close()

                    if 'non-zero exit status' in out:
                        dt.sendEmailSubject(
                            self.trigger_id,
                            'Error in submitting .json for preprocessing: ' +
                            out)
                    else:
                        for o in out.split('\n'):
                            if 'Use job id' in o:
                                jobid = o.split()[3]
                        if doimmediateremove:
                            out = os.popen(
                                'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; '
                                'jobsub_rm --jobid=' + jobid +
                                ' --group=des --role=DESGW').read()
                            print out

                    thisjson = preprocessing({
                        'jsonfilename': os.path.join(self.datadir, jsonfile),
                        'jobid': jobid,
                        'dagfile': self.dagfile,
                        'status': 'Submitted'
                    })

                    self.backend.save(thisjson)
                    self.backend.commit()
                    print 'saved'
                    #sys.exit()
                #raw_input()
                #runProcessingIfNotAlready(image, self.backend)

                #sys.exit()

        print 'Finished submitting minidagmaker with all json files'
        #sys.exit()
        #raw_input()

    # Loop queries for images from mountain and submits them
    # Need to add complexity that monitors filter strategy and waits for entire groups of images to be co-added
    def monitor_images_from_mountain(self):
        #NEED TO ADD COADD LOGIC USING STRATEGY FROM CONFIG

        exposure_filter = np.array(self.strategy['exposure_filter'],
                                   dtype='str')
        uniquefilts = np.unique(self.strategy['exposure_filter'])

        filterstrategy = {}
        for f in uniquefilts:
            filterstrategy[f] = len(exposure_filter[exposure_filter == f])

        print 'filter strategy dictionary  ', filterstrategy

        starttime = time.time()
        pptime = time.time()
        keepgoing = True
        index = -1
        submission_counter = 0
        maxsub = 10000
        postprocessingtime = 2000  #every half hour fire off Tim's code for post-processing
        while keepgoing:
            #os.system('kinit -k -t /var/keytab/desgw.keytab desgw/des/[email protected]')
            index += 1
            newfireds = []
            if time.time() - starttime > 50000:
                keepgoing = False
                continue

            ofile = open(os.path.join(self.triggerdir, 'latestquery.txt'), 'w')

            ofile.write(
                "--------------------------------------------------------------------------------------------------\n"
            )
            ofile.write(
                "EXPNUM\tNITE\tBAND\tEXPTIME\tRADEG\t DECDEG\tPROPID\tOBJECT\n"
            )
            ofile.write(
                "--------------------------------------------------------------------------------------------------\n"
            )

            print "--------------------------------------------------------------------------------------------------"
            print "EXPNUM\tNITE\tBAND\tEXPTIME\tRADEG\t DECDEG\tPROPID\tOBJECT"
            print "--------------------------------------------------------------------------------------------------"

            query = "SELECT expnum,nite,band,exptime,radeg,decdeg,propid,object FROM prod.exposure@desoper WHERE " \
                    "expnum > 475900 and propid=" + propid + "and obstype='object' ORDER BY expnum"  # latest

            self.cursor.execute(query)

            for s in self.cursor:
                ofile.write(
                    str(s[0]) + "\t" + str(s[1]) + "\t" + str(s[2]) + "\t" +
                    str(s[3]) + "\t" + str(s[4]) + "\t" + str(s[5]) + "\t" +
                    str(s[6]) + "\t" + str(s[7]) + '\n')
                print str(s[0]) + "\t" + str(s[1]) + "\t" + str(
                    s[2]) + "\t" + str(s[3]) + "\t" + str(s[4]) + "\t" + str(
                        s[5]) + "\t" + str(s[6]) + "\t" + str(s[7])

                if not 'DESGW' in str(s[7]): continue
                #print 'exptime',float(s[3])
                if not float(s[3]) > 29.:
                    continue  #exposure must be longer than 30 seconds

                expnum = str(s[0])
                nite = str(s[1])
                band = str(s[2])
                exptime = str(s[3])

                #FIRST CHECK HERE THAT THE EXPOSURE NUMBER ISNT ALREADY IN THE DATABASE
                try:
                    exposure = self.backend.get(exposures, {'expnum': expnum})
                    print 'Found this exposure in desgw database...'
                    # print exposure.attributes
                    # if expnum == 506432:
                    #     sys.exit()
                    # self.backend.delete(exposure)
                    # self.backend.commit()
                    # exposure = self.backend.get(exposures, {'expnum': expnum})

                except exposures.DoesNotExist:  # add to database
                    #runProcessingIfNotAlready(image,self.backend)

                    print './diffimg-proc/getTiling.sh ' + expnum

                    res = os.popen('./diffimg-proc/getTiling.sh ' +
                                   expnum).readlines()
                    print res
                    #sys.exit()
                    field, tiling = res[-2], res[-1]
                    #print 'field_tiling',field_tiling
                    hexnite = field.strip() + '_' + tiling.strip() + '_' + str(
                        nite)
                    #print hexnite
                    #sys.exit()
                    #print 'hexnite',hexnite
                    print 'Creating exposure in database...', hexnite
                    #raw_input()
                    if '--' in hexnite:
                        print 'found bad example'
                        #raw_input()

                    exposure = exposures({
                        'expnum': expnum,
                        'nite': nite,
                        'field': field,
                        'tiling': tiling,
                        'hexnite': hexnite,
                        'band': band,
                        'jobid': np.nan,
                        'exptime': exptime,
                        'status': 'Awaiting additional exposures',
                        'triggerid': self.trigger_id,
                        'object': str(s[7])
                    })

                    self.backend.save(exposure)
                    self.backend.commit()

                hexnite = exposure.hexnite
                print 'hexnite', hexnite
                if '--' in hexnite:
                    print exposure.attributes
                    #raw_input()
                #raw_input()
                #sys.exit()
                try:
                    hex = self.backend.get(hexes, {'hexnite': hexnite})
                    #self.backend.delete(hex)
                    #self.backend.commit()
                    #hex = self.backend.get(hexes, {'hexnite': hexnite})

                    #print 'Found this hex in desgw database...'
                except hexes.DoesNotExist:
                    hex = hexes({
                        'hexnite': hexnite,
                        'strategy': self.strategy['exposure_filter'],
                        'num_target_g': len(exposure_filter[exposure_filter == 'g']),
                        'num_target_r': len(exposure_filter[exposure_filter == 'r']),
                        'num_target_i': len(exposure_filter[exposure_filter == 'i']),
                        'num_target_z': len(exposure_filter[exposure_filter == 'z']),
                        'observed_g': [],
                        'observed_r': [],
                        'observed_i': [],
                        'observed_z': [],
                        'exposures': [],
                        'status': 'Awaiting additional exposures',
                        'dagfile': None,
                    })

                    self.backend.save(hex)
                    self.backend.commit()
                    print hex.attributes
                    print 'created new hex'
                    #raw_input()

                if hex.status == 'Submitted for processing':
                    print 'This hex has already been submitted for processing'
                    continue

                # if '--' in hexnite:
                #     print hex.attributes
                #     raw_input()
                # if hex.status == 'Submitted for processing':
                #     print 'Hex ',hexnite,' band',band,'exposure',expnum,'has already been submitted for processing'
                #     #raw_input()
                #     continue

                if band == 'g':
                    if not expnum in hex.observed_g:
                        hex.observed_g.append(expnum)
                        hex.exposures.append(expnum)
                if band == 'r':
                    if not expnum in hex.observed_r:
                        hex.observed_r.append(expnum)
                        hex.exposures.append(expnum)
                if band == 'i':
                    if not expnum in hex.observed_i:
                        hex.observed_i.append(expnum)
                        hex.exposures.append(expnum)
                if band == 'z':
                    if not expnum in hex.observed_z:
                        hex.observed_z.append(expnum)
                        hex.exposures.append(expnum)

                self.backend.save(hex)
                self.backend.commit()

                print hex.attributes

                didwork = False
                if len(hex.observed_g) == hex.num_target_g:
                    if len(hex.observed_r) == hex.num_target_r:
                        if len(hex.observed_i) == hex.num_target_i:
                            if len(hex.observed_z) == hex.num_target_z:
                                print 'All exposures in strategy satisfied! '
                                #raw_input()
                                submissionPassed = True

                                for target, exps in zip([
                                        hex.num_target_g, hex.num_target_r,
                                        hex.num_target_i, hex.num_target_z
                                ], [
                                        hex.observed_g, hex.observed_r,
                                        hex.observed_i, hex.observed_z
                                ]):

                                    if target == 0: continue
                                    exposurestring = ''
                                    logstring = ''
                                    for ex in exps:
                                        exposurestring += ex + ' '
                                        logstring += ex + '_'

                                    print 'cd diffimg-proc; source DAGMaker.sh ' + exposurestring
                                    os.chdir("diffimg-proc")
                                    #out = os.popen('ls').read()
                                    out = os.popen('./DAGMaker.sh ' +
                                                   exposurestring).read()

                                    os.chdir("..")
                                    print out
                                    f = open(
                                        os.path.join(
                                            self.processingdir, logstring +
                                            hexnite + '.dagmakerlog'), 'w')
                                    f.write(out)
                                    f.close()
                                    tt = time.time()
                                    if not 'To submit this DAG do' in out:
                                        dt.sendEmailSubject(
                                            self.trigger_id,
                                            'Error in creating dag for desgw hex: '
                                            + out)
                                        submissionPassed = False
                                    else:
                                        for o in out.split('\n'):
                                            if 'file://' in o:
                                                dagfile = o.split('/')[-1]
                                                self.dagfile = os.path.join(
                                                    self.processingdir,
                                                    logstring + 'job.dag')
                                                os.system('cp diffimg-proc/' +
                                                          dagfile + ' ' +
                                                          self.dagfile)
                                        print self.dagfile

                                    print 'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; jobsub_submit_dag -G des --role=DESGW file://' + self.dagfile

                                    out = os.popen(
                                        'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; '
                                        'jobsub_submit_dag -G des --role=DESGW file://'
                                        + self.dagfile).read()
                                    print out
                                    f = open(
                                        os.path.join(
                                            self.processingdir, logstring +
                                            hexnite + '.dagsubmitlog'), 'w')
                                    f.write(out)
                                    f.close()

                                    if 'non-zero exit status' in out:
                                        dt.sendEmailSubject(
                                            self.trigger_id,
                                            'Error in submitting hex dag for processing: '
                                            + out)
                                        submissionPassed = False
                                    else:
                                        if doimmediateremove:
                                            for o in out.split('\n'):
                                                if 'Use job id' in o:
                                                    jobid = o.split()[3]
                                            out = os.popen(
                                                'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; '
                                                'jobsub_rm --jobid=' + jobid +
                                                ' --group=des --role=DESGW'
                                            ).read()
                                            print out
                                    ttt = time.time()
                                    #print 'TOTAL JOBSUB FOR A SINGLE DESGW IMAGE',ttt-tt

                                    #sys.exit()
                                #raw_input()
                                if submissionPassed:

                                    hex.status = 'Submitted for processing'
                                    hex.dagfile = self.dagfile
                                    self.backend.save(hex)
                                    self.backend.commit()

                                    for expn in hex.exposures:
                                        print expn, 'updated in database to Submitted For Processing'
                                        exp = self.backend.get(
                                            exposures, {'expnum': expn})
                                        exp.status = 'Submitted for processing'
                                        self.backend.save(exp)
                                        self.backend.commit()
                                    didwork = True
                                print 'didwork', didwork
                                print 'dagfile', self.dagfile
                                #sys.exit()
                                #raw_input()

                if not didwork:
                    print 'Could not find all images in strategy for this hex... Added hex', hexnite,' to database ' \
                        'and will continue waiting...'
                    #raw_input()

                # Happens every 30 minutes or so...
                if time.time() - pptime > postprocessingtime:
                    pptime = time.time()
                    print '***** Firing post processing script *****'
                    #sys.exit()

                    #ppout = self.pp.communicate()

                    # if self.thisevent.postprocint > 0:
                    #     print ppout
                    #     f = open(os.path.join(self.processingdir,'postproc_attempt'+str(int(self.thisevent.postprocint))+'.log'),'w')
                    #     f.write(ppout)
                    #     f.close()
                    self.thisevent.postprocint += 1
                    self.backend.save(self.thisevent)
                    self.backend.commit()

                    self.submit_post_processing()
                #sys.exit()
                #print 'Waiting 10s to check from mountain...'
                #sys.exit()
                time.sleep(10)  #looping over checking the mountain top

            # cfiles = os.listdir(os.path.join(trigger_path,trigger_id,'candidates'))
            # for f in cfiles:
            #     if f.split('.')[-1] == 'npz':
            #         cp.makeNewPage(f)

    def submit_post_processing(self):
        #firedlist = open('./processing/firedlist.txt', 'r')
        #fl = firedlist.readlines()
        #firedlist.close()
        #print fl
        #fl = ['475914','475915','475916','482859','482860','482861']

        fl = self.backend.filter(exposures, {'triggerid': self.trigger_id})

        expnumlist = ''
        for f in fl:
            expnumlist += f.expnum.strip() + ' '
        print expnumlist
        #expnumlist = '475905  475909  475913  475917  475921  475925  475929  475933  475937  475941  475945  475949  475953  475957  475961'
        print 'FIRING TIMs CODE'
        try:
            os.mkdir(
                os.path.join(self.trigger_path, self.trigger_id, 'candidates'))
        except OSError:
            print 'Candidates directory exists,', os.path.join(
                self.trigger_path, self.trigger_id, 'candidates')
            pass
        #sys.exit()
        gwpostdir = os.environ['GWPOST_DIR']
        print 'source ' + os.path.join(gwpostdir, 'diffimg_setup.sh') + '; ' \
            + 'python ' + os.path.join(gwpostdir, 'postproc.py') \
            + ' --expnums ' + expnumlist \
            + ' --outputdir ' + os.path.join(self.trigger_path, self.trigger_id, 'candidates') \
            + ' --triggerid ' + self.trigger_id + ' --season 70 --ups True'
        # os.system('source ' + os.path.join(gwpostdir, 'diffimg_setup.sh') + '; \
        #                  python '+os.path.join(gwpostdir,'postproc.py')\
        #                  +' --expnums ' + expnumlist\
        #                  + ' --outputdir ' + os.path.join(trigger_path,trigger_id,'candidates')\
        #                  + ' --triggerid '+trigger_id+' --season 46 --ups True' )

        #pid = os.spawnlp(os.P_WAIT, "source", os.path.join(gwpostdir, 'diffimg_setup.sh'))
        # args = ['yes | ssh -t [email protected] "source '+ os.path.join(gwpostdir, 'mi_setup.sh')+'; '+
        #                 'yes | python '+os.path.join(gwpostdir,'postproc.py')\
        #                  +' --expnums ' + expnumlist\
        #                  + ' --outputdir ' + os.path.join(self.trigger_path,self.trigger_id,'candidates')\
        #                  + ' --triggerid '+self.trigger_id+' --season 46 --ups True"'
        #         ]

        args = ['yes | python ' + os.path.join(gwpostdir, 'postproc.py') \
                + ' --expnums ' + expnumlist \
                + ' --outputdir ' + os.path.join(self.trigger_path, self.trigger_id, 'candidates') \
                + ' --triggerid ' + self.trigger_id + ' --season 70 --ups True']
        print args

        #sys.exit()
        f = open(
            os.path.join(
                self.processingdir, 'postproc_attempt' +
                str(int(self.thisevent.postprocint))) + '.log', 'w')
        self.pp = subprocess.Popen(args, stdout=f, stderr=f, shell=True)
        #p = subprocess.Popen(args, stdin=None, stdout=None, stderr=None, close_fds=True, shell=True)
        #print 'going'*1000
        #print self.pp.communicate()
        #print 'gone'*1000
        #p = subprocess.Popen(args,stdin=None, stdout=None, stderr=None, close_fds=True,shell=True)
        #p.communicate()
        #sys.exit()
        return

    def getDatetimeOfFirstJson(self, jsonstring):
        #'M263920-30-UTC-2016-12-1-0:44:00.json'
        js = jsonstring.split('UTC')[1]  #-2015-12-27-3:2:00.json
        #date_object = dt.strptime(js, '-%Y-%m-%d-%H_%M_%S.json')
        date_object = dt.strptime(js, '-%Y-%m-%d-%H:%M:%S.json')
        print '***** Datetime of first observation UTC', date_object, '*****'
        return date_object

    def sortHexes(self):
        pass
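
# Hypothetical driver code (not part of the snippet): constructing the manager
# does all the work: database bookkeeping, SE-job submission, then the
# mountain-monitoring loop. The trigger id and paths below are made up for
# illustration; the json filename follows the format parsed by
# getDatetimeOfFirstJson().
if __name__ == '__main__':
    em = eventmanager(
        trigger_id='G123456',
        jsonfilelist=['M263920-30-UTC-2016-12-1-0:44:00.json'],
        triggerdir='/data/des/triggers/G123456',
        datadir='/data/des/triggers/G123456/maps',
        real=False,  # use ./testdb rather than ./realdb
        trigger_path='/data/des/triggers')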
Example #19
class Memos(object):
    def __init__(self, bot):
        self.bot = bot
        self.cfg = PluginConfig(self)
        self.db = FileBackend(self.cfg.get('main_db'))
        mtt = MessageRetargeter(bot)
        self.msg = mtt.msg

    @command
    def note(self, target, mask, args):
        """
        Leaves a note for <name>, containing <text>. The next time I see <name> speak, I will deliver any notes they
        have waiting.

        Notes taken in private are delivered in private, and vice versa.

        Usage:
            %%note <name> <text>...
        """
        if mask.is_channel:
            pubmsg = True
        else:
            pubmsg = False

        if args['<name>'] == self.bot.nick:
            self.msg(mask, target, "You can't leave notes for me, silly :)")
            return

        newmemo = Memo(
                {
                    'sender': target.nick.lower(),
                    'recipient': args['<name>'].lower(),
                    'public': pubmsg,
                    'timestamp': ctime(),
                    'text': ' '.join(args['<text>'])
                }
        )
        newmemo.save(self.db)
        self.db.commit()

        confirmation_msg = "Your note for %s has been queued for delivery." % args['<name>']
        self.msg(mask, target, confirmation_msg)

    @irc3.event(irc3.rfc.PRIVMSG)  # Triggered on every message anywhere.
    def check_notes(self, target, mask, data, event):
        del data, event
        try:
            msgs = self.db.filter(Memo, {'recipient': mask.nick.lower()})
            msgword = "message" if len(msgs) < 2 else "messages"  # Fix: I have 1 messages for you!
        except Memo.DoesNotExist:
            return

        if len(msgs) == 0:
            return

        # Avoid telling people they have messages in public, if any of them are set public=False
        if contains_private_messages(msgs):
            self.msg(mask, mask.nick, "I have %s %s for you, %s!" % (len(msgs), msgword, mask.nick))
        else:
            self.msg(mask, target, "I have %s %s for you, %s!" % (len(msgs), msgword, mask.nick))

        # Actually deliver the memos
        for msg in msgs:
            # This looks ridiculous but we don't care about the timezone really, only the relative time
            # from the local system clock.
            now = datetime.datetime.strptime(ctime(), "%a %b %d %H:%M:%S %Y")
            reltime = humanize.naturaltime(now - datetime.datetime.strptime(msg.timestamp, "%a %b %d %H:%M:%S %Y"))
            message_text = "%s // %s // %s" % (msg.sender, reltime, msg.text)
            if msg.public:
                self.msg(mask, target, message_text)
                self.db.delete(msg)
            else:
                self.bot.privmsg(mask.nick, message_text)
                self.db.delete(msg)
        self.db.commit()
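
# check_notes() relies on a contains_private_messages() helper that is not
# shown in this snippet; a plausible sketch, assuming Memo documents carry the
# 'public' flag set in note():
def contains_private_messages(msgs):
    return any(not m.public for m in msgs)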
Example #20
print("Backend Saved and committed:", os.path.realpath(os.curdir))

# print(backend.get(Movie,{'pk':1}))
# or...
the_godfather = backend.get(Movie, {'name': 'The Godfather'})
print("the_godfather", the_godfather)

the_godfather.cast = {
    'Don Vito Corleone': marlon_brando,
    'Michael Corleone': al_pacino
}

#Documents stored within other objects will be automatically converted to database references.

marlon_brando.performances = [the_godfather]
al_pacino.performances = [the_godfather]

marlon_brando.save(backend)
al_pacino.save(backend)
the_godfather.save(backend)

backend.create_index(Actor, 'performances')
#Will create an index on the 'performances' field, for fast querying

godfather_cast = backend.filter(Actor, {'performances': the_godfather})
print("godfather_cast", godfather_cast)
#Will return 'Al Pacino' and 'Marlon Brando'

print(backend.get(Movie, {'name': 'The Godfather'}))
Example #21
    pass


the_godfather = Movie({'name': 'The Godfather', 'year': 1972, 'pk': 1L})

marlon_brando = Actor({'name': 'Marlon Brando', 'pk': 1L})
al_pacino = Actor({'name': 'Al Pacino', 'pk': 2L})

from blitzdb import FileBackend

backend = FileBackend("/tmp/movies")

backend.register(Movie, {'collection': 'movies'})
backend.register(Actor, {'collection': 'actors'})

backend.filter(Movie, {}).delete()
backend.filter(Actor, {}).delete()

the_godfather.save(backend)
marlon_brando.save(backend)
al_pacino.save(backend)

the_godfather = backend.get(Movie, {'pk': 1L})
#or...
the_godfather = backend.get(Movie, {'name': 'The Godfather'})

print the_godfather

movies_from_1972 = backend.filter(Movie, {'year': 1972})

the_godfather.cast = {