Example #1
def load_output(job):

    # atleast_1d guards against genfromtxt collapsing a single id to a
    # 0-d array; joining by hand avoids the trailing comma that
    # str(tuple(...)) leaves for single-element tuples
    detids = np.atleast_1d(
        np.genfromtxt(job.detection_file, dtype=None, encoding='ascii'))
    detidstr = '(' + ','.join(str(i) for i in detids.tolist()) + ')'
    mydir = Path(os.path.dirname(__file__))
    sql = mydir / 'loadphot.sql'
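    # substitute the job's output file for the FILENAME placeholder in
    # the SQL template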
    with open(sql, 'r') as f:
        g = f.read().replace('FILENAME', f"'{job.output_file}'")

    sqlo = job.output_file + '.sql'
    with open(sqlo, 'w') as f:
        f.write(g)

    # do the load
    cmd = f"psql -h {get_secret('hpss_dbhost')} -p {get_secret('hpss_dbport')} " \
          f"-d {get_secret('hpss_dbname')} -U {get_secret('hpss_dbusername')} " \
          f"-f {sqlo}"

    execute(cmd.split())

    if len(detids) > 0:
        query = "update detections set alert_ready = 't' where id in %s" % (
            detidstr, )
        db.DBSession().execute(query)
        db.DBSession().commit()
Example #2
def submit_thumbs():

    thumbids = db.DBSession().query(db.models.Thumbnail.id).filter(
        db.models.Thumbnail.source_id != None,
        db.models.Thumbnail.public_url == None
    )
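    # flatten the 1-tuples returned by query() into plain ids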
    thumbids = [t[0] for t in thumbids]


    ndt = datetime.datetime.utcnow()
    nightdate = f'{ndt.year}{ndt.month:02d}{ndt.day:02d}'
    curdir = os.getcwd()

    scriptname = Path(f'/global/cscratch1/sd/dgold/zuds/'
                      f'nightly/{nightdate}/{ndt}.thumb.sh'.replace(' ', '_'))
    scriptname.parent.mkdir(parents=True, exist_ok=True)

    os.chdir(scriptname.parent)

    thumbinname = f'{scriptname}'.replace('.sh', '.in')
    with open(thumbinname, 'w') as f:
        f.write('\n'.join([str(t) for t in thumbids]) + '\n')

    jobscript = f"""#!/bin/bash
#SBATCH --image=registry.services.nersc.gov/dgold/ztf:latest
#SBATCH --volume="/global/homes/d/dgold/lensgrinder/pipeline/:/pipeline;/global/homes/d/dgold:/home/desi;/global/homes/d/dgold/skyportal:/skyportal"
#SBATCH -N 1
#SBATCH -C haswell
#SBATCH -q realtime
#SBATCH --exclusive
#SBATCH -J zuds
#SBATCH -t 00:60:00
#SBATCH -L SCRATCH
#SBATCH -A {get_secret('nersc_account')}

HDF5_USE_FILE_LOCKING=FALSE srun -n 64 -c1 --cpu_bind=cores shifter python $HOME/lensgrinder/scripts/dothumb.py {thumbinname}

"""

    with open(scriptname, 'w') as f:
        f.write(jobscript)

    cmd = f'sbatch {scriptname}'
    process = subprocess.Popen(
        cmd.split(),
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE
    )

    stdout, stderr = process.communicate()
    print(stdout)

    if process.returncode != 0:
        raise RuntimeError(
            f'Non-zero exit code from sbatch, output was '
            f'"{str(stdout)}", "{str(stderr)}".'
        )

    os.chdir(curdir)
    _ = stdout.strip().split()[-1].decode('ascii')
Example #3
def get_num_https_ip_port(self):
    try:
        sn = db.DBSession()
        num = sn.session.query(db.AgentServer).filter(
            db.AgentServer.HTTPS == True).count()
        return num
    except Exception as e:
        print('get_num_https_ip_port:', e)
Example #4
def random_https_ip_port():
    '''Randomly pick a proxy server IP:port from the database'''
    try:
        sn = db.DBSession()
        http_ip_port_list = sn.session.query(db.AgentServer).filter(
            db.AgentServer.HTTPS == True).order_by().all()
        item = random.choice(http_ip_port_list)
        return item.IP_PORT
    except Exception as e:
        print('random_https_ip_port:', e)
Example #5
def clear_AgentServerSql(self):
    '''Delete every proxy server from the database'''
    sn = db.DBSession()
    try:
        res = sn.session.query(db.AgentServer).all()
        for item in res:
            sn.session.delete(item)
        sn.session.commit()
        return True
    except:
        sn.session.rollback()
        return False
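The try/commit/rollback boilerplate recurs across these proxy-pool helpers; a minimal context-manager sketch that could factor it out (assuming, as the examples suggest, that db.DBSession() wraps a SQLAlchemy session as .session):

from contextlib import contextmanager

@contextmanager
def session_scope():
    # hypothetical helper, not part of the original code: yields the
    # underlying SQLAlchemy session, commits on success, rolls back on
    # error, and always closes
    sn = db.DBSession()
    try:
        yield sn.session
        sn.session.commit()
    except Exception:
        sn.session.rollback()
        raise
    finally:
        sn.session.close()

# e.g. clear_AgentServerSql then reduces to:
#     with session_scope() as s:
#         for item in s.query(db.AgentServer).all():
#             s.delete(item)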
Example #6
def remove(ip_port):
    '''Remove one proxy server by its IP:port'''
    sn = db.DBSession()
    try:
        item = sn.session.query(db.AgentServer).filter(
            db.AgentServer.IP_PORT == ip_port).one()
        sn.session.delete(item)
        sn.session.commit()
        sn.session.close()
        return True
    except:
        sn.session.rollback()
        return False
Example #7
    def start_requests(self):
        session = db.DBSession()
        weibo_stars_info_list = session.query(
            db.WeiboStarsInfo).filter(db.WeiboStarsInfo.weibo_id != 0).all()
        # Todo: Testing
        for weibo_stars_info in weibo_stars_info_list[:200]:
            # for weibo_stars_info in weibo_stars_info_list:
            weibo_id = weibo_stars_info.weibo_id
            url = self.weibo_user_home_url.format(weibo_id=weibo_id)
            yield scrapy.Request(url=url, callback=self.parse_home)

        session.close()
Example #8
def get_num_ip_port(url):
    try:
        sn = db.DBSession()
        if url[:5] == 'https':
            return sn.session.query(db.AgentServer).filter(
                db.AgentServer.HTTPS == True).count()
        elif url[:4] == 'http':
            return sn.session.query(db.AgentServer).filter(
                db.AgentServer.HTTP == True).count()
        else:
            return None
    except Exception as e:
        print('get_num_ip_port:', e)
Example #9
def update_AgentServerSource(self):
    sn = db.DBSession()
    try:
        res = sn.session.query(db.AgentServerSource).all()
        for item in res:
            sn.session.delete(item)
        for item in self.AgentsCrawlerList:
            ass = db.AgentServerSource(SOURCES=item.name, URL=item.url)
            sn.session.add(ass)
        sn.session.commit()
    except Exception as e:
        print('error:', e)
        sn.session.rollback()
Example #10
    def insert_bank_crawl_result(self, item):
        session = db.DBSession()

        new_bank_crawl_result = db.BankCrawlResult()
        new_bank_crawl_result.crawl_time = item['crawl_time']
        new_bank_crawl_result.bank_name = item['bank_name']
        new_bank_crawl_result.source_url = item['source_url']
        new_bank_crawl_result.response = item['response']

        session.add(new_bank_crawl_result)
        session.commit()
        session.close()
        return
Example #11
    def insert_fund_info(self, item):
        session = db.DBSession()

        new_fund_info = db.FundInfo()
        new_fund_info.fund_name = item['fund_name']
        new_fund_info.fund_code = item['fund_code']
        new_fund_info.time = item['time']
        new_fund_info.IOPV = item['IOPV']
        new_fund_info.LJJZ = item['LJJZ']

        session.add(new_fund_info)
        session.commit()
        session.close()
        return
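Examples #10 and #11 commit and close unconditionally, so an exception during commit skips session.close(). A defensive sketch of the same insert pattern (make_row is a hypothetical callable introduced only for illustration):

def insert_row(make_row):
    # build the row, commit it, and close the session even if the
    # commit raises
    session = db.DBSession()
    try:
        session.add(make_row())
        session.commit()
    finally:
        session.close()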
Example #12
def main():
    words = []
    wordRaws = 0
    except_list = [
        'we', 'you', 'she', 'he', 'am', 'is', 'are', 'was', 'were', 'your',
        'may', 'can', 'and', 'or'
    ]
    filepath = r'E:\Data\englishexam2'
    filelist = os.listdir(filepath)
    for file in filelist:
        filename = os.path.join(filepath, file)
        words = words + wordsCollector.words_read(filename)
    words_filtered = wordsCollector.words_filter(words, except_list)
    print(len(words), len(words_filtered))
    words_times = wordsCollector.words_count(words_filtered)
    if not os.path.isfile('words.db'):
        db.db_init()
    else:
        pass
    words = db.Words
    dbcxn = db.DBSession()
    #words_insert(words_times, dbcxn, words)   # insert the analysed words into the database
    for word, fre in words_times:
        phonogram, exp = wordsCollector.words_trans(word)
        #print(phonogram, exp)
        up_value = [word, fre, exp, phonogram]
        words_update(up_value, dbcxn, words)  # update the definition found for this word
        wordRaws += 1
        if wordRaws == 1000:
            dbcxn.commit()  # commit in batches of 1000
            wordRaws = 0
    dbcxn.commit()  # flush the final partial batch
    # assumes words_Query returns an iterable of result rows
    rows = words_Query(dbcxn, words)
    # CSV header columns: word, occurrence count, phonetic transcription,
    # definition
    wordsToCsv(['单词', '出现次数', '音标', '单词解释'])
    for row in rows:
        wordsToCsv(row)
    dbcxn.close()
Example #13
def bank_response_to_info():
    logger.info('bank_response_to_info()')

    session = db.DBSession()

    last_bank_crawl_result_id = 1
    last_bank_crawl_result = session.query(db.BankInfo).order_by(db.BankInfo.bank_crawl_result_id.desc()).first()
    if last_bank_crawl_result is not None:
        last_bank_crawl_result_id = last_bank_crawl_result.bank_crawl_result_id

    deal_bank_crawl_result_list = session.query(db.BankCrawlResult)\
        .filter(db.BankCrawlResult.id > last_bank_crawl_result_id).all()

    for deal_bank_crawl_result in deal_bank_crawl_result_list:
        bank_crawl_response = deal_bank_crawl_result.response
        bank_crawl_response_json = json.loads(bank_crawl_response)
        table1_item_list = bank_crawl_response_json['Table1']

        rmb_gold_customer_sell = None
        rmb_gold_update_beijing_time = None
        for table1_item in table1_item_list:
            if table1_item['ProdName'] == '人民币账户黄金':
                rmb_gold_customer_sell = table1_item['CustomerSell']
                rmb_gold_update_beijing_time = table1_item['UpdateTime']
                break
        if rmb_gold_customer_sell is None or rmb_gold_update_beijing_time is None:
            logger.error('Cannot find 人民币账户黄金 in bank_crawl_result.response '
                         'where bank_crawl_result.id = ' + str(deal_bank_crawl_result.id))
            continue

        new_bank_info = db.BankInfo()
        new_bank_info.bank_crawl_result_id = deal_bank_crawl_result.id
        new_bank_info.bank_name = deal_bank_crawl_result.bank_name
        new_bank_info.rmb_gold_customer_sell = decimal.Decimal(rmb_gold_customer_sell)
        new_bank_info.rmb_gold_update_beijing_time = datetime.strptime(rmb_gold_update_beijing_time, '%Y-%m-%d %H:%M:%S')

        session.add(new_bank_info)

    session.commit()
    session.close()
    return
Example #14
def save_to_sql(self, datalist):
    '''(IP_PORT, ADDR, IS_ANONYMOUS, SURE_TIME)'''
    sn = db.DBSession()
    item_dir = {}
    for item in datalist:
        try:
            AgentServerSourceitem = item_dir[item[6]]
        except KeyError:
            AgentServerSourceitem = sn.session.query(db.AgentServerSource).filter(
                db.AgentServerSource.SOURCES == item[6]).first()
            # cache under the same key used for the lookup above so
            # repeated sources actually hit the cache
            item_dir[item[6]] = AgentServerSourceitem
        d = db.AgentServer(IP_PORT=item[0],
                           ADDR=item[1],
                           IS_ANONYMOUS=item[2],
                           SURE_TIME=item[3],
                           HTTP=item[4],
                           HTTPS=item[5],
                           SOURCES=AgentServerSourceitem.SOURCES)
        sn.session.add(d)
    sn.session.commit()
    sn.close()
Example #15

    bad = []

    while True:

        # check if the cookies need to be reset
        tnow = time.time()
        if tnow - tstart > 86400.:  # assume the cookies expire after 1 day
            tstart = time.time()
            icookie = ipac_authenticate()

        idownload_q = db.DBSession().query(db.ScienceImage).outerjoin(
            db.ZTFFileCopy, db.ScienceImage.id == db.ZTFFileCopy.product_id
        ).filter(
            db.ZTFFileCopy.product_id == None,
            db.ScienceImage.field.in_(ZUDS_FIELDS)
        ).order_by(
            db.ScienceImage.id.desc()
        ).options(db.sa.orm.joinedload(db.ScienceImage.mask_image))

        to_download = idownload_q.all()

        if len(to_download) == 0:
            # download for other fields
            time.sleep(30.)  # sleep for 30 seconds before checking again
            continue

        for sci in to_download:
            for t in ['sci', 'mask']:

                if sci.id in bad:
Example #16
def submit_alert_job():
    # get the detections that need alerts
    detids = db.DBSession().query(db.Detection.id).outerjoin(
        db.Alert, db.Alert.detection_id == db.Detection.id).filter(
            db.Detection.alert_ready == True, db.Alert.id == None).all()

    detids = [d[0] for d in detids]

    curdir = os.getcwd()

    ndt = datetime.datetime.utcnow()
    nightdate = f'{ndt.year}{ndt.month:02d}{ndt.day:02d}'

    if len(detids) == 0:
        raise RuntimeError('No detections to make alerts for; abandoning '
                           'forced photometry and alerting.')

    scriptname = Path(
        f'/global/cscratch1/sd/dgold/zuds/nightly/{nightdate}/{ndt}.alert.sh'.
        replace(' ', '_'))
    scriptname.parent.mkdir(parents=True, exist_ok=True)

    os.chdir(scriptname.parent)

    detinname = f'{scriptname}'.replace('.sh', '.in')
    with open(detinname, 'w') as f:
        f.write('\n'.join([str(i) for i in detids]) + '\n')

    jobscript = f"""#!/bin/bash
#SBATCH --image=registry.services.nersc.gov/dgold/ztf:latest
#SBATCH --volume="/global/homes/d/dgold/lensgrinder/pipeline/:/pipeline;/global/homes/d/dgold:/home/desi;/global/homes/d/dgold/skyportal:/skyportal"
#SBATCH -N 1
#SBATCH -C haswell
#SBATCH -q realtime
#SBATCH --exclusive
#SBATCH -J zuds
#SBATCH -t 00:60:00
#SBATCH -L SCRATCH
#SBATCH -A {get_secret('nersc_account')}
#SBATCH -o {str(scriptname).replace('.sh', '.out')}

HDF5_USE_FILE_LOCKING=FALSE srun -n 64 -c1 --cpu_bind=cores shifter python $HOME/lensgrinder/scripts/doalert.py {detinname}

    """

    with open(scriptname, 'w') as f:
        f.write(jobscript)

    cmd = f'sbatch {scriptname}'
    process = subprocess.Popen(cmd.split(),
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)

    stdout, stderr = process.communicate()
    print(stdout)

    if process.returncode != 0:
        raise RuntimeError(f'Non-zero exit code from sbatch, output was '
                           f'"{str(stdout)}", "{str(stderr)}".')

    os.chdir(curdir)
    jobid = stdout.strip().split()[-1].decode('ascii')

    return jobid
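The sbatch submit/parse pattern above also appears in examples #2 and #20; a sketch of a shared helper using subprocess.run (Python 3.5+), assuming sbatch ends its output with the job id as in 'Submitted batch job <id>':

import subprocess

def submit_slurm(scriptname):
    # submit the script and return the slurm job id as a string
    proc = subprocess.run(['sbatch', str(scriptname)],
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE)
    print(proc.stdout)
    if proc.returncode != 0:
        raise RuntimeError(f'Non-zero exit code from sbatch, output was '
                           f'"{proc.stdout}", "{proc.stderr}".')
    return proc.stdout.strip().split()[-1].decode('ascii')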
Example #17
        db.DBSession().execute(f'UPDATE thumbnails SET modified=now(), photometry_id=photometry.id, '
                               f'source_id=photometry.source_id from photometry join sources on '
                               f'photometry.source_id = sources.id join objectswithflux on  '
                               f'sources.id = objectswithflux.source_id where objectswithflux.id in {tuple(bestids)} '
                               f'and thumbnails.detection_id = objectswithflux.id')


        db.DBSession().execute(
            f'''update detections set triggers_alert = 't'
            from sources s join objectswithflux o on s.id=o.source_id
            where detections.id = o.id'''
        )

        xmatch([s.id for s in sources])

        # need to commit so that sources will be there for forced photometry
        # jobs running via slurm
        db.DBSession().commit()

        if os.getenv('NERSC_HOST') == 'cori':
            submit_thumbs()
    else:
        print('nothing to do')
        db.DBSession().commit()



if __name__ == '__main__':
    db.DBSession().get_bind().echo=True
    associate()
Example #18
def xmatch(source_ids, insert_neighbors=True):

    sids = '(' + ','.join([f"'{id}'" for id in source_ids]) + ')'

    q = '''
    create temp table rbacc as (select o.source_id, sum(rb.rb_score)  as sumrb from
    detections d join objectswithflux o on d.id = o.id join realbogus
    rb on rb.detection_id = d.id group by o.source_id);

    create index on rbacc (source_id);
    '''

    db.DBSession().execute(q)

    q = '''
    update sources set score = rbacc.sumrb from rbacc where 
    sources.id = rbacc.source_id and sources.id in %s;
    ''' % (sids,)

    db.DBSession().execute(q)

    for chunk in ['north', 'south']:

        tablename = f'dr8_{chunk}_join_neighbors'

        # update dr8_join
        insert = f'''
        insert into {tablename} (select s.id as sid, d.*, rank() over (partition by
        s.id  order by q3c_dist(s.ra, s.dec, d."RA", d."DEC") asc), 
        q3c_dist(s.ra, s.dec, d."RA", d."DEC") * 3600 sep
        from sources s join dr8_%s d 
        on q3c_join(s.ra, s.dec, d."RA", d."DEC", 30./3600.) where s.id in 
        %s)
        ''' % (chunk, sids)

        # this should take about 40 minutes

        if insert_neighbors:
            db.DBSession().execute(insert)

        q = '''
        update sources set score = -1, 
        altdata = ('{"rejected": "matched to GAIA dr8"}')::jsonb 
        from %s d where d.sid = sources.id and  d.sep < 1.5  and d."PARALLAX" > 0
        and sources.id in %s;
        ''' % (tablename, sids)

        db.DBSession().execute(q)

        q = '''
        update sources set score = -1, 
        altdata = ('{"rejected": "matched to dr8 masked source ID"}')::jsonb 
        from %s d where d.sid = sources.id 
        and d.sep < 2 and 
        (d."FRACMASKED_G" > 0.2 OR d."FRACMASKED_R" > 0.2 OR d."FRACMASKED_Z" > 0.2) 
        and sources.id in %s;
        ''' % (tablename, sids)

        db.DBSession().execute(q)

        q = '''
        update sources set score = -1,
        altdata = ('{"rejected": "matched to hits  ID ' || h.id::text || '"}')::jsonb
        from %s d join hits h on
        q3c_join(d."RA", d."DEC", h.ra, h.dec, 0.0002777 * 1.5)
        where d.sid = sources.id and sources.id in %s and d.sep < 1.5;
        ''' % (tablename, sids,)

        db.DBSession().execute(q)

        q = '''
    
        update sources set score = -1,
        altdata = ('{"rejected": "matched to MQ  ID ' || m.id::text || '"}')::jsonb
        from %s d join milliquas_v6 m
        on q3c_join(d."RA", d."DEC", m.ra, m.dec, 0.0002777 * 1.5)
        where d.sid = sources.id and sources.id in %s and d.sep < 1.5;
        ''' % (tablename, sids,)

        db.DBSession().execute(q)

        q = '''
        update sources set score = -1,
        altdata = ('{"rejected": "right on top of (< 1 arcsec) DR8 PSF "}')::jsonb
        from %s d where d.sid = sources.id and
        (d."TYPE" = 'PSF') and
        sep < 1 and sources.id in %s;
        '''  % (tablename, sids,)

        db.DBSession().execute(q)

        q = '''
        update sources set neighbor_info = to_json(d) 
        from %s d where d.sid = sources.id and d.rank = 1 
        and sources.id in %s 
        ''' % (tablename, sids,)

        db.DBSession().execute(q)


    for chunk in ['north', 'south']:

        tablename = f'dr8_{chunk}_join_neighbors'

        q = '''
        update sources set redshift = (case when d.z_spec = -99 then d.z_phot_median else d.z_spec end)
        from %s d where d.rank = 1 and d.sid = sources.id and sources.id in %s;
        ''' % (tablename, sids,)

        db.DBSession().execute(q)

    q = '''
    update sources set score = -1,
    altdata = ('{"rejected": "rejected for having z_dr8 < 0.0001"}')::jsonb
    where (sources.redshift <= 0.0001 or sources.redshift is null) and sources.id in %s;'''  % (sids,)

    db.DBSession().execute(q)


    q = '''

    create temp table detection_times as (select d.id, 
    s.obsjd - 2400000.5 as mjd from detections d join objectswithflux o on 
    d.id = o.id join singleepochsubtractions se on 
    se.id = o.image_id join scienceimages s on s.id = se.target_image_id
    where o.source_id is not null) ;

    insert into detection_times (id, mjd)  (select d.id, 
    to_char(c.binright, 'J')::double precision - 2400000.5 
    from detections d join objectswithflux o on d.id = o.id 
    join multiepochsubtractions m on m.id = o.image_id join 
    sciencecoadds c on c.id = m.target_image_id where 
    o.source_id is not null);

    create index on detection_times (id, mjd);
    create index on detection_times (mjd, id);
    
    create temp table detection_order as (select s.id as source_id, d.id, 
    rank() over (partition by s.id order by dt.mjd desc) 
    from sources s join objectswithflux o on s.id = o.source_id 
    join detections d on d.id = o.id join 
    detection_times dt on d.id = dt.id );

    create index on detection_order (source_id, id, rank);

    update sources set last_mjd = dt.mjd from detection_order 
    dord join detection_times dt on dord.id = dt.id where 
    sources.id = dord.source_id and dord.rank = 1;    
    '''

    db.DBSession().execute(q)
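Throughout xmatch the id list sids is %-formatted into raw SQL, which produces invalid SQL for an empty list and is injection-prone. A sketch of the score update with a bound expanding parameter instead (SQLAlchemy 1.2+; not how the original pipeline does it):

from sqlalchemy import bindparam, text

q = text('update sources set score = rbacc.sumrb from rbacc '
         'where sources.id = rbacc.source_id '
         'and sources.id in :sids').bindparams(
    bindparam('sids', expanding=True))
db.DBSession().execute(q, {'sids': list(source_ids)})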
Example #19
def associate(debug=False):

    db.DBSession().execute('SET TRANSACTION ISOLATION LEVEL REPEATABLE READ;')

    # make source / detection association temp table

    db.DBSession().execute('''
    create temp table detsource as (select d.id as detection_id, s.id as 
    source_id from detections d join sources s on q3c_join(s.ra, s.dec,
    d.ra, d.dec, 0.0002777 * 2)) ;

    create index on detsource (detection_id, source_id);
    create index on detsource (source_id, detection_id);
    ''')

    r = db.DBSession().execute(
    '''update objectswithflux set source_id = s.id, 
    modified = now() from  detections d join objectswithflux o 
    on d.id = o.id join detsource ds on ds.detection_id = d.id
    join sources s on ds.source_id = s.id where  o.source_id is NULL  
    and o.created_at > now() - interval '7 days'
    AND objectswithflux.id = d.id returning d.id, s.id'''
    )


    r = list(r)
    triggers_alert = [row[0] for row in r]

    print(f'associated {len(r)} detections with existing sources')

    db.DBSession().execute('''
    update sources set ra=dummy.ra, dec=dummy.dec, modified=now()
    from (select g.id, g.ra, g.dec from
    (select s.id, d.ra, d.dec, rank() over
    (partition by o.source_id order by o.flux / o.fluxerr desc)
    from sources s join objectswithflux o on o.source_id = s.id
    join detections d on o.id = d.id ) g where rank = 1)
    dummy where sources.id = dummy.id;
    ''')


    q = f'''select d.id, d.ra, d.dec, o.flux / o.fluxerr as snr
    from detections d join objectswithflux o on d.id = o.id join realbogus
    rb on rb.detection_id = d.id where o.source_id is NULL
    and rb.rb_score > {ASSOC_RB_MIN} order by o.id asc'''

    if debug:
        q += ' LIMIT 10000'

    df = pd.DataFrame(
        list(db.DBSession().execute(q)),
        columns=['id', 'ra', 'dec', 'snr']
    )

    df = df.set_index('id')

    coord = SkyCoord(df['ra'], df['dec'], unit='deg')
    idx1, idx2, sep, _ = search_around_sky(coord, coord, ASSOC_RADIUS_ARCSEC * u.arcsec)
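    # a catalog matched against itself pairs every detection with itself;
    # drop those self-matches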
    dropdupes = idx1 != idx2
    idx1 = idx1[dropdupes]
    idx2 = idx2[dropdupes]
    sep = sep[dropdupes]

    # cluster the detections into sources using DBSCAN
    clustering = DBSCAN(
        eps=ASSOC_RADIUS_ARCSEC,
        min_samples=2,
        metric='precomputed'
    )

    # construct the sparse pairwise distance matrix
    nobj = len(df)
    darcsec = sep.to('arcsec').value
    distmat = csr_matrix((darcsec, (idx1, idx2)), shape=(nobj, nobj))
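    # DBSCAN treats entries absent from the sparse precomputed matrix as
    # beyond eps; min_samples=2 means a detection needs one neighbor
    # within ASSOC_RADIUS_ARCSEC to join a cluster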
    clustering.fit(distmat)

    df['source'] = clustering.labels_
    df = df[df['source'] != -1]

    with db.DBSession().no_autoflush:
        default_group = db.DBSession().query(
            db.models.Group
        ).get(DEFAULT_GROUP)

        default_instrument = db.DBSession().query(
            db.models.Instrument
        ).get(DEFAULT_INSTRUMENT)

    bestdets = source_bestdet_from_solution(df)

    # cache d1 and d2
    # get thumbnail pics
    from sqlalchemy.orm import joinedload
    d1 = db.DBSession().query(db.Detection).filter(db.Detection.id.in_(
        [int(v) for v in bestdets.values()]
    )).all()

    detcache = {d.id: d for d in d1}
    sourceid_map = {}

    curval = db.DBSession().execute("select nextval('namenum')").first()[0]
    stups = []
    gtups = []
    sources = []

    if len(bestdets) > 0:
        for sourceid in tqdm(bestdets):
            bestdet = detcache[bestdets[sourceid]]

            name = publish.get_next_name(num=curval)
            curval += 1
            source = db.models.Source(
                id=name,
                groups=[default_group],
                ra=bestdet.ra,
                dec=bestdet.dec
            )

            sourceid_map[sourceid] = source
            sources.append(source)

            stups.append(f"('{name}', {bestdet.ra}, {bestdet.dec}, now(), now(), 'f', 'f', 'f', 0, 0)")
            gtups.append(f"('{name}', 1, now(), now())")

        db.DBSession().execute('INSERT INTO sources (id, ra, dec, created_at, modified, transient, varstar, is_roid, "offset", score) VALUES '
                               f'{",".join(stups)}')
        db.DBSession().execute('INSERT INTO group_sources (source_id, group_id, created_at, modified) '
                               f'VALUES {",".join(gtups)}')

        pid = [row[0] for row in db.DBSession().execute(
            'INSERT INTO photometry (source_id, instrument_id, created_at, modified) '
            f'VALUES {",".join(gtups)} RETURNING ID'
        )]

        stups = [f"(now(), now(), {p}, '{source.sdss_url}', 'sdss')" for p, source in zip(
            pid, sources
        )]

        dtups = [f"(now(), now(), {p}, '{source.desi_dr8_url}', 'dr8')" for p, source in zip(
            pid, sources
        )]

        db.DBSession().execute(
            'insert into thumbnails (created_at, modified, photometry_id, public_url, type) '
            f"VALUES  {','.join(stups)}")
        db.DBSession().execute(
            'insert into thumbnails (created_at, modified, photometry_id, public_url, type) '
            f"VALUES {','.join(dtups)}")


        db.DBSession().execute(f"select setval('namenum', {curval})")
        db.DBSession().flush()

        query = []
        for sourceid, group in df.groupby('source'):
            realid = sourceid_map[sourceid].id
            dets = group.index.tolist()
            query.append(
                f'''
                update objectswithflux set source_id = '{realid}', modified=now()
                where objectswithflux.id in {tuple(dets)}
                '''
            )

        db.DBSession().execute(
            ';'.join(query)
        )

        bestids =  [int(v) for v in bestdets.values()]

        db.DBSession().execute(f'UPDATE thumbnails SET modified=now(), photometry_id=photometry.id, '
                               f'source_id=photometry.source_id from photometry join sources on '
                               f'photometry.source_id = sources.id join objectswithflux on  '
                               f'sources.id = objectswithflux.source_id where objectswithflux.id in {tuple(bestids)} '
                               f'and thumbnails.detection_id = objectswithflux.id')


        db.DBSession().execute(
            f'''update detections set triggers_alert = 't'
            from sources s join objectswithflux o on s.id=o.source_id
            where detections.id = o.id'''
        )

        xmatch([s.id for s in sources])

        # need to commit so that sources will be there for forced photometry
        # jobs running via slurm
        db.DBSession().commit()

        if os.getenv('NERSC_HOST') == 'cori':
            submit_thumbs()
    else:
        print('nothing to do')
        db.DBSession().commit()
Example #20
def submit_forcephot_chain():
    # get the detections that need alerts
    detids = db.DBSession().query(db.Detection.id).outerjoin(
        db.Alert, db.Alert.detection_id == db.Detection.id).filter(
            db.Detection.triggers_alert == True,
            db.sa.or_(db.Detection.alert_ready == None,
                      db.Detection.alert_ready == False), db.Alert.id == None)

    detids = [d[0] for d in detids]

    ndt = datetime.datetime.utcnow()
    nightdate = f'{ndt.year}{ndt.month:02d}{ndt.day:02d}'

    curdir = os.getcwd()

    scriptname = Path(f'/global/cscratch1/sd/dgold/zuds/'
                      f'nightly/{nightdate}/{ndt}.phot.sh'.replace(' ', '_'))
    scriptname.parent.mkdir(parents=True, exist_ok=True)

    os.chdir(scriptname.parent)

    image_names = db.DBSession().query(
        db.SingleEpochSubtraction.basename, db.SingleEpochSubtraction.id).join(
            db.ReferenceImage, db.SingleEpochSubtraction.reference_image_id
            == db.ReferenceImage.id).filter(
                db.ReferenceImage.version == 'zuds5', ).all()

    image_names = sorted(image_names,
                         key=lambda s: s[0].split('ztf_')[1].split('_')[0],
                         reverse=True)
    image_names = image_names[:FORCEPHOT_IMAGE_LIMIT]

    if len(detids) > 0:
        # randomize the processing order; the list above is already
        # limited to the most recent FORCEPHOT_IMAGE_LIMIT images
        random.shuffle(image_names)

    imginname = f'{scriptname}'.replace('.sh', '.in')
    photoutname = imginname.replace('.in', '.output')

    outnames = []
    with open(imginname, 'w') as f:
        for name, idnum in image_names:
            # name = name[0]
            g = name.split('_sciimg')[0].split('_')
            q = g[-1]
            c = g[-3]
            b = g[-4]
            field = g[-5]
            outnames.append(
                f'/global/cfs/cdirs/m937/www/data/scratch/{field}/{c}/'
                f'{q}/{b}/{name} {idnum}')

        f.write('\n'.join(outnames) + '\n')

    detinname = f'{scriptname}'.replace('.sh', '.det.in')

    with open(detinname, 'w') as f:
        f.write('\n'.join([str(i) for i in detids]) + '\n')

        jobscript = f"""#!/bin/bash
#SBATCH --image=registry.services.nersc.gov/dgold/ztf:latest
#SBATCH --volume="/global/homes/d/dgold/lensgrinder/pipeline/:/pipeline;/global/homes/d/dgold:/home/desi;/global/homes/d/dgold/skyportal:/skyportal"
#SBATCH -N 13
#SBATCH -C haswell
#SBATCH -q realtime
#SBATCH --exclusive
#SBATCH -J forcephot
#SBATCH -t 00:60:00
#SBATCH -L SCRATCH
#SBATCH -A {get_secret('nersc_account')}
#SBATCH -o {str(scriptname).replace('.sh', '.out')}

HDF5_USE_FILE_LOCKING=FALSE srun -n 832 -c1 --cpu_bind=cores shifter python $HOME/lensgrinder/scripts/dophot.py {imginname} {photoutname}

    """

    with open(scriptname, 'w') as f:
        f.write(jobscript)

    cmd = f'sbatch {scriptname}'
    process = subprocess.Popen(cmd.split(),
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)

    stdout, stderr = process.communicate()
    print(stdout)

    if process.returncode != 0:
        raise RuntimeError(f'Non-zero exit code from sbatch, output was '
                           f'"{str(stdout)}", "{str(stderr)}".')

    jobid = stdout.strip().split()[-1].decode('ascii')

    os.chdir(curdir)
    return jobid, detinname, photoutname
Example #21
    print(stdout)

    if process.returncode != 0:
        raise RuntimeError(f'Non-zero exit code from sbatch, output was '
                           f'"{str(stdout)}", "{str(stderr)}".')

    jobid = stdout.strip().split()[-1].decode('ascii')

    os.chdir(curdir)
    return jobid, detinname, photoutname


if __name__ == '__main__':
    while True:

        db.DBSession().rollback()

        # look for active jobs
        active_jobs = db.DBSession().query(db.ForcePhotJob).filter(
            db.sa.or_(
                db.ForcePhotJob.status == 'processing',
                db.ForcePhotJob.status == 'ready_for_loading',
            ))

        # get the slurm jobs and their statuses

        try:
            job_statuses = get_job_statuses()
        except RuntimeError as e:
            exc_info = sys.exc_info()
            traceback.print_exception(*exc_info)
Example #22
def get_useful_AgentsServer(self):
    sn = db.DBSession()
    res = sn.session.query(db.AgentServer).all()
    return (item.IP_PORT for item in res)
Example #23
    print(stdout)

    if process.returncode != 0:
        raise RuntimeError(f'Non-zero exit code from sbatch, output was '
                           f'"{str(stdout)}", "{str(stderr)}".')

    os.chdir(curdir)
    jobid = stdout.strip().split()[-1].decode('ascii')

    return jobid


if __name__ == '__main__':
    while True:

        db.DBSession().rollback()

        # look for failed jobs and mark them
        currently_processing = db.DBSession().query(
            db.Job).filter(db.Job.status == 'processing')

        alert_processing = db.DBSession().query(
            db.AlertJob).filter(db.AlertJob.status == 'processing')

        # get the slurm jobs and their statuses

        try:
            job_statuses = get_job_statuses()
        except RuntimeError as e:
            exc_info = sys.exc_info()
            traceback.print_exception(*exc_info)