Esempio n. 1
0
def agn_b_scores(name, username, password, colors=False):
    """Score a ZTF candidate for AGN-ness (WISE colors) and low Galactic
    latitude.

    Parameters
    ----------
    name : str
        ZTF objectId to look up on Kowalski.
    username, password : str
        Kowalski credentials.
    colors : bool
        If True, also return the AGN flag and the raw WISE colors
        instead of the latitude score.

    Returns
    -------
    (temp_points, b_temp_points) by default, or
    (temp_points, agn, [w1 - w2, w2 - w3]) when ``colors`` is True.
    Negative points mean "likely AGN"; positive points mean "unlikely".
    """
    k = Kowalski(username=username, password=password, verbose=False)
    q = {
        "query_type": "find",
        "query": {
            "catalog": 'ZTF_alerts',
            "filter": {
                "objectId": name
            },
            "projection": {
                # Drop the heavy cutout images; only candidate metadata
                # is needed to get the coordinates.
                "_id": 0,
                "cutoutScience": 0,
                "cutoutTemplate": 0,
                "cutoutDifference": 0
            },
        }
    }
    r = k.query(query=q)
    alerts = r['result_data']['query_result']
    ra, dec = alerts[0]['candidate']['ra'], alerts[0]['candidate']['dec']

    cc = SkyCoord(ra, dec, unit=(u.deg, u.deg))
    table = Irsa.query_region(coordinates=cc,
                              catalog="allwise_p3as_psd",
                              spatial="Cone",
                              radius=2 * u.arcsec)

    # AGN WISE color cuts (W1-W2 > 0.8, W2-W3 > 2.5) with +/- margins
    # mapped onto a point scale: clearly AGN -> negative points.
    if len(table['w1mpro']) == 0:
        # No AllWISE counterpart: cannot be classified as AGN.
        agn = False
        temp_points = 6
        # Bug fix: w1/w2/w3 were previously undefined on this branch, so
        # calling with colors=True raised NameError when there was no
        # WISE match.  Use NaN colors instead.
        w1 = w2 = w3 = np.nan
    else:
        # assumes the 2-arcsec cone returns a single row — TODO confirm;
        # with multiple rows these column comparisons would be ambiguous.
        w1, w1_err, w2, w2_err, w3, w3_err = table['w1mpro'], table[
            'w1sigmpro'], table['w2mpro'], table['w2sigmpro'], table[
                'w3mpro'], table['w3sigmpro']
        if w1 - w2 > 0.8 + 0.1 and w2_err < 0.5 and w1_err < 0.5:
            agn = True
            temp_points = -2
        elif w2 - w3 > 2.5 + 0.1 and w2_err < 0.5 and w3_err < 0.5:
            agn = True
            temp_points = -2
        elif w1 - w2 > 0.8 and w2_err < 0.5 and w1_err < 0.5:
            agn = True
            temp_points = 0
        elif w2 - w3 > 2.5 and w2_err < 0.5 and w3_err < 0.5:
            agn = True
            temp_points = 0
        elif w1 - w2 > 0.8 - 0.2 and w2_err < 0.5 and w1_err < 0.5:
            agn = False
            temp_points = 2
        elif w2 - w3 > 2.5 - 0.3 and w2_err < 0.5 and w3_err < 0.5:
            agn = False
            temp_points = 2
        elif w1 - w2 > 0.8 - 0.5 and w2_err < 0.5 and w1_err < 0.5:
            agn = False
            temp_points = 4
        elif w2 - w3 > 2.5 - 0.5 and w2_err < 0.5 and w3_err < 0.5:
            agn = False
            temp_points = 4
        else:
            agn = False
            temp_points = 6

    # Penalize candidates at low Galactic latitude (|b| < 15 deg).
    if np.abs(cc.galactic.b.value) < 15:
        b_temp_points = -10
    else:
        b_temp_points = 0

    if colors:
        return temp_points, agn, [w1 - w2, w2 - w3]
    else:
        return temp_points, b_temp_points
Esempio n. 2
0
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Check neighboring alerts')
    parser.add_argument('names',
                        metavar='ZTF objectIds',
                        type=str,
                        nargs='+',
                        help='ZTF names of the candidates to check')
    args = parser.parse_args()

    # Read the Kowalski credentials from the local secrets file
    secrets = ascii.read('./secrets.csv', format='csv')
    username = secrets['kowalski_user'][0]
    password = secrets['kowalski_pwd'][0]

    kow = Kowalski(username=username, password=password)

    if args.names:
        print("Checking alerts...")
        ind_check_alerts = []
        problematic = []
        for objid in args.names:
            try:
                index_check = alert_check_complete(kow, objid)
                ind_check_alerts.append(index_check)
            except IndexError:
                # Bug fix: the original reset the whole accumulator here
                # (ind_check_alerts = []), discarding the results of every
                # object processed before the problematic one.  Just record
                # the failing object and continue.
                print(f"Problematic: {objid}")
                problematic.append(objid)

        ind_check_alerts = np.array(ind_check_alerts)
Esempio n. 3
0
# Batch/catalog configuration taken from the command-line options.
batch_size = opts.batch_size
Ncatalog = opts.Ncatalog
algorithm = opts.algorithm

catalogDir = os.path.join(outputDir, 'catalog', algorithm)

# Slurm submission directory and its logs subdirectory (created if missing).
slurmDir = os.path.join(outputDir, 'slurm')
if not os.path.isdir(slurmDir):
    os.makedirs(slurmDir)

logsDir = os.path.join(slurmDir, 'logs')
if not os.path.isdir(logsDir):
    os.makedirs(logsDir)

if opts.doQuadrantScale:
    # Kowalski client is only needed when working at quadrant scale.
    kow = Kowalski(username=opts.user, password=opts.pwd)

if opts.lightcurve_source == "Kowalski":
    if opts.source_type == "quadrant":
        # Full ZTF grid: fields 1-879, CCDs 1-16, quadrants 1-4.
        fields, ccds, quadrants = np.arange(1, 880), np.arange(1,
                                                               17), np.arange(
                                                                   1, 5)
        #ccds, quadrants = [1], [1]

        # Hand-picked field lists — presumably specific observing
        # campaigns; TODO confirm their provenance.
        fields1 = [683, 853, 487, 718, 372, 842, 359, 778, 699, 296]
        fields2 = [841, 852, 682, 717, 488, 423, 424, 563, 562, 297, 700, 777]
        fields3 = [851, 848, 797, 761, 721, 508, 352, 355, 364, 379]
        fields4 = [1866, 1834, 1835, 1804, 1734, 1655, 1565]

        fields_complete = fields1 + fields2 + fields3 + fields4
        # NOTE(review): this overrides both the full grid and
        # fields_complete above — fields 1600-1699 are what is actually
        # used downstream; confirm this is intentional.
        fields = np.arange(1600, 1700)
Esempio n. 4
0
 def test_authenticate(self, username, password):
     """Authenticate against Kowalski; bad credentials raise an exception."""
     Kowalski(username=username, password=password)
Esempio n. 5
0
    def test_query(self, username, password, benchmark=False):
        """Round-trip a Kowalski query: enqueue it, fetch the saved result,
        delete it, and optionally time a small aggregation benchmark.
        """
        with Kowalski(username=username, password=password) as k:

            assert k.check_connection()

            # base query:
            # qu = {"query_type": "general_search",
            #       "query": "db['ZTF_alerts'].find_one({}, {'_id': 1})",
            #       "kwargs": {}}
            # ``collection`` comes from the enclosing module — presumably
            # the test catalog name; confirm against the module source.
            qu = {
                "query_type": "find_one",
                "query": {
                    "catalog": collection,
                    "query": {},
                },
                "kwargs": {}
            }
            timeout = 1  # seconds

            # query: enqueue_only, save=True, save=False
            qu['kwargs']['enqueue_only'] = True
            resp = k.query(query=qu, timeout=timeout)
            # print(resp)
            assert 'query_id' in resp
            assert resp['status'] == 'enqueued'

            # fetch enqueued/saved query; sleep so the server has had
            # ``timeout`` seconds to execute it
            time.sleep(timeout)
            qid = resp['query_id']
            result = k.get_query(query_id=qid, part='result')
            # print(result)
            assert 'result' in result
            assert 'query_result' in result['result']

            # delete saved query
            result = k.delete_query(query_id=qid)
            assert 'message' in result
            assert result['message'] == 'success'

            # little benchmark:
            if benchmark:
                # qu = {"query_type": "general_search",
                #       "query": "db['ZTF_alerts'].aggregate([{'$match': {'candidate.rb': {'$gt': 0.98}}}, {'$project': {'_id': 1}}, {'$sample': {'size': 1}}])",
                #       "kwargs": {}}
                # Sample one high-rb alert _id per query.
                qu = {
                    "query_type": "aggregate",
                    "query": {
                        "catalog":
                        'ZTF_alerts',
                        "pipeline": [{
                            '$match': {
                                'candidate.rb': {
                                    '$gt': 0.98
                                }
                            }
                        }, {
                            '$project': {
                                '_id': 1
                            }
                        }, {
                            '$sample': {
                                'size': 1
                            }
                        }],
                    },
                    "kwargs": {
                        "save": False
                    }
                }
                timeout = 5  # seconds
                # NOTE(review): this rebuilds kwargs and re-sets the same
                # "save": False already present in the literal above —
                # redundant but harmless.
                qu['kwargs'] = dict()
                qu['kwargs']['save'] = False
                times = []
                # Time five consecutive synchronous queries.
                for i in range(5):
                    tic = time.time()
                    result = k.query(query=qu, timeout=timeout)
                    toc = time.time()
                    assert 'result_data' in result
                    assert 'query_result' in result['result_data']
                    times.append(toc - tic)

                print(times)
Esempio n. 6
0
def query_kowalski(username,
                   password,
                   ra_center,
                   dec_center,
                   radius,
                   jd_trigger,
                   min_days,
                   max_days,
                   slices,
                   ndethist_min,
                   within_days,
                   after_trigger=True):
    '''Query Kowalski with cone searches and apply the selection criteria.

    Parameters
    ----------
    username, password : str
        Kowalski credentials.
    ra_center, dec_center : sequences of float
        Cone-search center coordinates (deg).
    radius : float
        Cone-search radius in arcmin.
    jd_trigger : float
        JD of the trigger; ignored (set to 0) when after_trigger is False.
    min_days, max_days : float
        Minimum/maximum allowed alert-history duration in days.
    slices : int
        Number of slices the centers are split into for querying.
    ndethist_min : int
        Minimum number of detections in the alert history.
    within_days : float
        Alert history must start within this many days of the trigger.
    after_trigger : bool
        If False, drop the requirement that alerts start after jd_trigger.

    Returns
    -------
    set of ZTF objectIds passing all the cuts.
    '''

    k = Kowalski(username=username, password=password, verbose=False)
    # Cumulative results over all slices
    set_objectId_all = set([])
    slices = slices + 1

    # Slice boundaries, computed once (the original rebuilt these linspace
    # arrays on every loop iteration).
    ra_edges = np.linspace(0, len(ra_center), slices)[:-1]
    dec_edges = np.linspace(0, len(dec_center), slices)[:-1]

    for i, slice_lim in enumerate(ra_edges):
        try:
            ra_center_slice = ra_center[int(slice_lim):int(ra_edges[i + 1])]
            dec_center_slice = dec_center[int(slice_lim):int(dec_edges[i + 1])]
        except IndexError:
            # Last slice: take everything up to the end.
            ra_center_slice = ra_center[int(slice_lim):]
            dec_center_slice = dec_center[int(slice_lim):]
        coords_arr = []
        for ra, dec in zip(ra_center_slice, dec_center_slice):
            try:
                # Remove points too far south for ZTF.
                # Say, keep only Dec>-40 deg to be conservative
                if dec < -40.:
                    continue
                coords = SkyCoord(ra=float(ra) * u.deg, dec=float(dec) * u.deg)
                coords_arr.append((coords.ra.deg, coords.dec.deg))
            except ValueError:
                print("Problems with the galaxy coordinates?")
                continue

        # Correct the minimum number of detections
        ndethist_min_corrected = int(ndethist_min - 1)

        # Correct the jd_trigger if the user specifies to query
        # also before the trigger
        if after_trigger is False:
            jd_trigger = 0

        # Progress report (narrowed from a bare except: only the last
        # slice has no upper edge).
        if i + 1 < len(ra_edges):
            print(f"slice: {int(slice_lim)}:{int(ra_edges[i + 1])}")
        else:
            print(f"slice: {int(slice_lim)}:{int(len(ra_center))}")

        # Duplicate projection keys from the original
        # (candidate.jdendhist, candidate.ndethist) have been removed.
        q = {
            "query_type": "cone_search",
            "object_coordinates": {
                "radec": f"{coords_arr}",
                "cone_search_radius": f"{radius}",
                "cone_search_unit": "arcmin"
            },
            "catalogs": {
                "ZTF_alerts": {
                    "filter": {
                        "candidate.jd": {
                            '$gt': jd_trigger
                        },
                        "candidate.drb": {
                            '$gt': 0.5
                        },
                        "candidate.ndethist": {
                            '$gt': ndethist_min_corrected
                        },
                        "candidate.jdstarthist": {
                            '$gt': jd_trigger,
                            '$lt': jd_trigger + within_days
                        }
                    },
                    "projection": {
                        "objectId": 1,
                        "candidate.rcid": 1,
                        "candidate.ra": 1,
                        "candidate.dec": 1,
                        "candidate.jd": 1,
                        "candidate.ndethist": 1,
                        "candidate.jdstarthist": 1,
                        "candidate.jdendhist": 1,
                        "candidate.magpsf": 1,
                        "candidate.sigmapsf": 1,
                        "candidate.fid": 1,
                        "candidate.programid": 1,
                        "candidate.isdiffpos": 1,
                        "candidate.ssdistnr": 1,
                        "candidate.rb": 1,
                        "candidate.drb": 1,
                        "candidate.distpsnr1": 1,
                        "candidate.sgscore1": 1,
                        "candidate.srmag1": 1,
                        "candidate.distpsnr2": 1,
                        "candidate.sgscore2": 1,
                        "candidate.srmag2": 1,
                        "candidate.distpsnr3": 1,
                        "candidate.sgscore3": 1,
                        "candidate.srmag3": 1
                    }
                }
            },
            "kwargs": {
                "hint": "gw01"
            }
        }

        # Perform the query
        r = k.query(query=q)
        print('Search completed for this slice.')

        objectId_list = []
        with_neg_sub = []
        old = []
        out_of_time_window = []
        stellar_list = []

        try:
            if r['result_data']['ZTF_alerts'] == []:
                continue
            keys_list = list(r['result_data']['ZTF_alerts'].keys())
        except AttributeError:
            print("Error in the keys list?? Check 'r' ")
            # Try one more time
            print("Trying to query again the same slice")
            try:
                r = k.query(query=q)
                keys_list = list(r['result_data']['ZTF_alerts'].keys())
            except AttributeError:
                print("The query failed again, skipping slice..")
                continue

        for key in keys_list:
            all_info = r['result_data']['ZTF_alerts'][key]

            for info in all_info:
                # Skip objects already flagged as old or stellar.
                if info['objectId'] in old:
                    continue
                if info['objectId'] in stellar_list:
                    continue
                # Reject likely solar-system matches (< 10 arcsec).
                if np.abs(info['candidate']['ssdistnr']) < 10:
                    continue
                # Track objects with negative subtractions; removed below.
                if info['candidate']['isdiffpos'] in ['f', 0]:
                    with_neg_sub.append(info['objectId'])
                # History too short: skip this alert outright.
                if (info['candidate']['jdendhist'] -
                        info['candidate']['jdstarthist']) < min_days:
                    continue
                # History too long, or starting too late: flag as old.
                if (info['candidate']['jdendhist'] -
                        info['candidate']['jdstarthist']) > max_days:
                    old.append(info['objectId'])
                if (info['candidate']['jdstarthist'] -
                        jd_trigger) > within_days:
                    old.append(info['objectId'])
                # REMOVE!  Only for O3a paper
                # if (info['candidate']['jdendhist'] -
                # info['candidate']['jdstarthist']) >= 72./24. and
                # info['candidate']['ndethist'] <= 2. :
                #     out_of_time_window.append(info['objectId'])
                if after_trigger is True:
                    if (info['candidate']['jdendhist'] -
                            jd_trigger) > max_days:
                        out_of_time_window.append(info['objectId'])
                else:
                    if (info['candidate']['jdendhist'] -
                            info['candidate']['jdstarthist']) > max_days:
                        out_of_time_window.append(info['objectId'])
                # Flag probable stars: close PS1 match with high sgscore.
                try:
                    if (np.abs(info['candidate']['distpsnr1']) < 1.5
                            and info['candidate']['sgscore1'] > 0.50):
                        stellar_list.append(info['objectId'])
                except (KeyError, ValueError):
                    pass
                # Reject candidates near a bright (r < 15) PS1 star.
                try:
                    if (np.abs(info['candidate']['distpsnr1']) < 15.
                            and info['candidate']['srmag1'] < 15.
                            and info['candidate']['srmag1'] > 0.
                            and info['candidate']['sgscore1'] >= 0.5):
                        continue
                except (KeyError, ValueError):
                    pass
                try:
                    if (np.abs(info['candidate']['distpsnr2']) < 15.
                            and info['candidate']['srmag2'] < 15.
                            and info['candidate']['srmag2'] > 0.
                            and info['candidate']['sgscore2'] >= 0.5):
                        continue
                except (KeyError, ValueError):
                    pass
                try:
                    if (np.abs(info['candidate']['distpsnr3']) < 15.
                            and info['candidate']['srmag3'] < 15.
                            and info['candidate']['srmag3'] > 0.
                            and info['candidate']['sgscore3'] >= 0.5):
                        continue
                except (KeyError, ValueError):
                    pass

                objectId_list.append(info['objectId'])

        set_objectId = set(objectId_list)

        # Remove negative-subtraction, stellar, old, and out-of-window
        # objects.  difference_update replaces the original
        # remove-inside-try loops (set.remove raises only KeyError).
        for reject in (with_neg_sub, stellar_list, old, out_of_time_window):
            set_objectId.difference_update(reject)
        print(set_objectId)

        set_objectId_all = set_objectId_all | set_objectId
        print("Cumulative:", set_objectId_all)

    return set_objectId_all
Esempio n. 7
0
from penquins import Kowalski
from time import time
import numpy as np

if __name__ == '__main__':
    # Benchmark paging through a ZTF sources catalog on Kowalski.
    k = Kowalski(username='******', password='******', verbose=False)

    batch_size = 100
    num_batches = 100

    times = []

    for nb in range(num_batches):
        # Page with skip/limit, projecting only the light-curve columns.
        qu = {
            "query_type":
            "general_search",
            "query":
            "db['ZTF_sources_20190412'].find({}, " +
            "{'_id': 1, 'data.programid': 1, 'data.hjd': 1, " +
            f"'data.mag': 1, 'data.magerr': 1}}).skip({nb*batch_size}).limit({batch_size})"
        }

        tic = time()
        r = k.query(query=qu)
        toc = time()
        times.append(toc - tic)
        # Bug fix: the original print call was missing its closing
        # parenthesis (truncated source).
        print(
            f'Fetching batch {nb+1}/{num_batches} with {batch_size} sources/LCs took: {toc-tic:.3f} seconds'
        )
    absmagWD = gmag + 5 * (np.log10(np.abs(parallax)) - 2)

baseoutputDir = opts.outputDir
modelPaths = opts.modelPath.split(",")

if opts.doPlots:
    kow = []
    # Retry the Kowalski connection up to nquery times before giving up.
    nquery = 10
    cnt = 0
    while cnt < nquery:
        try:
            TIMEOUT = 60
            protocol, host, port = "https", "gloria.caltech.edu", 443
            kow = Kowalski(username=opts.user,
                           password=opts.pwd,
                           timeout=TIMEOUT,
                           protocol=protocol,
                           host=host,
                           port=port)
            break
        except Exception:
            # Narrowed from a bare ``except:`` so KeyboardInterrupt /
            # SystemExit still abort the retry loop; wait before retrying.
            time.sleep(5)
        cnt = cnt + 1
    if cnt == nquery:
        raise Exception('Kowalski connection failed...')

catalogPaths = []
for modelPath in modelPaths:
    folders = glob.glob(os.path.join(modelPath, '*_*')) + glob.glob(
        os.path.join(modelPath, '*.*'))
    for folder in folders:
        if opts.doField:
Esempio n. 9
0
def query_kowalski(name, DEFAULT_AUTH_kowalski):
    '''Download the alert light curve of a ZTF object from Kowalski and
    save it to <cwd>/<name>/lightcurves/kowalski_lc_<name>.csv.

    Please see
    https://kowalski.caltech.edu/docs/python_client
    for a description of the client.

    Parameters
    ----------
    name : str
        ZTF objectId.
    DEFAULT_AUTH_kowalski : sequence of str
        (username, password) pair.
    '''
    # Generate the directory tree that will store the data.
    # os.makedirs(exist_ok=True) replaces the original os.stat/os.mkdir
    # pairs wrapped in bare excepts.
    cwd = os.getcwd()
    targetdir = cwd + '/' + name + '/'
    os.makedirs(targetdir + 'lightcurves/', exist_ok=True)

    # kowalski query
    k = Kowalski(username=DEFAULT_AUTH_kowalski[0],
                 password=DEFAULT_AUTH_kowalski[1],
                 verbose=False)

    qu = {
        "query_type": "general_search",
        "query": "db['ZTF_alerts'].find({'objectId': {'$eq': '%s'}})" % name
    }
    r = k.query(query=qu)

    if 'result_data' in r.keys():
        rdata = r['result_data']
        rrdata = rdata['query_result']
        n = len(rrdata)
        # One entry per alert; diffimfile holds fixed-width placeholders
        # so the string column has a constant itemsize.
        jds = np.zeros(n)
        fids = np.zeros(n)
        jdstarts = np.zeros(n)
        jdends = np.zeros(n)
        fieldids = np.zeros(n)
        magpsfs = np.zeros(n)
        sigmapsfs = np.zeros(n)
        diffimfile = [' ' * 57 for x in range(n)]

        for i in range(n):
            rrr = rrdata[i]
            # See rrr['candidate'].keys() for everything available; only
            # jd, fid, field, magpsf, sigmapsf and the reference-image
            # info are saved here.
            jds[i] = rrr['candidate']['jd']
            fids[i] = rrr['candidate']['fid']
            fieldids[i] = rrr['candidate']['field']
            magpsfs[i] = rrr['candidate']['magpsf']
            sigmapsfs[i] = rrr['candidate']['sigmapsf']

            # Reference-image fields are not always present (reference
            # image not made yet?), so default to the zero/blank values.
            if 'jdstartref' in rrr['candidate'].keys():
                jdstarts[i] = rrr['candidate']['jdstartref']
            if 'jdendref' in rrr['candidate'].keys():
                jdends[i] = rrr['candidate']['jdendref']
            if 'pdiffimfilename' in rrr['candidate'].keys():
                diffimfile[i] = rrr['candidate']['pdiffimfilename']
        tb = Table([
            jds, fids, fieldids, magpsfs, sigmapsfs, jdstarts, jdends,
            diffimfile
        ],
                   names=[
                       'jd', 'fid', 'fieldid', 'magpsf', 'sigmapsf',
                       'jdstartref', 'jdendref', 'diffimfilename'
                   ])
        # Fix: the original path contained a doubled slash
        # ('lightcurves//kowalski_lc_...'); same file, cleaner path.
        tb.write(targetdir + 'lightcurves/kowalski_lc_' + name + '.csv')

    else:
        print('query is not successful for %s' % name)
Esempio n. 10
0
def query_kowalski(username, password, clu, args):
    '''Query Kowalski with cone searches around the CLU galaxies and
    apply the selection criteria.

    Parameters
    ----------
    username, password : str
        Kowalski credentials.
    clu : table-like
        Galaxy catalog with "name", "ra", "dec" columns (deg).
    args : argparse.Namespace
        Must provide slices, radius (arcmin), min_days and max_days.

    Returns
    -------
    set of ZTF objectIds passing all the cuts.
    '''

    k = Kowalski(username=username, password=password, verbose=False)
    # Cumulative results over all slices
    set_objectId_all = set([])

    # Slice boundaries, computed once (the original rebuilt the linspace
    # array on every loop iteration).
    edges = np.linspace(0, len(clu), args.slices)[:-1]

    for i, slice_lim in enumerate(edges):
        try:
            t = clu[int(slice_lim):int(edges[i + 1])]
        except IndexError:
            # Last slice: take everything up to the end.
            t = clu[int(slice_lim):]
        coords_arr = []
        galaxy_names_arr = []
        for galaxy, ra, dec in zip(t["name"], t["ra"], t["dec"]):
            try:
                coords = SkyCoord(ra=ra * u.deg, dec=dec * u.deg)
                coords_arr.append((coords.ra.deg, coords.dec.deg))
            except ValueError:
                # The original dropped into pdb.set_trace() here (debug
                # leftover); now report and skip the galaxy.
                print("Problems with the galaxy coordinates?")
                continue
            galaxy_names_arr.append(galaxy)

        # Progress report (narrowed from a bare except: only the last
        # slice has no upper edge).
        if i + 1 < len(edges):
            print(f"slice: {int(slice_lim)}:{int(edges[i + 1])}")
        else:
            print(f"slice: {int(slice_lim)}:{int(len(clu))}")

        # Duplicate projection keys from the original
        # (candidate.jdendhist, candidate.ndethist) have been removed.
        q = {"query_type": "cone_search",
             "object_coordinates": {
                 "radec": f"{coords_arr}",
                 "cone_search_radius": f"{args.radius}",
                 "cone_search_unit": "arcmin"
             },
             "catalogs": {
                 "ZTF_alerts": {
                     "filter": {
                         "candidate.ndethist": {'$gt': 1},
                         "candidate.rb": {'$gt': 0.2}
                     },
                     "projection": {
                         "objectId": 1,
                         "candidate.rcid": 1,
                         "candidate.ra": 1,
                         "candidate.dec": 1,
                         "candidate.jd": 1,
                         "candidate.ndethist": 1,
                         "candidate.jdstarthist": 1,
                         "candidate.jdendhist": 1,
                         "candidate.magpsf": 1,
                         "candidate.sigmapsf": 1,
                         "candidate.fid": 1,
                         "candidate.programid": 1,
                         "candidate.isdiffpos": 1,
                         "candidate.ssdistnr": 1,
                         "candidate.rb": 1,
                         "candidate.drb": 1,
                         "candidate.distpsnr1": 1,
                         "candidate.sgscore1": 1,
                         "candidate.srmag1": 1,
                         "candidate.distpsnr2": 1,
                         "candidate.sgscore2": 1,
                         "candidate.srmag2": 1,
                         "candidate.distpsnr3": 1,
                         "candidate.sgscore3": 1,
                         "candidate.srmag3": 1
                     }
                 }
             }
             }

        # Perform the query
        r = k.query(query=q)
        print('Search completed for this slice.')

        # Identify 'objectId' for all relevant candidates
        objectId_list = []
        with_neg_sub = []
        old = []
        stellar_list = []
        try:
            keys_list = list(r['result_data']['ZTF_alerts'].keys())
        except AttributeError:
            # Bug fix: the original printed a warning and dropped into
            # pdb.set_trace(), after which keys_list was undefined and the
            # loop below raised NameError.  Skip the slice instead.
            print("Error in the keys list?? Check 'r' ")
            continue
        for key in keys_list:
            all_info = r['result_data']['ZTF_alerts'][key]
            for info in all_info:
                # Skip objects already flagged as old or stellar.
                if info['objectId'] in old:
                    continue
                if info['objectId'] in stellar_list:
                    continue
                # drb is not always present; narrowed from a bare except.
                try:
                    if info['candidate']['drb'] < 0.5:
                        continue
                except (KeyError, TypeError):
                    pass
                # Reject likely solar-system matches (< 10 arcsec).
                if np.abs(info['candidate']['ssdistnr']) < 10:
                    continue
                # Track negative subtractions; removed below.
                if info['candidate']['isdiffpos'] in ['f', 0]:
                    with_neg_sub.append(info['objectId'])
                if (info['candidate']['jdendhist'] -
                        info['candidate']['jdstarthist']) < args.min_days:
                    continue
                if (info['candidate']['jdendhist'] -
                        info['candidate']['jdstarthist']) > args.max_days:
                    old.append(info['objectId'])
                # Flag probable stars: close PS1 match.
                try:
                    if (np.abs(info['candidate']['distpsnr1']) < 1.
                            and info['candidate']['sgscore1'] > 0.0):
                        stellar_list.append(info['objectId'])
                except (KeyError, TypeError):
                    pass
                # Reject candidates near a bright (r < 15) PS1 star.
                try:
                    if (np.abs(info['candidate']['distpsnr1']) < 15.
                            and info['candidate']['srmag1'] < 15.
                            and info['candidate']['srmag1'] > 0.
                            and info['candidate']['sgscore1'] >= 0.5):
                        continue
                except (KeyError, TypeError):
                    pass
                try:
                    if (np.abs(info['candidate']['distpsnr2']) < 15.
                            and info['candidate']['srmag2'] < 15.
                            and info['candidate']['srmag2'] > 0.
                            and info['candidate']['sgscore2'] >= 0.5):
                        continue
                except (KeyError, TypeError):
                    pass
                try:
                    if (np.abs(info['candidate']['distpsnr3']) < 15.
                            and info['candidate']['srmag3'] < 15.
                            and info['candidate']['srmag3'] > 0.
                            and info['candidate']['sgscore3'] >= 0.5):
                        continue
                except (KeyError, TypeError):
                    pass

                objectId_list.append(info['objectId'])

        set_objectId = set(objectId_list)

        # Remove negative-subtraction, stellar, and old objects.
        # difference_update replaces the original remove-inside-try loops.
        for reject in (with_neg_sub, stellar_list, old):
            set_objectId.difference_update(reject)

        print(set_objectId)

        set_objectId_all = set_objectId_all | set_objectId
        print("Cumulative:", set_objectId_all)

    return set_objectId_all
Esempio n. 11
0
from ...models import (
    DBSession,
    Obj,
    Source,
    Candidate,
    User,
    Token,
    Group,
    Spectrum,
    CronJobRun,
)

# Load the application config and build a module-level Kowalski client
# shared by the handlers in this module.
_, cfg = load_env()
kowalski = Kowalski(
    # Token-based auth, configured under the "app.kowalski" config keys.
    token=cfg["app.kowalski.token"],
    protocol=cfg["app.kowalski.protocol"],
    host=cfg["app.kowalski.host"],
    port=int(cfg["app.kowalski.port"]),
)


class StatsHandler(BaseHandler):
    @permissions(["System admin"])
    def get(self):
        """
        ---
        description: Retrieve basic DB statistics
        tags:
          - system_info
        responses:
          200:
            content:
Esempio n. 12
0
import csv

# Load the training-set CSV named on the command line.
parser = argparse.ArgumentParser()
parser.add_argument("inputfile")
parser.add_argument("--id", type=int, default=1, help="group id on Fritz")
args = parser.parse_args()
# Bug fix: the original wrapped csv.reader rows in pd.DataFrame, which
# yields integer column labels (and keeps the header as data row 0), so
# the trainingset['ztf_id'] lookup below raised KeyError.  read_csv
# parses the header row into named columns.
trainingset = pd.read_csv(args.inputfile)

# Kowalski credentials: first line username, second line password.
with open('password.txt', 'r') as f:
    password = f.read().splitlines()
G = Kowalski(username=password[0],
             password=password[1],
             host='gloria.caltech.edu',
             timeout=1000)

# get scores and data and combine
scores = get_highscoring_objects(G, otype='vnv', condition='$or')

index = scores.index
# Flag strong disagreements between the two classifiers: one scores
# above 0.95 while the other scores at or below 0.1.
condition = ((scores["vnv_dnn"] > 0.95) &
             (scores['vnv_xgb'] <= 0.1)) | ((scores["vnv_dnn"] <= 0.1) &
                                            (scores['vnv_xgb'] > 0.95))
disagreements = index[condition]
disagreeing_scores = scores.iloc[disagreements, :]

# Attach per-object stats and mark which objects are in the training set.
stats = get_stats(G, disagreeing_scores['_id'])
data = pd.merge(disagreeing_scores, stats, left_on='_id', right_on='_id')
data['train'] = np.isin(data['_id'], trainingset['ztf_id'])