def main(argv=None):
    opts = docopt(__doc__, version='0.1')
    opts = cleanOptions(opts)

    # Use utils.Struct to convert the dict into an object for compatibility with old optparse code.
    options = Struct(**opts)
    ingestDataMultiprocess(options)
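The Struct helper referenced in the comment above is not shown on this page; a minimal sketch consistent with how these examples use it (attribute access over the cleaned docopt dict) would be:

class Struct:
    """Minimal sketch of utils.Struct: expose a dict's keys as attributes."""
    def __init__(self, **entries):
        self.__dict__.update(entries)

# e.g. options = Struct(**{'debug': True}); options.debug -> True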
Example #2
def main(argv=None):
    opts = docopt(__doc__, version='0.1')
    opts = cleanOptions(opts)

    # Use utils.Struct to convert the dict into an object for compatibility with old optparse code.
    options = Struct(**opts)

    fkDict = {}
    # If we have a foreign key table, read the data once only.  Pass this to the subprocesses.
    if options.fktable:
        fkeys = readGenericDataFile(options.fktable, delimiter='\t')
        for row in fkeys:
            fkDict[row[options.fkfield]] = row

    ingestDataMultiprocess(options, fkDict=fkDict)
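readGenericDataFile is imported from elsewhere; judging by how rows are indexed by column name with a configurable delimiter, a plausible sketch built on csv.DictReader (the real helper may differ) is:

import csv

def readGenericDataFile(filename, delimiter='\t'):
    # Sketch only: return a list of dicts keyed by the header row,
    # matching the row[options.fkfield] and row['ra'] style access above.
    with open(filename) as f:
        return list(csv.DictReader(f, delimiter=delimiter))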
Example #3
def main(argv=None):
    opts = docopt(__doc__, version='0.1')
    opts = cleanOptions(opts)

    # Use utils.Struct to convert the dict into an object for compatibility with old optparse code.
    options = Struct(**opts)

    atlasCentres = readGenericDataFile(options.atlasCentresFile, delimiter='\t')
    atlasRowLen = len(atlasCentres[0])
    inputCoords = readGenericDataFile(options.inputCoordsFile, delimiter=',')

    radius = 3.86
    try:
        radius = float(options.searchradius)
    except (ValueError, TypeError):
        # Keep the default radius if no usable value was supplied.
        pass

    if options.footprints:
        for row in inputCoords:
            if options.debug:
                print(row)
            try:
                ra = float(row['ra'])
                dec = float(row['dec'])
            except ValueError:
                ra, dec = coords_sex_to_dec(row['ra'], row['dec'])

            for r in atlasCentres:
                if isObjectInsideATLASFootprint(ra, dec, float(r['ra']), float(r['dec'])):
                    if options.checkmjd:
                        if abs(float(r['mjd']) - float(row['mjd'])) < float(options.mjdtolerance):
                            matches = doRegexMatch(r['expname'])
                            if matches:
                                red = ''
                                if options.red:
                                    red = '/atlas/red/' + matches['camera'] + '/' + matches['mjd'] + '/' + r['expname'] + '.fits.fz'
                                    print(row['name'], red)
                                else:
                                    print(row['name'], r['expname'])
                            else:
                                print(row['name'], r['expname'])

                    else:
                        matches = doRegexMatch(r['expname'])
                        if matches:
                            red = ''
                            if options.red:
                                red = '/atlas/red/' + matches['camera'] + '/' + matches['mjd'] + '/' + r['expname'] + '.fits.fz'
                                print(row['name'], red)
                            else:
                                print(row['name'], r['expname'])
                        else:
                            print(row['name'], r['expname'])

    else:
        for row in inputCoords:
            if options.debug:
                print(row)
            try:
                ra = float(row['ra'])
                dec = float(row['dec'])
            except ValueError:
                ra, dec = coords_sex_to_dec(row['ra'], row['dec'])

            header, results = bruteForceGenericConeSearch(options.atlasCentresFile, [[ra, dec]], radius*3600.0, raIndex='ra', decIndex='dec')
            for r in results:
                if options.checkmjd:
                    exps = r.split()
                    if abs(float(exps[3]) - float(row['mjd'])) < float(options.mjdtolerance):
                        matches = doRegexMatch(exps[0])
                        if matches:
                            red = ''
                            if options.red:
                                red = '/atlas/red/' + matches['camera'] + '/' + matches['mjd'] + '/' + exps[0] + '.fits.fz'
                                print(row['name'], red, "%.2f" % (float(exps[atlasRowLen+1])/3600.0))
                            else:
                                print(row['name'], exps[0], "%.2f" % (float(exps[atlasRowLen+1])/3600.0))
                        else:
                            print(row['name'], exps[0], "%.2f" % (float(exps[atlasRowLen+1])/3600.0))
                else:
                    exps = r.split()
                    matches = doRegexMatch(exps[0])
                    if matches:
                        red = ''
                        if options.red:
                            red = '/atlas/red/' + matches['camera'] + '/' + matches['mjd'] + '/' + exps[0] + '.fits.fz'
                            print(row['name'], red, "%.2f" % (float(exps[atlasRowLen+1])/3600.0))
                        else:
                            print(row['name'], exps[0], "%.2f" % (float(exps[atlasRowLen+1])/3600.0))
                    else:
                        print(row['name'], exps[0], "%.2f" % (float(exps[atlasRowLen+1])/3600.0))
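coords_sex_to_dec is the fallback when float() fails on sexagesimal input; a hedged sketch of such a converter, assuming colon-separated HH:MM:SS / ±DD:MM:SS strings, is:

def coords_sex_to_dec(raSex, decSex):
    # Sketch: convert sexagesimal coordinates to decimal degrees.
    h, m, s = [float(x) for x in raSex.split(':')]
    ra = 15.0 * (h + m / 60.0 + s / 3600.0)  # 1 hour of RA = 15 degrees
    sign = -1.0 if decSex.strip().startswith('-') else 1.0
    d, m, s = [abs(float(x)) for x in decSex.split(':')]
    dec = sign * (d + m / 60.0 + s / 3600.0)
    return ra, dec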
Example #4
def main():
    """main.
    """
    opts = docopt(__doc__, version='0.1')
    opts = cleanOptions(opts)
    options = Struct(**opts)

    configFile = options.configfile

    import yaml
    with open(configFile) as yaml_file:
        config = yaml.safe_load(yaml_file)

    username = config['databases']['local']['username']
    password = config['databases']['local']['password']
    database = config['databases']['local']['database']
    hostname = config['databases']['local']['hostname']

    detectionList = 1
    customList = None

    conn = dbConnect(hostname, username, password, database)

    update = options.update
    limit = int(options.limit)
    mostRecent = not options.earliest
    nondetections = options.nondetections
    discoverylimit = int(options.discoverylimit)
    lastdetectionlimit = int(options.lastdetectionlimit)

    try:
        requestType = REQUESTTYPES[options.requesttype]
    except KeyError:
        requestType = REQUESTTYPES['incremental']

    objectList = []

    if options.candidate is not None and len(options.candidate) > 0:
        for cand in options.candidate:
            objectList.append({'id': int(cand)})
    else:

        if options.customlist is not None:
            if int(options.customlist) > 0 and int(options.customlist) < 100:
                customList = int(options.customlist)
                objectList = getObjectsByCustomList(conn, customList)
            else:
                print("The list must be between 1 and 99 inclusive.  Exiting.")
                sys.exit(1)
        else:
            if options.detectionlist is not None:
                if int(options.detectionlist) >= 0 and int(
                        options.detectionlist) < 9:
                    detectionList = int(options.detectionlist)
                    objectList = getObjectsByList(conn, listId=detectionList)
                else:
                    print("The list must be between 0 and 8 inclusive.  Exiting.")
                    sys.exit(1)

    print("LENGTH OF OBJECTLIST = ", len(objectList))

    PSSImageRootLocation = '/' + hostname + '/images/' + database

    #exposureSet = getUniqueExposures(conn, objectList, limit = limit, mostRecent = mostRecent)
    # Only download exposures if requested. Otherwise assume we already HAVE the data.
    if not options.skipdownload:
        exposureSet = getUniqueExposures(conn,
                                         objectList,
                                         limit=limit,
                                         mostRecent=mostRecent,
                                         nonDets=nondetections,
                                         discoveryLimit=discoverylimit,
                                         lastDetectionLimit=lastdetectionlimit,
                                         ddc=options.ddc)
        exposureSet.sort()
        for row in exposureSet:
            print(row)
        downloadExposures(exposureSet)

    makeATLASObjectPostageStamps3(conn,
                                  objectList,
                                  PSSImageRootLocation,
                                  limit=limit,
                                  mostRecent=mostRecent,
                                  nonDets=nondetections,
                                  discoveryLimit=discoverylimit,
                                  lastDetectionLimit=lastdetectionlimit,
                                  requestType=requestType,
                                  ddc=options.ddc,
                                  wpwarp=options.wpwarp,
                                  options=options)

    conn.close()
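dbConnect is assumed to hand back a DB-API connection to the MySQL database named in the YAML config; a minimal sketch using pymysql (the project's real helper may set extra options) is:

import pymysql

def dbConnect(hostname, username, password, database):
    # Sketch only: open a plain DB-API connection.
    return pymysql.connect(host=hostname, user=username,
                           password=password, database=database)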
Example #5
def test_me():

    # Setup the test - read the test data from the input file. Connect to the database.
    def setup(options):
        inputData = readGenericDataFile(options.filename)
        return inputData

    # Exercise the code - insert the test data
    def run(options, inputData):
        import yaml
        with open(options.configFile) as yaml_file:
            config = yaml.safe_load(yaml_file)

        username = config['cassandra']['local']['username']
        password = config['cassandra']['local']['password']
        keyspace = config['cassandra']['local']['keyspace']
        hostname = config['cassandra']['local']['hostname']

        db = {
            'username': username,
            'password': password,
            'keyspace': keyspace,
            'hostname': hostname
        }

        # Get n lightcurves. Consider doing this in parallel for a proper test.
        # As an initial test, run it single threaded.

        # We have the inputData, get a random subset.
        subset = inputData
        if len(inputData) > int(options.number):
            subset = random.sample(inputData, int(options.number))

        if int(options.nprocesses) > 1 and len(subset) > 1:
            # Do it in parallel!
            currentDate = datetime.now().strftime("%Y:%m:%d:%H:%M:%S")
            (year, month, day, hour, min, sec) = currentDate.split(':')
            dateAndTime = "%s%s%s_%s%s%s" % (year, month, day, hour, min, sec)
            nProcessors, listChunks = splitList(subset,
                                                bins=int(options.nprocesses),
                                                preserveOrder=True)

            print("%s Parallel Processing..." %
                  (datetime.now().strftime("%Y:%m:%d:%H:%M:%S")))
            parallelProcess(db,
                            dateAndTime,
                            nProcessors,
                            listChunks,
                            worker,
                            miscParameters=[options],
                            drainQueues=False)
            print("%s Done Parallel Processing" %
                  (datetime.now().strftime("%Y:%m:%d:%H:%M:%S")))
        else:
            cluster = Cluster(db['hostname'])
            session = cluster.connect()
            session.row_factory = dict_factory
            session.set_keyspace(db['keyspace'])

            lightcurves = getLCByObject(options, session, subset)
            #            for k,v in lightcurves.items():
            #                print(k, v)

            cluster.shutdown()


#        lightcurves = getLCByObject(options, session, subset)
#        lightcurves = {}
#        for row in subset:
#            # Turn off paging by default. Default page size is 5000.
#            simple_statement = SimpleStatement("select * from candidates where objectId = %s;", consistency_level=ConsistencyLevel.ONE, fetch_size=None)
#            # Can only iterate once through the output data. Store in a list.
#            outputData = list(session.execute(simple_statement, (row['objectId'],)))
#            lightcurves[row['objectId']] = outputData

#        for k,v in lightcurves.items():
#            print(k, v)

#        cluster.shutdown()

# Verify the test - read the test data from the database
#    def verify(options, inputData):
#        db = {'hostname': options.hostname, 'keyspace': options.keyspace}
#        cluster = Cluster(db['hostname'])
#        session = cluster.connect()
#        session.row_factory = dict_factory
#        session.set_keyspace(db['keyspace'])
#
#        # Turn off paging by default. Default page size is 5000.
#        simple_statement = SimpleStatement("select * from test_noncandidates;", consistency_level=ConsistencyLevel.ONE, fetch_size=None)
#        # Can only iterate once through the output data. Store in a list.
#        outputData = list(session.execute(simple_statement))
#
#        sortedinputData = sorted(inputData, key=lambda d: (d['objectId'], d['jd']))
#        sortedoutputData = sorted(outputData, key=lambda d: (d['objectid'], d['jd']))
#
#        inputArrayFid = array([int(x['fid']) for x in sortedinputData])
#        outputArrayFid = array([x['fid'] for x in sortedoutputData])
#
#        inputArrayObjectId = array([x['objectId'] for x in sortedinputData])
#        outputArrayObjectId = array([x['objectid'] for x in sortedoutputData])
#
#        inputArrayJd = array([float(x['jd']) for x in sortedinputData])
#        outputArrayJd = array([x['jd'] for x in sortedoutputData])
#
#        inputArrayDiffmaglim = array([float(x['diffmaglim']) for x in sortedinputData])
#        outputArrayDiffmaglim = array([x['diffmaglim'] for x in sortedoutputData])
#
#        assert_array_equal(inputArrayFid, outputArrayFid)
#        assert_array_equal(inputArrayObjectId, outputArrayObjectId)
#        assert_array_almost_equal(inputArrayJd, outputArrayJd)
#        assert_array_almost_equal(inputArrayDiffmaglim, outputArrayDiffmaglim)
#
#        # Yeah, I need to setup the verification. Let's get the thing executing first.
#        # Executes from the command line, but will not execute in pytest.
#
#        cluster.shutdown()

#    # Cleanup - truncate the test table. Disconnect from the database.
#    def cleanup(options):
#        db = {'hostname': options.hostname, 'keyspace': options.keyspace}
#        cluster = Cluster(db['hostname'])
#        session = cluster.connect()
#        session.set_keyspace(db['keyspace'])
#
#        session.execute("truncate table test_noncandidates;")
#
#        cluster.shutdown()

    opts = docopt(__doc__, version='0.1')
    opts = cleanOptions(opts)
    options = Struct(**opts)

    testData = setup(options)
    run(options, testData)
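splitList is used throughout these examples and returns a (number of processes, list of chunks) pair; a hedged sketch matching the call sites (bins defaults to the CPU count, preserveOrder keeps each chunk contiguous) is:

import multiprocessing

def splitList(inputList, bins=None, preserveOrder=False):
    # Sketch only: split inputList into at most `bins` non-empty chunks.
    if not inputList:
        return 0, []
    if bins is None:
        bins = multiprocessing.cpu_count()
    n = min(bins, len(inputList))
    if preserveOrder:
        size = -(-len(inputList) // n)  # ceiling division
        chunks = [inputList[i:i + size] for i in range(0, len(inputList), size)]
    else:
        chunks = [inputList[i::n] for i in range(n)]
    return len(chunks), chunks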
Example #6
def main(argv=None):
    """main.
    """
    opts = docopt(__doc__, version='0.1')
    opts = cleanOptions(opts)
    options = Struct(**opts)

    configFile = options.configfile

    import yaml
    with open(configFile) as yaml_file:
        config = yaml.safe_load(yaml_file)

    username = config['databases']['local']['username']
    password = config['databases']['local']['password']
    database = config['databases']['local']['database']
    hostname = config['databases']['local']['hostname']

    detectionList = 1
    customList = None

    conn = dbConnect(hostname, username, password, database)

    update = options.update
    limit = int(options.limit)
    limitafter = int(options.limitafter)

    objectList = []

    if options.candidate is not None and len(options.candidate) > 0:
        for cand in options.candidate:
            obj = getATLASObject(conn, objectId=int(cand))
            if obj:
                objectList.append(obj)

    else:

        if options.customlist is not None:
            if int(options.customlist) > 0 and int(options.customlist) < 100:
                customList = int(options.customlist)
                objectList = getObjectsByCustomList(conn,
                                                    customList,
                                                    processingFlags=0)
            else:
                print("The list must be between 1 and 99 inclusive.  Exiting.")
                sys.exit(1)
        else:
            if options.detectionlist is not None:
                if int(options.detectionlist) >= 0 and int(
                        options.detectionlist) < 9:
                    detectionList = int(options.detectionlist)
                    objectList = getObjectsByList(conn,
                                                  listId=detectionList,
                                                  processingFlags=0)
                else:
                    print("The list must be between 0 and 8 inclusive.  Exiting.")
                    sys.exit(1)

    print("LENGTH OF OBJECTLIST = ", len(objectList))

    perObjectExps, allExps = getForcedPhotometryUniqueExposures(
        conn,
        objectList,
        discoveryLimit=limit,
        cutoffLimit=limitafter,
        incremental=True,
        ddc=options.ddc,
        useFlagDate=options.useflagdate)
    if options.test:
        for obj in objectList:
            print(obj['id'])
            for exp in perObjectExps[obj['id']]['exps']:
                print(exp)

        return 0

    if not options.skipdownload:
        doRsync(allExps, 'diff')
        doRsync(allExps, 'red', getMetadata=True, metadataExtension='.tph')

    fphot = doForcedPhotometry(options, objectList, perObjectExps)

    if options.update:
        insertForcedPhotometry(conn, fphot)
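doRsync is not defined on this page; a sketch of the assumed behaviour (pull each exposure of the named reduction type, optionally with its metadata file) follows. The remote host and directory layout here are hypothetical placeholders, not the project's real paths:

import subprocess

def doRsync(exposures, expType, getMetadata=False, metadataExtension='.tph'):
    # Sketch only: 'atlas-server:/atlas' and the local layout are hypothetical.
    for exp in exposures:
        remote = 'atlas-server:/atlas/%s/%s.fits.fz' % (expType, exp)
        subprocess.run(['rsync', '-a', remote, './%s/' % expType], check=False)
        if getMetadata:
            meta = remote.replace('.fits.fz', metadataExtension)
            subprocess.run(['rsync', '-a', meta, './%s/' % expType], check=False)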
Example #7
def main(argv=None):
    """main.

    Args:
        argv:
    """

    opts = docopt(__doc__, version='0.1')
    opts = cleanOptions(opts)
    options = Struct(**opts)

    configFile = options.configfile

    import yaml
    with open(configFile) as yaml_file:
        config = yaml.safe_load(yaml_file)

    username = config['databases']['local']['username']
    password = config['databases']['local']['password']
    database = config['databases']['local']['database']
    hostname = config['databases']['local']['hostname']

    MAX_NUMBER_OF_OBJECTS = int(
        config['postage_stamp_parameters']['max_number_of_objects'])

    db = [username, password, database, hostname]

    detectionList = 1
    customList = None

    conn = dbConnect(hostname, username, password, database)

    update = options.update
    limit = int(options.limit)
    limitafter = int(options.limitafter)

    mlscore = None
    if options.mlscore is not None:
        mlscore = float(options.mlscore)

    objectList = []

    flagDate = '2015-12-20'
    if options.flagdate is not None:
        try:
            flagDate = '%s-%s-%s' % (options.flagdate[0:4],
                                     options.flagdate[4:6],
                                     options.flagdate[6:8])
        except Exception:
            flagDate = '2015-12-20'

    if options.candidate is not None and len(options.candidate) > 0:
        for cand in options.candidate:
            obj = getATLASObject(conn, objectId=int(cand))
            if obj:
                objectList.append(obj)
    else:

        if options.customlist is not None:
            if int(options.customlist) > 0 and int(options.customlist) < 100:
                customList = int(options.customlist)
                objectList = getObjectsByCustomList(conn,
                                                    customList,
                                                    processingFlags=0)
            else:
                print("The list must be between 1 and 99 inclusive.  Exiting.")
                sys.exit(1)
        else:
            if options.detectionlist is not None:
                if int(options.detectionlist) >= 0 and int(
                        options.detectionlist) < 9:
                    detectionList = int(options.detectionlist)
                    objectList = getObjectsByList(conn,
                                                  listId=detectionList,
                                                  dateThreshold=flagDate,
                                                  processingFlags=0)
                else:
                    print("The list must be between 0 and 8 inclusive.  Exiting.")
                    sys.exit(1)

    print("LENGTH OF OBJECTLIST = ", len(objectList))

    # Only apply the ML-score filter if the IDs were not provided explicitly.
    if mlscore is not None and not options.candidate:
        updatedList = []
        for row in objectList:
            if row['zooniverse_score'] is not None and row['zooniverse_score'] >= mlscore:
                updatedList.append(row)
        if len(updatedList) > 0:
            objectList = updatedList
            print("LENGTH OF CLIPPED OBJECTLIST = ", len(objectList))

    currentDate = datetime.datetime.now().strftime("%Y:%m:%d:%H:%M:%S")
    (year, month, day, hour, min, sec) = currentDate.split(':')
    dateAndTime = "%s%s%s_%s%s%s" % (year, month, day, hour, min, sec)

    # Single threaded
    #perObjectExps, exposureSet = getForcedPhotometryUniqueExposures(conn, objectList, discoveryLimit = limit, ddc = options.ddc, useFlagDate = options.useflagdate)
    perObjectExps, exposureSet = getForcedPhotometryUniqueExposures(
        conn,
        objectList,
        discoveryLimit=limit,
        cutoffLimit=limitafter,
        ddc=options.ddc,
        useFlagDate=options.useflagdate)
    if options.test:
        for obj in objectList:
            print(obj['id'])
            for exp in perObjectExps[obj['id']]['exps']:
                print(exp)
        return 0
    # We'll hand the entire perObjectExps dictionary to each thread.

    # Download threads with multiprocessing - try 10 threads by default
    print("TOTAL OBJECTS = %d" % len(exposureSet))

    print("Downloading exposures...")

    if not options.skipdownload:
        if len(exposureSet) > 0:
            nProcessors, listChunks = splitList(exposureSet,
                                                bins=int(
                                                    options.downloadthreads))

            print("%s Parallel Processing..." %
                  (datetime.datetime.now().strftime("%Y:%m:%d:%H:%M:%S")))
            parallelProcess(db,
                            dateAndTime,
                            nProcessors,
                            listChunks,
                            workerExposureDownloader,
                            miscParameters=[options],
                            drainQueues=False)
            print("%s Done Parallel Processing" %
                  (datetime.datetime.now().strftime("%Y:%m:%d:%H:%M:%S")))

            # Belt and braces - try again with one less thread, just in case the previous one failed.
            nProcessors, listChunks = splitList(
                exposureSet, bins=int(options.downloadthreads) - 1)

            print("%s Parallel Processing..." %
                  (datetime.datetime.now().strftime("%Y:%m:%d:%H:%M:%S")))
            parallelProcess(db,
                            dateAndTime,
                            nProcessors,
                            listChunks,
                            workerExposureDownloader,
                            miscParameters=[options],
                            drainQueues=False)
            print("%s Done Parallel Processing" %
                  (datetime.datetime.now().strftime("%Y:%m:%d:%H:%M:%S")))

    # Produce stamps with multiprocessing - try n(CPUs) threads by default
    print("Doing Forced Photometry...")

    if len(objectList) > 0:
        nProcessors, listChunks = splitList(objectList)

        print("%s Parallel Processing..." %
              (datetime.datetime.now().strftime("%Y:%m:%d:%H:%M:%S")))
        objectsForUpdate = parallelProcess(
            db,
            dateAndTime,
            nProcessors,
            listChunks,
            workerForcedPhotometry,
            miscParameters=[options, perObjectExps])
        print("%s Done Parallel Processing" %
              (datetime.datetime.now().strftime("%Y:%m:%d:%H:%M:%S")))
        if len(objectsForUpdate) > 0 and update:
            insertForcedPhotometry(conn, objectsForUpdate)

    conn.close()

    return 0
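As an aside, the split-and-rejoin timestamp idiom used above (and in several other examples) is equivalent to a single strftime call:

import datetime

# Same result as formatting "%Y:%m:%d:%H:%M:%S", splitting on ':' and re-joining:
dateAndTime = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")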
Example #8
def main():
    opts = docopt(__doc__, version='0.1')
    opts = cleanOptions(opts)
    options = Struct(**opts)

    configFile = options.configfile
    regex = options.regex

    import yaml
    with open(configFile) as yaml_file:
        config = yaml.safe_load(yaml_file)

    username = config['databases']['local']['username']
    password = config['databases']['local']['password']
    database = config['databases']['local']['database']
    hostname = config['databases']['local']['hostname']

    db = [username, password, database, hostname]

    conn = dbConnect(hostname, username, password, database)

    warnings.filterwarnings("ignore")

    # Parse command line

    currentDate = datetime.datetime.now().strftime("%Y:%m:%d:%H:%M:%S")
    (year, month, day, hour, min, sec) = currentDate.split(':')
    dateAndTime = "%s%s%s_%s%s%s" % (year, month, day, hour, min, sec)

    pid = int(options.pid)
    maxjobs = int(options.maxjobs)
    days = int(options.days)
    camera = options.camera
    try:
        mjdToIngest = options.mjd
    except TypeError:
        mjdToIngest = None

    print("camera =", camera)
    print("regex =", regex)

    todayMJD = getCurrentMJD()

    # Use + 1 to include today!
    mjdthreshold = int(todayMJD) - days + 1

    # Specified MJD trumps mjd Threshold, so just go as far back
    # as the specified date
    if mjdToIngest:
        mjdthreshold = int(mjdToIngest[0:5]) - 1

    ingester = options.ingester

    fileList = getFiles(regex,
                        camera,
                        mjdToIngest=mjdToIngest,
                        mjdthreshold=mjdthreshold,
                        days=days,
                        atlasroot=options.atlasroot,
                        options=options)
    ingestedFiles = getFilesIngestedddc2(conn,
                                         mjdthreshold=mjdthreshold,
                                         camera=camera)

    fileListDict = OrderedDict()

    print("List of files...")
    for row in fileList:
        fileListDict[os.path.basename(row)] = row
        print(row)

    print("List of ingested files...")
    for row in ingestedFiles:
        print(row)

    filesToIngest = [
        fileListDict[x]
        for x in list(set(fileListDict.keys()) - set(ingestedFiles))
    ]
    filesToIngest.sort()

    print("List of files to ingest...")
    for row in filesToIngest:
        print(row)

    print("TOTAL OBJECTS TO CHECK = %d" % len(filesToIngest))

    if len(fileList) > 0:
        # 2018-02-06 KWS Use half the default number of processes. This may ironically speed up ingest.
        nProcessors, listChunks = splitList(filesToIngest, bins=28)

        print("%s Parallel Processing..." %
              (datetime.datetime.now().strftime("%Y:%m:%d:%H:%M:%S")))
        parallelProcess(db,
                        dateAndTime,
                        nProcessors,
                        listChunks,
                        worker,
                        miscParameters=[options],
                        drainQueues=False)
        print("%s Done Parallel Processing" %
              (datetime.datetime.now().strftime("%Y:%m:%d:%H:%M:%S")))

    conn.close()
    return 0
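getCurrentMJD is not shown; the Modified Julian Date relates to Unix time by a fixed offset (the Unix epoch is MJD 40587), so a plausible sketch is:

import time

def getCurrentMJD():
    # 1970-01-01 00:00:00 UTC corresponds to MJD 40587.
    return time.time() / 86400.0 + 40587.0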
Example #9
def main(argv=None):
    opts = docopt(__doc__, version='0.1')
    opts = cleanOptions(opts)

    # Use utils.Struct to convert the dict into an object for compatibility with old optparse code.
    options = Struct(**opts)

    #keyspace = 'atlas'
    #host = ['db0', 'db1', 'db2', 'db3', 'db4']

    # random star
    #ra = 83.20546
    #dec = -20.70055

    # ATLAS17nij
    #ra = 82.46704
    #dec = -19.52058

    # ATLAS20biio
    #ra = 83.24691
    #dec = -19.11739

    # ATLAS20bbio - very good!!
    #ra = 81.27903
    #dec = -21.24643

    # ATLAS18vre
    #ra = 84.19551
    #dec = -22.41100

    # ATLAS19bdbm
    #ra = 85.10436
    #dec = -18.09766

    # ATLAS20bbff
    #ra = 86.52075
    #dec = -23.56601

    # ATLAS20ymv - THIS IS the CENTRE OBJECT. We did a 10 degree sweep around this.
    #ra = 74.55677
    #dec = -20.35753

    # ATLAS17lvn - bright foreground star
    #ra = 68.75953
    #dec = -14.22797

    import yaml
    with open(options.configFile) as yaml_file:
        config = yaml.safe_load(yaml_file)

    username = config['cassandra']['local']['username']
    password = config['cassandra']['local']['password']
    keyspace = config['cassandra']['local']['keyspace']
    hostname = config['cassandra']['local']['hostname']

    db = {
        'username': username,
        'password': password,
        'keyspace': keyspace,
        'hostname': hostname
    }

    coordslist = []

    if options.coordsfromfile:
        coordslist = readGenericDataFile(options.coords, delimiter=',')
    else:
        coordslist.append({
            'ra': options.coords.split(',')[0],
            'dec': options.coords.split(',')[1]
        })

    if options.number and int(options.number) < len(coordslist):
        coordslist = random.sample(coordslist, int(options.number))

    if int(options.nprocesses) > 1 and len(coordslist) > 1:
        # Do it in parallel!
        currentDate = datetime.now().strftime("%Y:%m:%d:%H:%M:%S")
        (year, month, day, hour, min, sec) = currentDate.split(':')
        dateAndTime = "%s%s%s_%s%s%s" % (year, month, day, hour, min, sec)
        nProcessors, listChunks = splitList(coordslist,
                                            bins=int(options.nprocesses),
                                            preserveOrder=True)

        print("%s Parallel Processing..." %
              (datetime.now().strftime("%Y:%m:%d:%H:%M:%S")))
        parallelProcess(db,
                        dateAndTime,
                        nProcessors,
                        listChunks,
                        worker,
                        miscParameters=[options],
                        drainQueues=False)
        print("%s Done Parallel Processing" %
              (datetime.now().strftime("%Y:%m:%d:%H:%M:%S")))
    else:
        cluster = Cluster(db['hostname'])
        session = cluster.connect()
        session.row_factory = dict_factory
        session.set_keyspace(db['keyspace'])

        getLCData(options, session, coordslist)

        cluster.shutdown()
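Cluster, dict_factory and SimpleStatement in the Cassandra examples come from the DataStax Python driver; the imports these listings assume are:

from cassandra import ConsistencyLevel
from cassandra.cluster import Cluster
from cassandra.query import dict_factory, SimpleStatement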
Example #10
def main():
    opts = docopt(__doc__, version='0.1')
    opts = cleanOptions(opts)
    options = Struct(**opts)

    configFile = options.configfile
    regex = options.regex

    import yaml
    with open(configFile) as yaml_file:
        config = yaml.safe_load(yaml_file)

    username = config['databases']['local']['username']
    password = config['databases']['local']['password']
    database = config['databases']['local']['database']
    hostname = config['databases']['local']['hostname']

    conn = dbConnect(hostname, username, password, database)

    warnings.filterwarnings("ignore")

    # Parse command line

    currentDate = datetime.datetime.now().strftime("%Y:%m:%d:%H:%M:%S")
    (year, month, day, hour, min, sec) = currentDate.split(':')
    dateAndTime = "%s%s%s_%s%s%s" % (year, month, day, hour, min, sec)

    pid = int(options.pid)
    maxjobs = int(options.maxjobs)
    days = int(options.days)
    camera = options.camera
    try:
        mjdToIngest = options.mjd
    except TypeError:
        mjdToIngest = None

    print("camera =", camera)
    print("regex =", regex)

    todayMJD = getCurrentMJD()
    mjdthreshold = int(todayMJD) - days + 1

    # Specified MJD trumps mjd Threshold, so just go as far back
    # as the specified date
    if mjdToIngest:
        mjdthreshold = int(mjdToIngest[0:5]) - 1

    ingester = options.ingester

    fileList = getFiles(regex,
                        camera,
                        mjdToIngest=mjdToIngest,
                        mjdthreshold=mjdthreshold,
                        days=days,
                        options=options)
    ingestedFiles = getFilesIngestedddc2(conn,
                                         mjdthreshold=mjdthreshold,
                                         mjdToIngest=mjdToIngest,
                                         camera=camera)

    fileListDict = OrderedDict()

    print("List of files...")
    for row in fileList:
        fileListDict[os.path.basename(row)] = row
        print(row)

    print("List of ingested files...")
    for row in ingestedFiles:
        print(row)

    filesToIngest = [
        fileListDict[x]
        for x in list(set(fileListDict.keys()) - set(ingestedFiles))
    ]
    filesToIngest.sort()

    print("List of files to ingest...")
    for row in filesToIngest:
        print(row)

    print("TOTAL OBJECTS TO CHECK = %d" % len(filesToIngest))

    ingesterWrapper(ingester, configFile, filesToIngest)

    conn.close()
    return 0
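For the mjdToIngest handling above, the [0:5] slice assumes a five-digit MJD prefix (e.g. '58900' or '58900.25'); subtracting one pushes the threshold a day earlier so the specified day is fully included. A worked example, with a hypothetical value:

mjdToIngest = '58900.25'                  # hypothetical command-line value
mjdthreshold = int(mjdToIngest[0:5]) - 1  # -> 58899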
Example #11
def main():
    """main.
    """

    opts = docopt(__doc__, version='0.1')
    opts = cleanOptions(opts)
    options = Struct(**opts)

    configFile = options.configfile

    import yaml
    with open(configFile) as yaml_file:
        config = yaml.safe_load(yaml_file)

    username = config['databases']['local']['username']
    password = config['databases']['local']['password']
    database = config['databases']['local']['database']
    hostname = config['databases']['local']['hostname']

    MAX_NUMBER_OF_OBJECTS = int(
        config['postage_stamp_parameters']['max_number_of_objects'])

    db = [username, password, database, hostname]

    detectionList = 1
    customList = None

    conn = dbConnect(hostname, username, password, database)

    update = options.update
    limit = int(options.limit)
    mostRecent = not options.earliest
    nondetections = options.nondetections
    discoverylimit = int(options.discoverylimit)
    lastdetectionlimit = int(options.lastdetectionlimit)

    objectList = []

    try:
        requestType = REQUESTTYPES[options.requesttype]
    except KeyError:
        requestType = REQUESTTYPES['incremental']

    print("REQUEST TYPE = ", requestType)

    flagDate = '2015-12-20'
    if options.flagdate is not None:
        try:
            flagDate = '%s-%s-%s' % (options.flagdate[0:4],
                                     options.flagdate[4:6],
                                     options.flagdate[6:8])
        except:
            flagDate = '2015-12-20'

    if options.candidate is not None and len(options.candidate) > 0:
        for cand in options.candidate:
            objectList.append({'id': int(cand)})
    else:

        if options.customlist is not None:
            if int(options.customlist) > 0 and int(options.customlist) < 100:
                customList = int(options.customlist)
                objectList = getObjectsByCustomList(conn, customList)
            else:
                print("The list must be between 1 and 99 inclusive.  Exiting.")
                sys.exit(1)
        else:
            if options.detectionlist is not None:
                if int(options.detectionlist) >= 0 and int(
                        options.detectionlist) < 9:
                    detectionList = int(options.detectionlist)
                    objectList = getObjectsByList(conn,
                                                  listId=detectionList,
                                                  dateThreshold=flagDate)
                else:
                    print("The list must be between 0 and 8 inclusive.  Exiting.")
                    sys.exit(1)

    currentDate = datetime.datetime.now().strftime("%Y:%m:%d:%H:%M:%S")
    (year, month, day, hour, min, sec) = currentDate.split(':')
    dateAndTime = "%s%s%s_%s%s%s" % (year, month, day, hour, min, sec)

    if len(objectList) > MAX_NUMBER_OF_OBJECTS:
        sys.stderr.write(
            "The number of objects (%d) exceeds the maximum allowed (%d). Cannot continue.\n"
            % (len(objectList), MAX_NUMBER_OF_OBJECTS))
        sys.exit(1)

    # Only download exposures if requested. Otherwise assume we already HAVE the data.
    if not options.skipdownload:
        exposureSet = getUniqueExposures(conn,
                                         objectList,
                                         limit=limit,
                                         mostRecent=mostRecent,
                                         nonDets=nondetections,
                                         discoveryLimit=discoverylimit,
                                         lastDetectionLimit=lastdetectionlimit,
                                         requestType=requestType,
                                         ddc=options.ddc)

        # Download threads with multiprocessing - try 10 threads by default
        print("TOTAL OBJECTS = %d" % len(exposureSet))

        print("Downloading exposures...")

        if len(exposureSet) > 0:
            nProcessors, listChunks = splitList(exposureSet,
                                                bins=int(
                                                    options.downloadthreads))

            print("%s Parallel Processing..." %
                  (datetime.datetime.now().strftime("%Y:%m:%d:%H:%M:%S")))
            parallelProcess(db,
                            dateAndTime,
                            nProcessors,
                            listChunks,
                            workerImageDownloader,
                            miscParameters=[options],
                            drainQueues=False)
            print("%s Done Parallel Processing" %
                  (datetime.datetime.now().strftime("%Y:%m:%d:%H:%M:%S")))

            # Belt and braces. Do again, with one less thread.
            nProcessors, listChunks = splitList(
                exposureSet, bins=int(options.downloadthreads) - 1)

            print("%s Parallel Processing..." %
                  (datetime.datetime.now().strftime("%Y:%m:%d:%H:%M:%S")))
            parallelProcess(db,
                            dateAndTime,
                            nProcessors,
                            listChunks,
                            workerImageDownloader,
                            miscParameters=[options],
                            drainQueues=False)
            print("%s Done Parallel Processing" %
                  (datetime.datetime.now().strftime("%Y:%m:%d:%H:%M:%S")))

    # Produce stamps with multiprocessing - try n(CPUs) threads by default
    print("Producing stamps...")

    if len(objectList) > 0:
        nProcessors, listChunks = splitList(objectList, bins=48)

        print("%s Parallel Processing..." %
              (datetime.datetime.now().strftime("%Y:%m:%d:%H:%M:%S")))
        parallelProcess(db,
                        dateAndTime,
                        nProcessors,
                        listChunks,
                        workerStampCutter,
                        miscParameters=[
                            limit, mostRecent, nondetections, discoverylimit,
                            lastdetectionlimit, requestType, options.ddc,
                            options.wpwarp, options
                        ],
                        drainQueues=False)
        print("%s Done Parallel Processing" %
              (datetime.datetime.now().strftime("%Y:%m:%d:%H:%M:%S")))

    conn.close()

    return 0
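parallelProcess hands each chunk to the supplied worker in a separate process. The exact signature belongs to the project's (unshown) multiprocessing framework; from the call sites, a hedged guess at its shape is:

def worker(num, db, listFragment, dateAndTime, firstPass, miscParameters):
    # Hypothetical signature: each process receives its chunk (listFragment)
    # plus the shared parameters passed via miscParameters.
    options = miscParameters[0]
    for row in listFragment:
        pass  # process one item of the chunk here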