def test_update_transient_summaries_function3(self):
    """Smoke-test a complete ``update()`` run of the housekeeping updater."""
    from marshallEngine.housekeeping import update_transient_summaries

    # run the full summary-table update against the test database connection
    update_transient_summaries(
        log=log, settings=settings, dbConn=dbConn).update()
def test_update_transient_summaries_function(self):
    """Exercise the galactic-coordinate helper of the housekeeping updater."""
    from marshallEngine.housekeeping import update_transient_summaries

    summaryUpdater = update_transient_summaries(
        log=log,
        settings=settings,
        dbConn=dbConn)
    # call the private helper directly - adds galactic coords to summaries
    summaryUpdater._add_galactic_coords()
def ingest(self, withinLastDays):
    """*Ingest the data into the marshall feeder survey table*

    **Key Arguments**

    - ``withinLastDays`` -- within the last number of days. *Default: 50*

    **Return**

    - None
    """
    self.log.info('starting the ``ingest`` method')

    # NO REMOTE SOURCE IS POLLED YET - START WITH AN EMPTY ROW SET
    allLists = []

    # TODO: MIGHT NEED SOMETHING LIKE THIS ... OTHERWISE DELETE AND ADD
    # ANOTHER IMPORT METHOD
    # csvDicts = self.get_csv_data(
    #     url=self.settings["panstarrs urls"]["ps13pi"]["summary csv"],
    #     user=self.settings["credentials"]["ps13pi"]["username"],
    #     pwd=self.settings["credentials"]["ps13pi"]["password"]
    # )
    # allLists.extend(self._clean_data_pre_ingest(
    #     surveyName="ps13pi", withinLastDays=withinLastDays))

    self.dictList = allLists
    self._import_to_feeder_survey_table()
    # DEFER THE SUMMARY UPDATE - IT IS RUN EXPLICITLY BELOW, SCOPED TO A
    # SINGLE TRANSIENT WHERE POSSIBLE
    self.insert_into_transientBucket(
        updateTransientSummaries=False)

    # GRAB THE MOST RECENTLY ADDED USER-ADDED TRANSIENT. THE QUERY IS A
    # FIXED LITERAL - THE FORMER `% locals()` WAS A NO-OP AND WOULD BREAK
    # IF A LITERAL '%' WERE EVER ADDED TO THE SQL
    sqlQuery = u"""
        select transientBucketId from fs_user_added where transientBucketId is not null order by dateCreated desc limit 1
    """
    rows = readquery(
        log=self.log,
        sqlQuery=sqlQuery,
        dbConn=self.dbConn
    )
    if rows:
        transientBucketId = rows[0]["transientBucketId"]
        print(transientBucketId)
    else:
        # NOTHING FOUND - FALSE TRIGGERS A FULL SUMMARY UPDATE INSTEAD
        transientBucketId = False

    # UPDATE THE TRANSIENT BUCKET SUMMARY TABLE IN THE MARSHALL DATABASE
    updater = update_transient_summaries(
        log=self.log,
        settings=self.settings,
        dbConn=self.dbConn,
        transientBucketId=transientBucketId
    )
    updater.update()

    self.log.info('completed the ``ingest`` method')
    return None
def test_update_transient_summaries_function_exception(self):
    """Passing an unknown keyword must raise - verify the failure path."""
    from marshallEngine.housekeeping import update_transient_summaries
    try:
        broken = update_transient_summaries(
            log=log,
            settings=settings,
            fakeKey="break the code"
        )
        broken.get()
        # reaching this point means no exception was raised - fail the test
        assert False
    except Exception as e:
        assert True
        print(str(e))
def ingest(self, withinLastDays):
    """*Ingest the data into the marshall feeder survey table*

    **Key Arguments**

    - ``withinLastDays`` -- within the last number of days. *Default: 50*

    **Return**

    - None
    """
    self.log.info('starting the ``ingest`` method')

    # USER-ADDED TRANSIENTS ARE ALREADY IN THE DATABASE - NO REMOTE SOURCE
    # TO POLL, SO START WITH AN EMPTY ROW SET
    allLists = []
    self.dictList = allLists
    self._import_to_feeder_survey_table()
    # DEFER THE SUMMARY UPDATE - IT IS RUN EXPLICITLY BELOW, SCOPED TO A
    # SINGLE TRANSIENT WHERE POSSIBLE
    self.insert_into_transientBucket(updateTransientSummaries=False)

    # GRAB THE MOST RECENTLY ADDED USER-ADDED TRANSIENT. THE QUERY IS A
    # FIXED LITERAL - THE FORMER `% locals()` WAS A NO-OP AND WOULD BREAK
    # IF A LITERAL '%' WERE EVER ADDED TO THE SQL
    sqlQuery = u"""
        select transientBucketId from fs_user_added where transientBucketId is not null order by dateCreated desc limit 1
    """
    rows = readquery(log=self.log, sqlQuery=sqlQuery, dbConn=self.dbConn)
    if rows:
        transientBucketId = rows[0]["transientBucketId"]
        print(transientBucketId)
    else:
        # NOTHING FOUND - FALSE TRIGGERS A FULL SUMMARY UPDATE INSTEAD
        transientBucketId = False

    # UPDATE THE TRANSIENT BUCKET SUMMARY TABLE IN THE MARSHALL DATABASE
    updater = update_transient_summaries(
        log=self.log,
        settings=self.settings,
        dbConn=self.dbConn,
        transientBucketId=transientBucketId)
    updater.update()

    # CLEAN UP TASKS TO MAKE THE TICKET UPDATE
    self.clean_up()

    self.log.info('completed the ``ingest`` method')
    return None
def main(arguments=None):
    """
    *The main function used when `cl_utils.py` is run as a single script from the cl, or when installed as a cl command*

    **Key Arguments**

    - ``arguments`` -- pre-parsed command-line arguments; when None they are parsed by the ``tools`` setup below
    """
    # setup the command-line util settings
    su = tools(
        arguments=arguments,
        docString=__doc__,
        logLevel="WARNING",
        options_first=False,
        projectName="marshallEngine",
        defaultSettingsFile=True
    )
    arguments, settings, log, dbConn = su.setup()

    # tab completion for raw_input
    readline.set_completer_delims(' \t\n;')
    readline.parse_and_bind("tab: complete")
    readline.set_completer(tab_complete)

    # UNPACK REMAINING CL ARGUMENTS USING `EXEC` TO SETUP THE VARIABLE NAMES
    # AUTOMATICALLY
    # docopt-style keys are normalised: "--flag" -> "flagFlag", "<arg>" -> "arg"
    a = {}
    for arg, val in list(arguments.items()):
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        a[varname] = val
        if arg == "--dbConn":
            # allow an externally supplied database connection (e.g. tests)
            dbConn = val
            a["dbConn"] = val
        log.debug('%s = %s' % (varname, val,))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info(
        '--- STARTING TO RUN THE cl_utils.py AT %s' %
        (startTime,))

    # unpack the arguments this command actually dispatches on
    init = a["init"]
    clean = a["clean"]
    iimport = a["import"]  # 'import' is a reserved word, hence 'iimport'
    lightcurve = a["lightcurve"]
    transientBucketId = a["transientBucketId"]
    survey = a["survey"]
    withInLastDay = a["withInLastDay"]
    settingsFlag = a["settingsFlag"]

    # set options interactively if user requests
    if "interactiveFlag" in a and a["interactiveFlag"]:

        # load previous settings
        moduleDirectory = os.path.dirname(__file__) + "/resources"
        pathToPickleFile = "%(moduleDirectory)s/previousSettings.p" % locals()
        try:
            with open(pathToPickleFile):
                pass
            previousSettingsExist = True
        except:
            # NOTE(review): bare except - presumably guarding against a
            # missing file; consider narrowing to IOError/OSError
            previousSettingsExist = False
        previousSettings = {}
        if previousSettingsExist:
            previousSettings = pickle.load(open(pathToPickleFile, "rb"))

        # x-raw-input
        # x-boolean-raw-input
        # x-raw-input-with-default-value-from-previous-settings

        # save the most recently used requests
        # (pickleMeObjects is empty here, so nothing is persisted yet)
        pickleMeObjects = []
        pickleMe = {}
        theseLocals = locals()
        for k in pickleMeObjects:
            pickleMe[k] = theseLocals[k]
        pickle.dump(pickleMe, open(pathToPickleFile, "wb"))

    if a["init"]:
        from os.path import expanduser
        home = expanduser("~")
        filepath = home + "/.config/marshallEngine/marshallEngine.yaml"
        # best-effort: open the settings file with macOS 'open', then
        # Windows 'start'; failures are ignored on the wrong platform
        try:
            cmd = """open %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass
        try:
            cmd = """start %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass
        return

    # CALL FUNCTIONS/OBJECTS

    # DEFAULT VALUES
    if not withInLastDay:
        withInLastDay = 30

    # CALL FUNCTIONS/OBJECTS
    if clean:
        # RESCUE ORPHANED TRANSIENTS - NO MASTER ID FLAG
        print("rescuing orphaned transients")
        from fundamentals.mysql import writequery
        procedureNames = [
            "update_transients_with_no_masteridflag",
            "insert_new_transients_into_transientbucketsummaries",
            "resurrect_objects",
            "update_sherlock_xmatch_counts",
            "update_inbox_auto_archiver",
            "update_transient_akas"
        ]

        # CALL EACH PROCEDURE
        for p in procedureNames:
            sqlQuery = "CALL `%(p)s`();" % locals()
            writequery(
                log=log,
                sqlQuery=sqlQuery,
                dbConn=dbConn,
            )

        # UPDATE THE TRANSIENT BUCKET SUMMARY TABLE IN THE MARSHALL DATABASE
        from marshallEngine.housekeeping import update_transient_summaries
        updater = update_transient_summaries(
            log=log,
            settings=settings,
            dbConn=dbConn
        ).update()

    if iimport:
        # select the feeder implementation matching the requested survey;
        # the last matching branch binds `data` and `images`
        if survey.lower() == "panstarrs":
            from marshallEngine.feeders.panstarrs.data import data
            from marshallEngine.feeders.panstarrs import images
        if survey.lower() == "atlas":
            from marshallEngine.feeders.atlas.data import data
            from marshallEngine.feeders.atlas import images
        if survey.lower() == "useradded":
            from marshallEngine.feeders.useradded.data import data
            from marshallEngine.feeders.useradded import images
        if survey.lower() == "tns":
            from marshallEngine.feeders.tns.data import data
            from marshallEngine.feeders.tns import images
        if survey.lower() == "ztf":
            from marshallEngine.feeders.ztf.data import data
            from marshallEngine.feeders.ztf import images
        ingester = data(
            log=log,
            settings=settings,
            dbConn=dbConn
        ).ingest(withinLastDays=withInLastDay)
        cacher = images(
            log=log,
            settings=settings,
            dbConn=dbConn
        ).cache(limit=3000)

        from marshallEngine.services import panstarrs_location_stamps
        ps_stamp = panstarrs_location_stamps(
            log=log,
            settings=settings,
            dbConn=dbConn
        ).get()

    if lightcurve:
        from marshallEngine.lightcurves import marshall_lightcurves
        lc = marshall_lightcurves(
            log=log,
            dbConn=dbConn,
            settings=settings,
            transientBucketIds=transientBucketId
        )
        filepath = lc.plot()
        print("The lightcurve plot for transient %(transientBucketId)s can be found here: %(filepath)s" % locals())

    # commit any outstanding work and release the connection
    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()

    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info(
        '-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --' %
        (endTime, runningTime, ))

    return
def insert_into_transientBucket(self, importUnmatched=True, updateTransientSummaries=True):
    """*insert objects/detections from the feeder survey table into the transientbucket*

    **Key Arguments**

    - ``importUnmatched`` -- import unmatched (new) transients into the marshall (not wanted in some circumstances)
    - ``updateTransientSummaries`` -- update the transient summaries and lightcurves? Can be True or False, or alternatively a specific transientBucketId

    This method aims to reduce crossmatching and load on the database by:

    1. automatically assign the transientbucket id to feeder survey detections where the object name is found in the transientbucket (no spatial crossmatch required). Copy matched feeder survey rows to the transientbucket.
    2. crossmatch remaining unique, unmatched sources in feeder survey with sources in the transientbucket. Add associated transientBucketIds to matched feeder survey sources. Copy matched feeder survey rows to the transientbucket.
    3. assign a new transientbucketid to any feeder survey source not matched in steps 1 & 2. Copy these unmatched feeder survey rows to the transientbucket as new transient detections.

    **Return**

    - None

    **Usage**

    ```python
    ingester.insert_into_transientBucket()
    ```
    """
    # FIX: log messages previously named the old method name
    # ``crossmatch_with_transientBucket``
    self.log.debug(
        'starting the ``insert_into_transientBucket`` method')

    # 1. automatically assign the transientbucket id to feeder survey
    # detections where the object name is found in the transientbucket (no
    # spatial crossmatch required). Copy matched feeder survey rows to the
    # transientbucket.
    self._feeder_survey_transientbucket_name_match_and_import()

    # 2. crossmatch remaining unique, unmatched sources in feeder survey
    # with sources in the transientbucket. Add associated
    # transientBucketIds to matched feeder survey sources. Copy matched
    # feeder survey rows to the transientbucket.
    # (HTM indexes must be fresh before the spatial crossmatch below)
    from HMpTy.mysql import add_htm_ids_to_mysql_database_table
    add_htm_ids_to_mysql_database_table(
        raColName="raDeg",
        declColName="decDeg",
        tableName="transientBucket",
        dbConn=self.dbConn,
        log=self.log,
        primaryIdColumnName="primaryKeyId",
        dbSettings=self.settings["database settings"])
    unmatched = self._feeder_survey_transientbucket_crossmatch()

    # 3. assign a new transientbucketid to any feeder survey source not
    # matched in steps 1 & 2. Copy these unmatched feeder survey rows to
    # the transientbucket as new transient detections.
    if importUnmatched:
        self._import_unmatched_feeder_survey_sources_to_transientbucket(
            unmatched)

    # UPDATE OBSERVATION DATES FROM MJDs
    sqlQuery = "call update_transientbucket_observation_dates()"
    writequery(
        log=self.log,
        sqlQuery=sqlQuery,
        dbConn=self.dbConn
    )

    # UPDATE THE TRANSIENT BUCKET SUMMARY TABLE IN THE MARSHALL DATABASE
    if updateTransientSummaries:
        # an int (but not a bool - bool is an int subclass) scopes the
        # update to a single transient; True means update everything
        if isinstance(updateTransientSummaries, int) and not isinstance(
                updateTransientSummaries, bool):
            transientBucketId = updateTransientSummaries
        else:
            transientBucketId = False
        from marshallEngine.housekeeping import update_transient_summaries
        updater = update_transient_summaries(
            log=self.log,
            settings=self.settings,
            dbConn=self.dbConn,
            transientBucketId=transientBucketId)
        updater.update()

    self.log.debug(
        'completed the ``insert_into_transientBucket`` method')
    return None