Example #1
    def refresh_database(self):
        """
        *Refresh the unit test database*
        """
        from fundamentals.mysql import directory_script_runner
        from fundamentals import tools
        packageDirectory = self.get_project_root()
        su = tools(
            arguments={"settingsFile": packageDirectory +
                       "/test_settings.yaml"},
            docString=__doc__,
            logLevel="DEBUG",
            options_first=False,
            projectName=None,
            defaultSettingsFile=False
        )
        arguments, settings, log, dbConn = su.setup()
        directory_script_runner(
            log=log,
            pathToScriptDirectory=packageDirectory + "/tests/input",
            dbConn=dbConn,
            successRule=None,
            failureRule=None
        )
        # DATABASE IMPORT WAS STALLING UNITTESTS
        import time
        time.sleep(20)
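A pattern worth noting across these examples: `directory_script_runner` is called either with an already-open `dbConn` (as above) or with a `databaseName`/`loginPath` pair, apparently letting the runner open its own connection via a mysql login-path. A minimal sketch of both conventions, assuming `log` and `dbConn` come from fundamentals' `tools().setup()` as in the example above (the script directory path is hypothetical):

from fundamentals.mysql import directory_script_runner

# 1. RUN EVERY SCRIPT IN A DIRECTORY OVER AN ALREADY-OPEN CONNECTION
directory_script_runner(
    log=log,
    pathToScriptDirectory="/path/to/sql/scripts",
    dbConn=dbConn
)

# 2. OR NAME THE DATABASE AND A MYSQL LOGIN-PATH INSTEAD
directory_script_runner(
    log=log,
    pathToScriptDirectory="/path/to/sql/scripts",
    databaseName="dryx_unit_testing",
    loginPath="unittesting"
)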
    def test_directory_script_runner_function(self):

        from fundamentals.mysql import directory_script_runner
        directory_script_runner(
            log=log,
            pathToScriptDirectory=pathToOutputDir + "/mysql_scripts",
            databaseName="dryx_unit_testing",
            loginPath="unittesting"
        )
    def test_directory_script_runner_function_03(self):

        from fundamentals.mysql import directory_script_runner
        directory_script_runner(
            log=log,
            pathToScriptDirectory=pathToOutputDir + "/mysql_scripts",
            databaseName="dryx_unit_testing",
            loginPath="unittesting",
            successRule="delete",
            failureRule="failed"
        )
    def test_directory_script_runner_function_exception(self):

        from fundamentals.mysql import directory_script_runner
        try:
            directory_script_runner(
                log=log,
                settings=settings,
                fakeKey="break the code"
            )
            assert False
        except Exception as e:
            assert True
            print(str(e))
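A more idiomatic way to express the exception test above, assuming pytest is available and that `log` and `settings` are the same module-level test fixtures used throughout these examples:

import pytest
from fundamentals.mysql import directory_script_runner

def test_directory_script_runner_rejects_unknown_kwargs():
    # AN UNEXPECTED KEYWORD ARGUMENT SHOULD RAISE RATHER THAN BE IGNORED
    with pytest.raises(Exception):
        directory_script_runner(
            log=log,
            settings=settings,
            fakeKey="break the code"
        )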
Example #8
    def ingest(self):
        """*Ingest the ePESSTO Marshall transient stream into the catalogues database*

        The method first creates the tables for the various marshall feeder surveys in the sherlock-catalogues database (if they do not yet exist). Then the marshall database is queried for each transient survey and the results are imported into the sherlock-catalogues tables.

        See the class docstring for usage

        .. todo ::

            - convert the directory_script_runner to 'load in file'
        """
        self.log.debug('starting the ``ingest`` method')

        # A YAML DICTIONARY OF sherlock-catalogues TABLE NAMES AND THE SELECT
        # QUERIES USED TO LIFT THE DATA FROM THE MARSHALL
        yamlFilePath = '/'.join(__file__.split(
            '/')[:-1]) + "/resources/pessto_marshall_table_selects.yaml"
        with open(yamlFilePath, 'r') as stream:
            marshallQueries = yaml.safe_load(stream)
        self.primaryIdColumnName = "primaryId"
        self.raColName = "raDeg"
        self.declColName = "decDeg"

        # CREATE THE MARSHALL IMPORT CATALOGUE TABLES (IF THEY DO NOT EXIST)
        directory_script_runner(
            log=self.log,
            pathToScriptDirectory='/'.join(
                __file__.split('/')[:-1]) + "/resources",
            databaseName=self.settings["database settings"][
                "static catalogues"]["db"],
            loginPath=self.settings["database settings"][
                "static catalogues"]["loginPath"],
            successRule="delete",
            failureRule="failed")

        for k, v in marshallQueries["pessto queries"].items():
            self.dbTableName = k
            self.databaseInsertbatchSize = 500
            dictList = self._create_dictionary_of_marshall(
                marshallQuery=v["query"], marshallTable=v["marshallTable"])

            tableName = self.dbTableName
            self.add_data_to_database_table(dictList=dictList)

        self.log.debug('completed the ``ingest`` method')
        return None
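For reference, a minimal sketch of the structure the loop above expects from `pessto_marshall_table_selects.yaml` once parsed. Only the keys (`pessto queries`, `query`, `marshallTable`) are taken from the code; the table and query names here are hypothetical:

# HYPOTHETICAL parsed structure; the real table/query names live in the
# package's resources file
marshallQueries = {
    "pessto queries": {
        "tcs_cat_pessto_example": {                        # sherlock-catalogues table name
            "marshallTable": "example_marshall_table",     # source table in the marshall
            "query": "SELECT * FROM example_marshall_table"
        }
    }
}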
Example #9
    def _run_bookkeeping_sql_scripts(self):
        """*run bookkeeping sql scripts*
        """
        self.log.info('starting the ``_run_bookkeeping_sql_scripts`` method')

        moduleDirectory = os.path.dirname(__file__)
        mysqlScripts = moduleDirectory + "/mysql"

        directory_script_runner(
            log=self.log,
            pathToScriptDirectory=mysqlScripts,
            databaseName=self.settings["database settings"][
                "atlasMovers"]["db"],
            loginPath=self.settings["database settings"][
                "atlasMovers"]["loginPath"],
            force=True,
            waitForResult=True,
            successRule=False,
            failureRule=False)

        self.log.info('completed the ``_run_bookkeeping_sql_scripts`` method')
        return None
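These methods all read database credentials from a parsed settings file. A sketch of the fragment this example assumes, with the key names copied from the call above and the values invented:

# HYPOTHETICAL settings fragment -- only the keys read above are shown
settings = {
    "database settings": {
        "atlasMovers": {
            "db": "atlas_moving_objects",   # invented database name
            "loginPath": "atlasMovers"      # invented mysql login-path
        }
    }
}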
Example #10
    def convert_sqlite_to_mysql(self):
        """*copy the contents of the sqlite database into the mysql database*

        See class docstring for usage
        """
        from fundamentals.renderer import list_of_dictionaries
        from fundamentals.mysql import directory_script_runner
        self.log.debug('starting the ``convert_sqlite_to_mysql`` method')

        # `lite` IS ASSUMED TO BE THE sqlite3 MODULE, IMPORTED AT MODULE
        # LEVEL (import sqlite3 as lite)
        con = lite.connect(self.pathToSqlite)
        con.row_factory = lite.Row
        cur = con.cursor()

        # GET ALL TABLE NAMES
        cur.execute("SELECT name FROM sqlite_master WHERE type='table';")
        tables = cur.fetchall()

        createStatements = []
        inserts = []
        for table in tables:
            table = table['name']
            if table == "sqlite_sequence":
                continue

            # CREATE TABLE collection_books (folder_id, fingerprint, primary key(folder_id, fingerprint));
            # GENERATE THE MYSQL CREATE STATEMENTS FOR EACH TABLE
            cur.execute(
                "SELECT sql FROM sqlite_master WHERE name = '%(table)s';" %
                locals())
            createStatement = cur.fetchone()
            createStatement = createStatement[0].replace('"', '`') + ";"
            if "DEFAULT" not in createStatement:
                if "primary key(" in createStatement:
                    tmp = createStatement.split("primary key(")
                    tmp[0] = tmp[0].replace(",", " varchar(150) DEFAULT NULL,")
                    createStatement = ("primary key(").join(tmp)
                if "primary key," in createStatement:
                    tmp = createStatement.split("primary key,")
                    tmp[1] = tmp[1].replace(",", " varchar(150) DEFAULT NULL,")
                    tmp[1] = tmp[1].replace(");",
                                            " varchar(150) DEFAULT NULL);")
                    createStatement = ("primary key,").join(tmp)
            createStatement = createStatement.replace(
                "INTEGER PRIMARY KEY", "INTEGER AUTO_INCREMENT PRIMARY KEY")
            createStatement = createStatement.replace("AUTOINCREMENT",
                                                      "AUTO_INCREMENT")
            createStatement = createStatement.replace("DEFAULT 't'",
                                                      "DEFAULT '1'")
            createStatement = createStatement.replace("DEFAULT 'f'",
                                                      "DEFAULT '0'")
            createStatement = createStatement.replace(",'t'", ",'1'")
            createStatement = createStatement.replace(",'f'", ",'0'")
            if "CREATE TABLE `" in createStatement:
                createStatement = createStatement.replace(
                    "CREATE TABLE `",
                    "CREATE TABLE IF NOT EXISTS `" + self.tablePrefix)
            else:
                createStatement = createStatement.replace(
                    "CREATE TABLE ",
                    "CREATE TABLE IF NOT EXISTS " + self.tablePrefix)
            if ", primary key(" in createStatement:
                createStatement = createStatement.replace(
                    ", primary key(", """,
`dateLastModified` datetime DEFAULT NULL,
`updated` tinyint(4) DEFAULT '0',
primary key(""")
            else:
                createStatement = createStatement.replace(
                    ");", """,
    `dateLastModified` datetime DEFAULT NULL,
    `updated` tinyint(4) DEFAULT '0');
                """)
            createStatement = createStatement.replace(
                " text primary key", " varchar(100) primary key")
            createStatement = createStatement.replace(
                "`EntryText` TEXT NOT NULL,", "`EntryText` TEXT,")
            createStatement = createStatement.replace(
                "`SelectionText` TEXT NOT NULL", "`SelectionText` TEXT")
            createStatement = createStatement.replace(
                "`Filename` INTEGER NOT NULL,", "`Filename` TEXT NOT NULL,")
            createStatement = createStatement.replace(
                "`SessionPartUUID` TEXT NOT NULL UNIQUE,",
                "`SessionPartUUID` VARCHAR(100) NOT NULL UNIQUE,")
            createStatement = createStatement.replace(
                "`Name` TEXT PRIMARY KEY NOT NULL",
                "`Name` VARCHAR(100) PRIMARY KEY NOT NULL")

            # GRAB THE DATA TO ADD TO THE MYSQL DATABASE TABLES
            cur.execute("SELECT * from '%(table)s';" % locals())
            rows = cur.fetchall()

            allRows = []
            for row in rows:
                allRows.append(dict(row))

            # RECURSIVELY CREATE MISSING DIRECTORIES
            if not os.path.exists("/tmp/headjack/"):
                os.makedirs("/tmp/headjack/")

            # DUMP THE DATA INTO A MYSQL DATABASE
            dataSet = list_of_dictionaries(log=self.log,
                                           listOfDictionaries=allRows)
            originalList = dataSet.list
            now = datetime.now()
            now = now.strftime("%Y%m%dt%H%M%S%f.sql")
            mysqlData = dataSet.mysql(tableName=self.tablePrefix + table,
                                      filepath="/tmp/headjack/" + now,
                                      createStatement=createStatement)

            directory_script_runner(
                log=self.log,
                pathToScriptDirectory="/tmp/headjack/",
                databaseName=self.settings["database settings"]["db"],
                loginPath=self.settings["database settings"]["loginPath"],
                successRule="delete",
                failureRule="failed")

        con.close()

        self.log.debug('completed the ``convert_sqlite_to_mysql`` method')
        return None
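To see the effect of the statement rewriting above in isolation, here is a self-contained sketch applying the core substitutions to an invented SQLite CREATE statement (the sample statement and `myprefix_` are illustrative only):

# INVENTED sample statement, as it would come back from sqlite_master
createStatement = 'CREATE TABLE "books" (id INTEGER PRIMARY KEY, title text);'

# THE SAME CORE SUBSTITUTIONS PERFORMED IN convert_sqlite_to_mysql
createStatement = createStatement.replace('"', '`')
createStatement = createStatement.replace(
    "INTEGER PRIMARY KEY", "INTEGER AUTO_INCREMENT PRIMARY KEY")
createStatement = createStatement.replace(
    "CREATE TABLE `", "CREATE TABLE IF NOT EXISTS `myprefix_")

print(createStatement)
# CREATE TABLE IF NOT EXISTS `myprefix_books` (id INTEGER AUTO_INCREMENT PRIMARY KEY, title text);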
    def update_gravity_event_annotations(self):
        """*update gravity event annotations*

        **Key Arguments:**
            - None

        **Return:**
            - None

        **Usage:**
            ..  todo::

                - add usage info
                - create a sublime snippet for usage
                - write a command-line tool for this method
                - update package tutorial with command-line tool info if needed
        """
        self.log.debug(
            'starting the ``update_gravity_event_annotations`` method')

        from breaker.transients import annotator

        # CREATE THE ANNOTATION HELPER TABLES IF THEY DON'T EXIST
        moduleDirectory = os.path.dirname(__file__)
        mysql_scripts = moduleDirectory + "/resources/mysql"
        for db in ["ps1gw", "ps13pi", "atlas"]:
            directory_script_runner(
                log=self.log,
                pathToScriptDirectory=mysql_scripts,
                databaseName=self.settings["database settings"][db]["db"],
                loginPath=self.settings["database settings"][db]["loginPath"],
                waitForResult=True,
                successRule=False,
                failureRule=False
            )
        for db in ["ligo_virgo_waves"]:
            directory_script_runner(
                log=self.log,
                pathToScriptDirectory=mysql_scripts + "/ps1_skycell_help_tables",
                databaseName=self.settings["database settings"][db]["db"],
                loginPath=self.settings["database settings"][db]["loginPath"],
                waitForResult=True,
                successRule=False,
                failureRule=False
            )

        # UPDATE THE TABLE WITH THE METADATA OF EACH GRAVITY EVENT
        sqlQuery = ""
        for g in self.settings["gravitational waves"]:
            h = self.settings["gravitational waves"][g]["human-name"]
            m = self.settings["gravitational waves"][g]["mjd"]
            cmd = """insert ignore into tcs_gravity_events (`gracedb_id`, `gravity_event_id`, `mjd`) VALUES ("%(g)s", "%(h)s", %(m)s) on duplicate key update mjd=%(m)s;\n""" % locals(
            )
            sqlQuery += cmd
        for db in [self.atlasDbConn, self.ps1gwDbConn, self.ps13piDbConn]:
            writequery(
                log=self.log,
                sqlQuery=sqlQuery,
                dbConn=db
            )
        sqlQuery = sqlQuery.replace("tcs_gravity_events", "gravity_events")
        writequery(
            log=self.log,
            sqlQuery=sqlQuery,
            dbConn=self.ligo_virgo_wavesDbConn,
        )
        for db in ["ps1gw", "ps13pi", "atlas"]:
            directory_script_runner(
                log=self.log,
                pathToScriptDirectory=mysql_scripts,
                databaseName=self.settings["database settings"][db]["db"],
                loginPath=self.settings["database settings"][db]["loginPath"],
                waitForResult=True,
                successRule=False,
                failureRule=False
            )
        for db in ["ligo_virgo_waves"]:
            directory_script_runner(
                log=self.log,
                pathToScriptDirectory=mysql_scripts + "/ps1_skycell_help_tables",
                databaseName=self.settings["database settings"][db]["db"],
                loginPath=self.settings["database settings"][db]["loginPath"],
                waitForResult=True,
                successRule=False,
                failureRule=False
            )

        dbDict = {
            "ps1gw": self.ps1gwDbConn,
            "atlas": self.atlasDbConn,
            "ps13pi": self.ps13piDbConn,
            "ligo_virgo_waves": self.ligo_virgo_wavesDbConn
        }

        for db in dbDict.keys():

            for g in self.settings["gravitational waves"]:
                h = self.settings["gravitational waves"][g]["human-name"]
                print("Annotating new transients associated with gravity event %(h)s" % locals())
                m = self.settings["gravitational waves"][g]["mjd"]
                mapPath = self.settings["gravitational waves"][g]["mapPath"]
                mapName = os.path.basename(mapPath)

                thisDbConn = dbDict[db]

                if thisDbConn in [self.ps1gwDbConn, self.ps13piDbConn]:

                    sqlQuery = u"""
                        SELECT
                            a.transient_object_id, a.gracedb_id, t.ra_psf, t.dec_psf
                        FROM
                            tcs_transient_objects t,
                            tcs_gravity_event_annotations a
                        WHERE
                            a.transient_object_id = t.id
                                AND t.detection_list_id != 0
                                AND (a.map_name !=
                                     "%(mapName)s"  or a.map_name is null)
                                AND a.gracedb_id="%(g)s";
                    """ % locals()

                    rows = readquery(
                        log=self.log,
                        sqlQuery=sqlQuery,
                        dbConn=thisDbConn,
                        quiet=False
                    )

                    transients = {}
                    for r in rows:
                        transients[r["transient_object_id"]] = (
                            r["ra_psf"], r["dec_psf"])

                    an = annotator(
                        log=self.log,
                        settings=self.settings,
                        gwid=g
                    )
                    transientNames, probs = an.annotate(transients)

                if thisDbConn in [self.atlasDbConn]:
                    sqlQuery = u"""
                        SELECT
                            a.transient_object_id, a.gracedb_id, t.ra, t.dec
                        FROM
                            atlas_diff_objects t,
                            tcs_gravity_event_annotations a
                        WHERE
                            a.transient_object_id = t.id
                                AND t.detection_list_id != 0
                                AND (a.map_name !=
                                     "%(mapName)s"  or a.map_name is null)
                                AND a.gracedb_id="%(g)s";
                    """ % locals()
                    rows = readquery(
                        log=self.log,
                        sqlQuery=sqlQuery,
                        dbConn=thisDbConn,
                        quiet=False
                    )

                    transients = {}
                    for r in rows:
                        transients[r["transient_object_id"]] = (
                            r["ra"], r["dec"])

                    an = annotator(
                        log=self.log,
                        settings=self.settings,
                        gwid=g
                    )
                    transientNames, probs = an.annotate(transients)

                if thisDbConn in [self.ligo_virgo_wavesDbConn]:

                    # PANSTARRS SKYCELLS
                    sqlQuery = u"""
                        SELECT 
                                a.skycell_id, a.gracedb_id, t.raDeg, t.decDeg
                            FROM
                                ps1_skycell_map t,
                                ps1_skycell_gravity_event_annotations a
                            WHERE
                                a.skycell_id = t.skycell_id
                                AND (a.map_name != "%(mapName)s"  or a.map_name is null)
                                AND a.gracedb_id="%(g)s"; 
                    """ % locals()
                    rows = readquery(
                        log=self.log,
                        sqlQuery=sqlQuery,
                        dbConn=thisDbConn,
                        quiet=False
                    )

                    exposures = {}
                    for r in rows:
                        exposures[r["skycell_id"]] = (
                            r["raDeg"], r["decDeg"])

                    stats = survey_footprint(
                        log=self.log,
                        settings=self.settings,
                        gwid=g
                    )
                    exposureIDs, probs = stats.annotate_exposures(
                        exposures=exposures,
                        pointingSide=0.4
                    )

                    dataList = []
                    for p, t in zip(probs, exposureIDs):
                        dataList.append({
                            "skycell_id": t,
                            "prob_coverage": p,
                            "gracedb_id": g,
                            "map_name": mapName
                        })
                    tableName = "ps1_skycell_gravity_event_annotations"

                    dataSet = list_of_dictionaries(
                        log=self.log,
                        listOfDictionaries=dataList,
                        reDatetime=re.compile('^[0-9]{4}-[0-9]{2}-[0-9]{2}T')
                    )
                    # RECURSIVELY CREATE MISSING DIRECTORIES
                    if not os.path.exists("/tmp/mysqlinsert/%(db)s" % locals()):
                        os.makedirs("/tmp/mysqlinsert/%(db)s" % locals())
                    now = datetime.now()
                    now = now.strftime("%Y%m%dt%H%M%S%f")
                    mysqlData = dataSet.mysql(
                        tableName=tableName,
                        filepath="/tmp/mysqlinsert/%(db)s/%(now)s.sql" % locals(),
                        createStatement=False)

                    # ATLAS EXPOSURES
                    sqlQuery = u"""
                        SELECT 
                                atlas_object_id, gracedb_id, raDeg, decDeg
                            FROM
                                atlas_exposure_gravity_event_annotations
                            WHERE
                                (map_name != "%(mapName)s"  or map_name is null)
                                AND gracedb_id="%(g)s"; 
                    """ % locals()
                    rows = readquery(
                        log=self.log,
                        sqlQuery=sqlQuery,
                        dbConn=thisDbConn,
                        quiet=False
                    )

                    exposures = {}
                    for r in rows:
                        exposures[r["atlas_object_id"]] = (
                            r["raDeg"], r["decDeg"])

                    stats = survey_footprint(
                        log=self.log,
                        settings=self.settings,
                        gwid=g
                    )
                    exposureIDs, probs = stats.annotate_exposures(
                        exposures=exposures,
                        pointingSide=5.46
                    )

                    dataList = []
                    for p, t in zip(probs, exposureIDs):
                        dataList.append({
                            "atlas_object_id": t,
                            "prob_coverage": p,
                            "gracedb_id": g,
                            "map_name": mapName
                        })
                    tableName = "atlas_exposure_gravity_event_annotations"

                    dataSet = list_of_dictionaries(
                        log=self.log,
                        listOfDictionaries=dataList,
                        reDatetime=re.compile('^[0-9]{4}-[0-9]{2}-[0-9]{2}T')
                    )
                    # RECURSIVELY CREATE MISSING DIRECTORIES
                    if not os.path.exists("/tmp/mysqlinsert/%(db)s" % locals()):
                        os.makedirs("/tmp/mysqlinsert/%(db)s" % locals())
                    now = datetime.now()
                    now = now.strftime("%Y%m%dt%H%M%S%f")
                    mysqlData = dataSet.mysql(
                        tableName=tableName,
                        filepath="/tmp/mysqlinsert/%(db)s/%(now)s.sql" % locals(),
                        createStatement=False)

                if thisDbConn not in [self.ligo_virgo_wavesDbConn]:
                    dataList = []
                    for p, t in zip(probs, transientNames):
                        dataList.append({
                            "transient_object_id": t,
                            "enclosing_contour": p,
                            "gracedb_id": g,
                            "map_name": mapName
                        })
                    tableName = "tcs_gravity_event_annotations"

                    dataSet = list_of_dictionaries(
                        log=self.log,
                        listOfDictionaries=dataList,
                        reDatetime=re.compile('^[0-9]{4}-[0-9]{2}-[0-9]{2}T')
                    )
                    # RECURSIVELY CREATE MISSING DIRECTORIES
                    if not os.path.exists("/tmp/mysqlinsert/%(db)s" % locals()):
                        os.makedirs("/tmp/mysqlinsert/%(db)s" % locals())
                    now = datetime.now()
                    now = now.strftime("%Y%m%dt%H%M%S%f")
                    mysqlData = dataSet.mysql(
                        tableName=tableName,
                        filepath="/tmp/mysqlinsert/%(db)s/%(now)s.sql" % locals(),
                        createStatement=False)

        for db in dbDict.keys():
            directory_script_runner(
                log=self.log,
                pathToScriptDirectory="/tmp/mysqlinsert/%(db)s" % locals(),
                databaseName=self.settings["database settings"][db]["db"],
                loginPath=self.settings["database settings"][db]["loginPath"],
                waitForResult=True,
                successRule=False,
                failureRule=False
            )

        self.log.debug(
            'completed the ``update_gravity_event_annotations`` method')
        return None
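The recurring pattern in the method above -- render rows to a per-database directory of .sql files, then flush the whole directory with one `directory_script_runner` call -- can be isolated as follows. `rows`, `log` and `settings` are assumed to exist; the database key and table name are copied from the code above:

from datetime import datetime
import os
from fundamentals.renderer import list_of_dictionaries
from fundamentals.mysql import directory_script_runner

db = "ps1gw"
scriptDir = "/tmp/mysqlinsert/%(db)s" % locals()
if not os.path.exists(scriptDir):
    os.makedirs(scriptDir)

# `rows` IS ASSUMED: a list of annotation dictionaries to insert
dataSet = list_of_dictionaries(log=log, listOfDictionaries=rows)
now = datetime.now().strftime("%Y%m%dt%H%M%S%f")
dataSet.mysql(
    tableName="tcs_gravity_event_annotations",
    filepath="%(scriptDir)s/%(now)s.sql" % locals(),
    createStatement=False)

# ONE CALL FLUSHES EVERY .sql FILE IN THE DIRECTORY INTO THE DATABASE
directory_script_runner(
    log=log,
    pathToScriptDirectory=scriptDir,
    databaseName=settings["database settings"][db]["db"],
    loginPath=settings["database settings"][db]["loginPath"],
    waitForResult=True,
    successRule=False,
    failureRule=False)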
    def parse_panstarrs_nightlogs(
            self,
            updateAll=False):
        """*download and parse the ps1 night logs from the range of time a wave survey campaign is active*

        The night-log data is added to the ps1_nightlogs table

        **Key Arguments:**
            - ``updateAll`` -- update all of the PS1 nightlogs. This can take some time; the default is to lift only the logs from the last 7 days. Default *False*.

        **Return:**
            - None

        **Usage:**
            ..  todo::

                - add usage info
                - create a sublime snippet for usage
                - update package tutorial if needed

        """
        self.log.debug('starting the ``parse_panstarrs_nightlogs`` method')

        # CONVERTER TO CONVERT MJD TO DATE
        converter = conversions(
            log=self.log
        )

        createStatement = """
CREATE TABLE `ps1_nightlogs` (
  `primaryId` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'An internal counter',
  `airm` double DEFAULT NULL,
  `comments` varchar(200) DEFAULT NULL,
  `decDeg` double DEFAULT NULL,
  `etime` double DEFAULT NULL,
  `f` varchar(10) DEFAULT NULL,
  `filesetID` varchar(100) DEFAULT NULL,
  `raDeg` double DEFAULT NULL,
  `telescope_pointing` varchar(200) DEFAULT NULL,
  `time_registered` datetime DEFAULT NULL,
  `type` varchar(100) DEFAULT NULL,
  `dateCreated` datetime DEFAULT CURRENT_TIMESTAMP,
  `dateLastModified` datetime DEFAULT CURRENT_TIMESTAMP,
  `updated` varchar(45) DEFAULT '0',
  PRIMARY KEY (`primaryId`),
  UNIQUE KEY `filesetid` (`filesetID`)
) ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=latin1;
"""

        from astrocalc.times import now
        mjdNow = now(
            log=self.log
        ).get_mjd()

        # WAVE METADATA FOUND IN SETTINGS FILE
        for wave in self.settings["gravitational waves"]:
            # GIVE A 3 DAY WINDOW EITHER SIDE OF WAVE TIME-RANGE
            mjdLower = int(self.settings["gravitational waves"][
                wave]["mjd"] - 21. - 3.)
            mjdUpper = int(self.settings["gravitational waves"][
                wave]["mjd"] + 31. + 3.)

            if not updateAll:
                if mjdUpper < mjdNow - 7.:
                    continue
                if mjdUpper > mjdNow:
                    mjdUpper = int(mjdNow)
                if mjdLower < mjdNow - 7.:
                    mjdLower = int(mjdNow - 7.)

            # METRIC NIGHT LOGS FOR EACH NIGHT FOUND AT A URL SIMILAR TO :
            # "http://ipp0022.ifa.hawaii.edu/ps1sc/metrics/2016-12-14/index.html"
            urls = []
            for i in range(mjdUpper - mjdLower + 3):
                mjd = i + mjdLower
                utDate = converter.mjd_to_ut_datetime(
                    mjd=mjd,
                    sqlDate=False,
                    datetimeObject=True
                )
                utDate = utDate.strftime("%Y-%m-%d")
                urls.append(
                    "http://ipp0022.ifa.hawaii.edu/ps1sc/metrics/%(utDate)s/index.html" % locals())

            localUrls = multiobject_download(
                urlList=urls,
                downloadDirectory="/tmp",
                log=self.log,
                timeStamp=True,
                timeout=180,
                concurrentDownloads=2,
                resetFilename=False,
                credentials=False,  # or {'username': "...", 'password': "..."}
                longTime=True,
                indexFilenames=False
            )

            for url in localUrls:
                if not url:
                    continue
                pathToReadFile = url
                try:
                    self.log.debug("attempting to open the file %s" %
                                   (pathToReadFile,))
                    readFile = codecs.open(
                        pathToReadFile, encoding='utf-8', mode='r')
                    thisData = readFile.read()
                    readFile.close()
                except IOError as e:
                    message = 'could not open the file %s' % (pathToReadFile,)
                    self.log.critical(message)
                    raise IOError(message)

                regex = re.compile(r'<pre>\s*# (filesetID.*?)</pre>', re.S)
                matchObject = re.finditer(
                    regex,
                    thisData
                )

                for match in matchObject:
                    csvReader = csv.DictReader(
                        io.StringIO(match.group(1)), delimiter='|')
                    nightLog = []
                    for row in csvReader:
                        cleanDict = {}
                        for k, v in row.items():
                            cleanDict[k.strip().replace(" ", "_")] = v.strip()
                        if "telescope_pointing" in cleanDict:
                            cleanDict["raDeg"] = cleanDict["telescope_pointing"].split()[
                                0]
                            cleanDict["decDeg"] = cleanDict["telescope_pointing"].split()[
                                1]
                        if "time_registered" in cleanDict:
                            cleanDict["time_registered"] = cleanDict[
                                "time_registered"].replace("Z", "")
                        nightLog.append(cleanDict)

                dataSet = list_of_dictionaries(
                    log=self.log,
                    listOfDictionaries=nightLog
                )
                # RECURSIVELY CREATE MISSING DIRECTORIES
                if not os.path.exists("/tmp/ps1_nightlogs"):
                    os.makedirs("/tmp/ps1_nightlogs")
                mysqlData = dataSet.mysql(
                    tableName="ps1_nightlogs",
                    filepath="/tmp/ps1_nightlogs/ps1_nightlog_%(utDate)s.sql" % locals(),
                    createStatement=createStatement)

                directory_script_runner(
                    log=self.log,
                    pathToScriptDirectory="/tmp/ps1_nightlogs",
                    databaseName=self.settings["database settings"][
                        "ligo_virgo_waves"]["db"],
                    loginPath=self.settings["database settings"][
                        "ligo_virgo_waves"]["loginPath"],
                    successRule="delete",
                    failureRule="failed"
                )
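The regex/csv parsing step in the method above can be exercised in isolation. A self-contained sketch with an invented page fragment (the column names and fileset IDs are illustrative only):

import csv
import io
import re

# INVENTED fragment mimicking the metrics page parsed above
thisData = """<pre>
# filesetID | airm | f
o1234g0001o | 1.20 | w
o1234g0002o | 1.35 | w
</pre>"""

regex = re.compile(r'<pre>\s*# (filesetID.*?)</pre>', re.S)
for match in re.finditer(regex, thisData):
    csvReader = csv.DictReader(
        io.StringIO(match.group(1)), delimiter='|')
    for row in csvReader:
        # STRIP WHITESPACE FROM KEYS/VALUES, AS THE METHOD ABOVE DOES
        cleanDict = {k.strip().replace(" ", "_"): v.strip()
                     for k, v in row.items()}
        print(cleanDict)
# {'filesetID': 'o1234g0001o', 'airm': '1.20', 'f': 'w'}
# {'filesetID': 'o1234g0002o', 'airm': '1.35', 'f': 'w'}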
if not os.path.exists(pathToOutputDir):
    os.makedirs(pathToOutputDir)

settings["database settings"]["static catalogues"] = settings[
    "database settings"]["static catalogues2"]

# SETUP ALL DATABASE CONNECTIONS
from sherlock import database
db = database(log=log, settings=settings)
dbConns, dbVersions = db.connect()
transientsDbConn = dbConns["transients"]
cataloguesDbConn = dbConns["catalogues"]

from fundamentals.mysql import directory_script_runner
directory_script_runner(
    log=log,
    pathToScriptDirectory=pathToInputDir.replace("/input", "/resources") +
    "/transient_database",
    dbConn=transientsDbConn)


class test_transient_classifier(unittest.TestCase):
    def test_transient_update_classified_annotations_function(self):

        from sherlock import transient_classifier
        this = transient_classifier(log=log, settings=settings, update=True)
        # this.update_peak_magnitudes()
        this.update_classification_annotations_and_summaries()

    def test_transient_classifier_function(self):

        from sherlock import transient_classifier