Example 1
def main(arguments=None):
    """
    *The main function used when ``mjd_to_date.py`` is run as a single script from the cl, or when installed as a cl command*
    """
    # set up the command-line util settings
    su = tools(
        arguments=arguments,
        docString=__doc__,
        logLevel="DEBUG",
        options_first=False
    )
    arguments, settings, log, dbConn = su.setup()

    # tab completion for raw_input
    readline.set_completer_delims(' \t\n;')
    readline.parse_and_bind("tab: complete")
    readline.set_completer(tab_complete)

    # unpack remaining cl arguments using `exec` to set up the variable names
    # automatically
    for arg, val in arguments.iteritems():
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        if isinstance(val, str) or isinstance(val, unicode):
            exec(varname + " = '%s'" % (val,))
        else:
            exec(varname + " = %s" % (val,))
        if arg == "--dbConn":
            dbConn = val
        log.debug('%s = %s' % (varname, val,))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info(
        '--- STARTING TO RUN THE mjd_to_date.py AT %s' %
        (startTime,))

    # call the worker function
    # x-if-settings-or-database-credentials
    thisDate = mjd_to_date(
        log=log,
        mjd=float(mjd),
        fraction=fractionFlag,
        sqlDate=sqlDateFlag
    )
    print thisDate.get()

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info('-- FINISHED ATTEMPT TO RUN THE mjd_to_date.py AT %s (RUNTIME: %s) --' %
             (endTime, runningTime, ))

    return
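
The ``mjd_to_date`` worker itself is not shown in this listing. For orientation: Modified Julian Date is a day count from midnight UT on 1858 November 17, so a minimal standalone conversion (an illustrative sketch, not the package's implementation) looks like this:

from datetime import datetime, timedelta

MJD_EPOCH = datetime(1858, 11, 17)  # MJD 0 = 1858-11-17T00:00:00 UT

def mjd_to_datetime(mjd):
    # add the (possibly fractional) day count to the MJD zero-point
    return MJD_EPOCH + timedelta(days=mjd)

print(mjd_to_datetime(57504.61577))  # ~2016-04-26 14:46:42 UT
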
def main(arguments=None):
    """
    *The main function used when ``convert_mysql_database_to_innodb.py`` is run as a single script from the cl, or when installed as a cl command*
    """
    ########## IMPORTS ##########
    ## STANDARD LIB ##
    ## THIRD PARTY ##
    ## LOCAL APPLICATION ##
    from fundamentals import tools, times

    su = tools(
        arguments=arguments,
        docString=__doc__
    )
    arguments, settings, log, dbConn = su.setup()

    # unpack remaining cl arguments using `exec` to set up the variable names
    # automatically
    for arg, val in arguments.iteritems():
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        if isinstance(val, str) or isinstance(val, unicode):
            exec(varname + " = '%s'" % (val,))
        else:
            exec(varname + " = %s" % (val,))
        if arg == "--dbConn":
            dbConn = val
        log.debug('%s = %s' % (varname, val,))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info(
        '--- STARTING TO RUN THE convert_mysql_database_to_innodb.py AT %s' %
        (startTime,))

    if "tableSchema" not in locals():
        tableSchema = False

    # call the worker function
    # x-if-settings-or-database-credentials
    convert_mysql_database_to_innodb(
        log=log,
        dbConn=dbConn,
        tableSchema=tableSchema,
    )

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info(
        '-- FINISHED ATTEMPT TO RUN THE convert_mysql_database_to_innodb.py AT %s (RUNTIME: %s) --' %
        (endTime, runningTime, ))

    return
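
``convert_mysql_database_to_innodb`` is not reproduced here. In outline it only needs to walk ``information_schema`` and issue ``ALTER TABLE`` statements; a minimal sketch along those lines (assuming a MySQLdb/pymysql-style connection, not the package's own code):

def convert_tables_to_innodb(dbConn, dbName):
    # find every table in the schema not already using the InnoDB engine
    cur = dbConn.cursor()
    cur.execute(
        "SELECT TABLE_NAME FROM information_schema.TABLES "
        "WHERE TABLE_SCHEMA = %s AND ENGINE != 'InnoDB'", (dbName,))
    for (tableName,) in cur.fetchall():
        # rebuild the table with the InnoDB engine
        cur.execute("ALTER TABLE `%s`.`%s` ENGINE=InnoDB" % (dbName, tableName))
    dbConn.commit()
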
def main(arguments=None):
    """
    *The main function used when ``get_angular_separation.py`` is run as a single script from the cl, or when installed as a cl command*
    """
    # set up the command-line util settings
    su = tools(
        arguments=arguments,
        docString=__doc__,
        logLevel="WARNING",
        options_first=True
    )
    arguments, settings, log, dbConn = su.setup()

    # unpack remaining cl arguments using `exec` to set up the variable names
    # automatically
    for arg, val in arguments.iteritems():
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        if isinstance(val, str) or isinstance(val, unicode):
            exec(varname + " = '%s'" % (val,))
        else:
            exec(varname + " = %s" % (val,))
        if arg == "--dbConn":
            dbConn = val
        log.debug('%s = %s' % (varname, val,))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info(
        '--- STARTING TO RUN THE get_angular_separation.py AT %s' %
        (startTime,))

    # call the worker function
    # x-if-settings-or-database-credentials
    angularSeparation, north, east = get_angular_separation(
        log=log,
        ra1=ra1,
        dec1=dec1,
        ra2=ra2,
        dec2=dec2
    )

    print """%(angularSeparation)6.4f\" (%(north)6.4f\" N, %(east)6.4f\" E)""" % locals()

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info('-- FINISHED ATTEMPT TO RUN THE get_angular_separation.py AT %s (RUNTIME: %s) --' %
             (endTime, runningTime, ))

    return
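
For reference, the same quantities can be computed with astropy (an independent illustration, not how ``get_angular_separation`` is implemented): ``separation`` gives the great-circle distance and ``spherical_offsets_to`` the east/north components.

from astropy.coordinates import SkyCoord
import astropy.units as u

c1 = SkyCoord(ra=10.624 * u.deg, dec=41.269 * u.deg)  # hypothetical coordinates
c2 = SkyCoord(ra=10.625 * u.deg, dec=41.270 * u.deg)

sep = c1.separation(c2)                    # great-circle separation
east, north = c1.spherical_offsets_to(c2)  # offsets along RA and Dec
print('%0.4f" (%0.4f" N, %0.4f" E)' % (sep.arcsec, north.arcsec, east.arcsec))
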
def main(arguments=None):
    """
    *The main function used when ``convert_excel_workbook_to_binary_fits_table.py`` is run as a single script from the cl, or when installed as a cl command*
    """
    # set up the command-line util settings
    su = tools(
        arguments=arguments,
        docString=__doc__,
        logLevel="WARNING",
        options_first=False
    )
    arguments, settings, log, dbConn = su.setup()

    # unpack remaining cl arguments using `exec` to set up the variable names
    # automatically
    for arg, val in arguments.iteritems():
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        if isinstance(val, str) or isinstance(val, unicode):
            exec(varname + " = '%s'" % (val,))
        else:
            exec(varname + " = %s" % (val,))
        if arg == "--dbConn":
            dbConn = val
        log.debug('%s = %s' % (varname, val,))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info(
        '--- STARTING TO RUN THE convert_excel_workbook_to_binary_fits_table.py AT %s' %
        (startTime,))

    # call the worker function
    fitsFile = convert_excel_workbook_to_binary_fits_table(
        log=log,
        pathToWorkbook=pathToExcelFile,
        pathToOutputFits=pathToOutputFits
    )
    fitsFile.get()

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info('-- FINISHED ATTEMPT TO RUN THE convert_excel_workbook_to_binary_fits_table.py AT %s (RUNTIME: %s) --' %
             (endTime, runningTime, ))

    return
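
The worker function is not shown, but the conversion it performs can be sketched with pandas and astropy (an illustration under those assumptions, not the package's code): read the worksheet into a DataFrame, then write it out as a binary FITS table.

import pandas as pd
from astropy.table import Table

def excel_to_fits(pathToWorkbook, pathToOutputFits):
    # read the first worksheet into a DataFrame
    df = pd.read_excel(pathToWorkbook)
    # convert to an astropy Table and write as a binary FITS table
    Table.from_pandas(df).write(pathToOutputFits, format="fits", overwrite=True)
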
Example 5
def main(arguments=None):
    """
    *The main function used when ``cl_utils.py`` is run as a single script from the cl, or when installed as a cl command*
    """
    # set up the command-line util settings
    su = tools(arguments=arguments,
               docString=__doc__,
               logLevel="WARNING",
               options_first=False,
               projectName="qubits")
    arguments, settings, log, dbConn = su.setup()

    # unpack remaining cl arguments using `exec` to set up the variable names
    # automatically
    for arg, val in arguments.iteritems():
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        if varname == "import":
            varname = "iimport"
        if isinstance(val, str) or isinstance(val, unicode):
            exec(varname + " = '%s'" % (val, ))
        else:
            exec(varname + " = %s" % (val, ))
        if arg == "--dbConn":
            dbConn = val
        log.debug('%s = %s' % (
            varname,
            val,
        ))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info('--- STARTING TO RUN THE cl_utils.py AT %s' % (startTime, ))

    if init:
        from . import workspace
        ws = workspace(log=log, pathToWorkspace=pathToWorkspace)
        ws.setup()
        return

    # IMPORT THE SIMULATION SETTINGS
    (allSettings, programSettings, limitingMags, sampleNumber,
     peakMagnitudeDistributions, explosionDaysFromSettings,
     extendLightCurveTail, relativeSNRates, lowerRedshiftLimit,
     upperRedshiftLimit, redshiftResolution, restFrameFilter,
     kCorrectionTemporalResolution, kCorPolyOrder, kCorMinimumDataPoints,
     extinctionType, extinctionConstant, hostExtinctionDistributions,
     galacticExtinctionDistribution, surveyCadenceSettings, snLightCurves,
     surveyArea, CCSNRateFraction, transientToCCSNRateFraction,
     extraSurveyConstraints, lightCurvePolyOrder,
     logLevel) = cu.read_in_survey_parameters(
         log, pathToSettingsFile=pathToSettingsFile)

    logFilePath = pathToOutputDirectory + "/qubits.log"
    del log
    log = _set_up_command_line_tool(level=str(logLevel),
                                    logFilePath=logFilePath)

    # dbConn, log = cu.settings(
    #     pathToSettingsFile=pathToSettingsFile,
    #     dbConn=False,
    #     log=True
    # )

    ## START LOGGING ##
    startTime = dcu.get_now_sql_datetime()
    log.info('--- STARTING TO RUN THE qubits AT %s' % (startTime, ))

    resultsDict = {}

    pathToOutputPlotDirectory = pathToOutputDirectory + "/plots/"
    dcu.dryx_mkdir(log, directoryPath=pathToOutputPlotDirectory)

    pathToResultsFolder = pathToOutputDirectory + "/results/"
    dcu.dryx_mkdir(log, directoryPath=pathToResultsFolder)

    if (not programSettings['Extract Lightcurves from Spectra']
            and not programSettings['Generate KCorrection Database']
            and not programSettings['Run the Simulation']
            and not programSettings['Compile and Plot Results']):
        print "All stages of the simulation have been switched off. Please switch on at least one stage of the simulation under the 'Programming Settings' in the settings file `%(pathToSettingsFile)s`" % locals()

    # GENERATE THE DATA FOR SIMULATIONS
    if programSettings['Extract Lightcurves from Spectra']:
        log.info('generating the Lightcurves')
        dg.generate_model_lightcurves(
            log=log,
            pathToSpectralDatabase=pathToSpectralDatabase,
            pathToOutputDirectory=pathToOutputDirectory,
            pathToOutputPlotDirectory=pathToOutputPlotDirectory,
            explosionDaysFromSettings=explosionDaysFromSettings,
            extendLightCurveTail=extendLightCurveTail,
            polyOrder=lightCurvePolyOrder)
        print "The lightcurve file can be found here: %(pathToOutputDirectory)stransient_light_curves.yaml" % locals(
        )
        print "The lightcurve plots can be found in %(pathToOutputPlotDirectory)s" % locals(
        )

    if programSettings['Generate KCorrection Database']:
        log.info('generating the kcorrection data')
        dg.generate_kcorrection_listing_database(
            log,
            pathToOutputDirectory=pathToOutputDirectory,
            pathToSpectralDatabase=pathToSpectralDatabase,
            restFrameFilter=restFrameFilter,
            temporalResolution=kCorrectionTemporalResolution,
            redshiftResolution=redshiftResolution,
            redshiftLower=lowerRedshiftLimit,
            redshiftUpper=upperRedshiftLimit + redshiftResolution)
        log.info('generating the kcorrection polynomials')
        dg.generate_kcorrection_polynomial_database(
            log,
            pathToOutputDirectory=pathToOutputDirectory,
            restFrameFilter=restFrameFilter,
            kCorPolyOrder=kCorPolyOrder,  # ORDER OF THE POLYNOMIAL TO FIT
            kCorMinimumDataPoints=kCorMinimumDataPoints,
            redshiftResolution=redshiftResolution,
            redshiftLower=lowerRedshiftLimit,
            redshiftUpper=upperRedshiftLimit + redshiftResolution,
            plot=programSettings['Generate KCorrection Plots'])

        print "The k-correction database has been generated here: %(pathToOutputDirectory)sk_corrections" % locals(
        )
        if programSettings['Generate KCorrection Plots']:
            print "The k-correction polynomial plots can also be found in %(pathToOutputDirectory)sk_corrections" % locals(
            )

    if programSettings['Run the Simulation']:
        # CREATE THE OBSERVABLE UNIVERSE!
        log.info('generating the redshift array')
        redshiftArray = u.random_redshift_array(
            log,
            sampleNumber,
            lowerRedshiftLimit,
            upperRedshiftLimit,
            redshiftResolution=redshiftResolution,
            pathToOutputPlotDirectory=pathToOutputPlotDirectory,
            plot=programSettings['Plot Simulation Helper Plots'])
        resultsDict['Redshifts'] = redshiftArray.tolist()

        log.info('generating the SN type array')
        snTypesArray = u.random_sn_types_array(
            log,
            sampleNumber,
            relativeSNRates,
            pathToOutputPlotDirectory=pathToOutputPlotDirectory,
            plot=programSettings['Plot Simulation Helper Plots'])
        resultsDict['SN Types'] = snTypesArray.tolist()

        log.info('generating peak magnitudes for the SNe')
        peakMagnitudesArray = u.random_peak_magnitudes(
            log,
            peakMagnitudeDistributions,
            snTypesArray,
            plot=programSettings['Plot Simulation Helper Plots'])

        log.info('generating the SN host extinctions array')
        hostExtinctionArray = u.random_host_extinction(
            log,
            sampleNumber,
            extinctionType,
            extinctionConstant,
            hostExtinctionDistributions,
            plot=programSettings['Plot Simulation Helper Plots'])

        log.info('generating the SN galactic extinctions array')
        galacticExtinctionArray = u.random_galactic_extinction(
            log,
            sampleNumber,
            extinctionType,
            extinctionConstant,
            galacticExtinctionDistribution,
            plot=programSettings['Plot Simulation Helper Plots'])

        log.info('generating the raw lightcurves for the SNe')
        rawLightCurveDict = u.generate_numpy_polynomial_lightcurves(
            log,
            snLightCurves=snLightCurves,
            pathToOutputDirectory=pathToOutputDirectory,
            pathToOutputPlotDirectory=pathToOutputPlotDirectory,
            plot=programSettings['Plot Simulation Helper Plots'])

        log.info('generating the k-correction array for the SNe')
        kCorrectionArray = u.build_kcorrection_array(
            log,
            redshiftArray,
            snTypesArray,
            snLightCurves,
            pathToOutputDirectory=pathToOutputDirectory,
            plot=programSettings['Plot Simulation Helper Plots'])

        log.info('generating the observed lightcurves for the SNe')
        observedFrameLightCurveInfo, peakAppMagList = u.convert_lightcurves_to_observered_frame(
            log,
            snLightCurves=snLightCurves,
            rawLightCurveDict=rawLightCurveDict,
            redshiftArray=redshiftArray,
            snTypesArray=snTypesArray,
            peakMagnitudesArray=peakMagnitudesArray,
            kCorrectionArray=kCorrectionArray,
            hostExtinctionArray=hostExtinctionArray,
            galacticExtinctionArray=galacticExtinctionArray,
            restFrameFilter=restFrameFilter,
            pathToOutputDirectory=pathToOutputDirectory,
            pathToOutputPlotDirectory=pathToOutputPlotDirectory,
            polyOrder=lightCurvePolyOrder,
            plot=programSettings['Plot Simulation Helper Plots'])

        log.info('generating the survey observation cadence')
        cadenceDictionary = ss.survey_cadence_arrays(
            log,
            surveyCadenceSettings,
            pathToOutputDirectory=pathToOutputDirectory,
            pathToOutputPlotDirectory=pathToOutputPlotDirectory,
            plot=programSettings['Plot Simulation Helper Plots'])

        log.info('determining if the SNe are discoverable by the survey')
        discoverableList = ss.determine_if_sne_are_discoverable(
            log,
            redshiftArray=redshiftArray,
            limitingMags=limitingMags,
            observedFrameLightCurveInfo=observedFrameLightCurveInfo,
            pathToOutputDirectory=pathToOutputDirectory,
            pathToOutputPlotDirectory=pathToOutputPlotDirectory,
            plot=programSettings['Plot Simulation Helper Plots'])

        log.info(
            'determining the day (if and) when each SN is first discoverable by the survey'
        )
        ripeDayList = ss.determine_when_sne_are_ripe_for_discovery(
            log,
            redshiftArray=redshiftArray,
            limitingMags=limitingMags,
            discoverableList=discoverableList,
            observedFrameLightCurveInfo=observedFrameLightCurveInfo,
            plot=programSettings['Plot Simulation Helper Plots'])

        # log.info('determining the day when each SN is disappears fainter than the survey limiting mags')
        # disappearDayList = determine_when_discovered_sne_disappear(
        #     log,
        #     redshiftArray=redshiftArray,
        #     limitingMags=limitingMags,
        #     ripeDayList=ripeDayList,
        #     observedFrameLightCurveInfo=observedFrameLightCurveInfo,
        #     plot=programSettings['Plot Simulation Helper Plots'])

        log.info('determining if and when each SN is discovered by the survey')
        lightCurveDiscoveryDayList, surveyDiscoveryDayList, snCampaignLengthList = ss.determine_if_sne_are_discovered(
            log,
            limitingMags=limitingMags,
            ripeDayList=ripeDayList,
            cadenceDictionary=cadenceDictionary,
            observedFrameLightCurveInfo=observedFrameLightCurveInfo,
            extraSurveyConstraints=extraSurveyConstraints,
            plot=programSettings['Plot Simulation Helper Plots'])

        resultsDict[
            'Discoveries Relative to Peak Magnitudes'] = lightCurveDiscoveryDayList
        resultsDict[
            'Discoveries Relative to Survey Year'] = surveyDiscoveryDayList
        resultsDict['Campaign Length'] = snCampaignLengthList
        resultsDict['Cadence Dictionary'] = cadenceDictionary
        resultsDict['Peak Apparent Magnitudes'] = peakAppMagList

        now = datetime.now()
        now = now.strftime("%Y%m%dt%H%M%S")
        fileName = pathToOutputDirectory + \
            "/simulation_results_%s.yaml" % (now,)
        stream = open(fileName, 'w')
        yamlContent = dict(allSettings.items() + resultsDict.items())
        yaml.dump(yamlContent, stream, default_flow_style=False)
        stream.close()

        print "The simulation output file can be found here: %(fileName)s. Remember to update your settings file 'Simulation Results File Used for Plots' parameter with this filename before compiling the results." % locals(
        )
        if programSettings['Plot Simulation Helper Plots']:
            print "The simulation helper-plots found in %(pathToOutputPlotDirectory)s" % locals(
            )

    # COMPILE AND PLOT THE RESULTS
    if programSettings['Compile and Plot Results']:
        pathToYamlFile = pathToOutputDirectory + "/" + \
            programSettings['Simulation Results File Used for Plots']
        result_log = r.log_the_survey_settings(log, pathToYamlFile)
        snSurveyDiscoveryTimes, lightCurveDiscoveryTimes, snTypes, redshifts, cadenceDictionary, peakAppMagList, snCampaignLengthList = r.import_results(
            log, pathToYamlFile)
        snRatePlotLink, totalRate, tooFaintRate, shortCampaignRate = r.determine_sn_rate(
            log,
            lightCurveDiscoveryTimes,
            snSurveyDiscoveryTimes,
            redshifts,
            surveyCadenceSettings=surveyCadenceSettings,
            lowerRedshiftLimit=lowerRedshiftLimit,
            upperRedshiftLimit=upperRedshiftLimit,
            redshiftResolution=redshiftResolution,
            surveyArea=surveyArea,
            CCSNRateFraction=CCSNRateFraction,
            transientToCCSNRateFraction=transientToCCSNRateFraction,
            peakAppMagList=peakAppMagList,
            snCampaignLengthList=snCampaignLengthList,
            extraSurveyConstraints=extraSurveyConstraints,
            pathToOutputPlotFolder=pathToOutputPlotDirectory)
        result_log += """
## Results ##

This simulated survey discovered a total of **%s** transients per year. An extra **%s** transients were detected but deemed too faint to constrain a positive transient identification and a further **%s** transients were detected but an observational campaign of more than **%s** days could not be completed to ensure identification. See below for the various output plots.

        """ % (
            totalRate, tooFaintRate, shortCampaignRate,
            extraSurveyConstraints["Observable for at least ? number of days"])
        cadenceWheelLink = r.plot_cadence_wheel(
            log,
            cadenceDictionary,
            pathToOutputPlotFolder=pathToOutputPlotDirectory)
        result_log += """%s""" % (cadenceWheelLink, )
        discoveryMapLink = r.plot_sn_discovery_map(
            log,
            snSurveyDiscoveryTimes,
            peakAppMagList,
            snCampaignLengthList,
            redshifts,
            extraSurveyConstraints,
            pathToOutputPlotFolder=pathToOutputPlotDirectory)
        result_log += """%s""" % (discoveryMapLink, )
        ratioMapLink = r.plot_sn_discovery_ratio_map(
            log,
            snSurveyDiscoveryTimes,
            redshifts,
            peakAppMagList,
            snCampaignLengthList,
            extraSurveyConstraints,
            pathToOutputPlotFolder=pathToOutputPlotDirectory)
        result_log += """%s""" % (ratioMapLink, )
        result_log += """%s""" % (snRatePlotLink, )

        now = datetime.now()
        now = now.strftime("%Y%m%dt%H%M%S")
        mdLogPath = pathToResultsFolder + \
            "simulation_result_log_%s.md" % (now,)
        mdLog = open(mdLogPath, 'w')
        mdLog.write(result_log)
        mdLog.close()

        dmd.convert_to_html(log=log, pathToMMDFile=mdLogPath, css="amblin")

        print "Results can be found here: %(pathToResultsFolder)s" % locals()
        html = mdLogPath.replace(".md", ".html")
        print "Open this file in your browser: %(html)s" % locals()

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info(
        '-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --' % (
            endTime,
            runningTime,
        ))

    return
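
The 'Compile and Plot Results' stage reads the ``simulation_results_*.yaml`` file written above back in through ``r.import_results``. For orientation, loading that dump directly is a plain ``yaml.safe_load``; the keys below are the ones populated into ``resultsDict`` above (the filename is hypothetical):

import yaml

with open("simulation_results_20160101t120000.yaml") as stream:  # hypothetical filename
    results = yaml.safe_load(stream)

redshifts = results["Redshifts"]
snTypes = results["SN Types"]
campaignLengths = results["Campaign Length"]
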
def main(arguments=None):
    """
    *The main function used when ``update_request_watcher.py`` is run as a single script from the cl, or when installed as a cl command*
    """
    from dryxPython import commonutils as dcu
    from dryxPython.projectsetup import setup_main_clutil
    from dryxPython import logs as dl
    su = tools(
        arguments=arguments,
        docString=__doc__,
        logLevel="WARNING"
    )
    arguments, settings, log, dbConn = su.setup()

    # unpack remaining cl arguments using `exec` to set up the variable names
    # automatically
    for arg, val in arguments.iteritems():
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        if isinstance(val, str) or isinstance(val, unicode):
            exec(varname + " = '%s'" % (val,))
        else:
            exec(varname + " = %s" % (val,))
        if arg == "--dbConn":
            dbConn = val
        log.debug('%s = %s' % (varname, val,))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info(
        '--- STARTING TO RUN THE update_request_watcher.py AT %s' %
        (startTime,))

    # find all update request files in "/_updates_required_" folder
    basePath = pathToGitRepos + "/_updates_required_"
    for d in os.listdir(basePath):
        if os.path.isfile(os.path.join(basePath, d)):
            if "gitupdates" in d:
                thisRepo = d.replace(".gitupdates", "")

                # check local git-repo requiring an update actually exists --
                # trigger pull if it does
                pathToRepo = pathToGitRepos + "/" + thisRepo
                if not os.path.exists(pathToRepo):
                    message = "the path to the Folder folder %s does not exist on this machine" % (
                        pathToRepo,)
                    log.warning(message)
                else:
                    dcu.update_git_repos.update_git_repos(
                        log=log,
                        gitProjectRoot=pathToRepo,
                        branchToUpdate="master",
                        installClUtils=True
                    )

                # finally delete the update request file
                os.remove(os.path.join(basePath, d))

    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info('-- FINISHED ATTEMPT TO RUN THE update_request_watcher.py AT %s (RUNTIME: %s) --' %
             (endTime, runningTime, ))

    return
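
``dcu.update_git_repos.update_git_repos`` performs the actual pull. For illustration only, the equivalent plumbing with the plain git CLI through subprocess (assuming git is on the PATH) is roughly:

import subprocess

def pull_repo(pathToRepo, branch="master"):
    # fetch and merge the requested branch in the given working copy
    subprocess.check_call(["git", "-C", pathToRepo, "pull", "origin", branch])
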
Example 7
def convert_dictionary_to_mysql_table(log,
                                      dictionary,
                                      dbTableName,
                                      uniqueKeyList=[],
                                      dbConn=False,
                                      createHelperTables=False,
                                      dateModified=False,
                                      returnInsertOnly=False,
                                      replace=False,
                                      batchInserts=True,
                                      reDatetime=False,
                                      skipChecks=False):
    """convert dictionary to mysql table

    **Key Arguments:**
        - ``log`` -- logger
        - ``dictionary`` -- python dictionary
        - ``dbConn`` -- the db connection
        - ``dbTableName`` -- name of the table you wish to add the data to (or create if it does not exist)
        - ``uniqueKeyList`` -- a list of column names that are combined to create the primary key
        - ``createHelperTables`` -- create some helper tables with the main table, detailing original keywords etc
        - ``returnInsertOnly`` -- returns only the insert command (does not execute it)
        - ``dateModified`` -- add a modification date to the mysql table
        - ``replace`` -- use replace instead of mysql insert statements (useful when updates are required)
        - ``batchInserts`` -- if returning insert statements return separate insert commands and value tuples
        - ``reDatetime`` -- compiled regular expression matching datetime (passing this in cuts down on execution time as it doesn't have to be recompiled every time during multiple iterations of ``convert_dictionary_to_mysql_table``)
        - ``skipChecks`` -- skip reliability checks. Less robust but a little faster.

    **Return:**
        - ``returnInsertOnly`` -- the insert statement if requested

    **Usage:**

        To add a python dictionary to a database table, creating the table and/or columns if they don't yet exist:

        .. code-block:: python

            from fundamentals.mysql import convert_dictionary_to_mysql_table
            dictionary = {"a newKey": "cool", "and another": "super cool",
                      "uniquekey1": "cheese", "uniqueKey2": "burgers"}

            convert_dictionary_to_mysql_table(
                dbConn=dbConn,
                log=log,
                dictionary=dictionary,
                dbTableName="testing_table",
                uniqueKeyList=["uniquekey1", "uniqueKey2"],
                dateModified=False,
                returnInsertOnly=False,
                replace=True
            )

        Or just return the insert statement with a list of value tuples, i.e. do not execute the command on the database:

        .. code-block:: python

            insertCommand, valueTuple = convert_dictionary_to_mysql_table(
                dbConn=dbConn,
                log=log,
                dictionary=dictionary,
                dbTableName="testing_table",
                uniqueKeyList=["uniquekey1", "uniqueKey2"],
                dateModified=False,
                returnInsertOnly=True,
                replace=False,
                batchInserts=True
            )

            print insertCommand, valueTuple

            # OUT: 'INSERT IGNORE INTO `testing_table`
            # (a_newKey,and_another,dateCreated,uniqueKey2,uniquekey1) VALUES
            # (%s, %s, %s, %s, %s)', ('cool', 'super cool',
            # '2016-06-21T12:08:59', 'burgers', 'cheese')

        You can also return a list of single insert statements using ``batchInserts = False``. Using ``replace = True`` will also add instructions about how to replace duplicate entries in the database table if found:

        .. code-block:: python

            inserts = convert_dictionary_to_mysql_table(
                dbConn=dbConn,
                log=log,
                dictionary=dictionary,
                dbTableName="testing_table",
                uniqueKeyList=["uniquekey1", "uniqueKey2"],
                dateModified=False,
                returnInsertOnly=True,
                replace=True,
                batchInserts=False
            )

            print inserts

            # OUT: INSERT INTO `testing_table` (a_newKey,and_another,dateCreated,uniqueKey2,uniquekey1)
            # VALUES ("cool" ,"super cool" ,"2016-09-14T13:12:08" ,"burgers" ,"cheese")
            # ON DUPLICATE KEY UPDATE  a_newKey="cool", and_another="super
            # cool", dateCreated="2016-09-14T13:12:08", uniqueKey2="burgers",
            # uniquekey1="cheese"
    """

    log.info('starting the ``convert_dictionary_to_mysql_table`` function')

    if not reDatetime:
        reDatetime = re.compile('^[0-9]{4}-[0-9]{2}-[0-9]{2}T')

    if not replace:
        insertVerb = "INSERT"
    else:
        insertVerb = "INSERT IGNORE"

    if returnInsertOnly == False:
        # TEST THE ARGUMENTS
        if str(type(dbConn).__name__) != "Connection":
            message = 'Please use a valid MySQL DB connection.'
            log.critical(message)
            raise TypeError(message)

        if not isinstance(dictionary, dict):
            message = 'Please make sure "dictionary" argument is a dict type.'
            log.critical(message)
            raise TypeError(message)

        if not isinstance(uniqueKeyList, list):
            message = 'Please make sure "uniqueKeyList" is a list'
            log.critical(message)
            raise TypeError(message)

        for i in uniqueKeyList:
            if i not in dictionary.keys():
                message = 'Please make sure values in "uniqueKeyList" are present in the "dictionary" you are tring to convert'
                log.critical(message)
                raise ValueError(message)

        for k, v in dictionary.iteritems():
            # log.debug('k: %s, v: %s' % (k, v,))
            if isinstance(v, list) and len(v) != 2:
                message = 'Please make sure the list values in "dictionary" are 2 items in length'
                log.critical("%s: in %s we have a %s (%s)" %
                             (message, k, v, type(v)))
                raise ValueError(message)
            if isinstance(v, list):
                if not (isinstance(v[0], str) or isinstance(v[0], int)
                        or isinstance(v[0], bool) or isinstance(v[0], float)
                        or isinstance(v[0], long)
                        or isinstance(v[0], datetime.date) or v[0] == None):
                    message = 'Please make sure values in "dictionary" are of an appropriate value to add to the database, must be str, float, int or bool'
                    log.critical("%s: in %s we have a %s (%s)" %
                                 (message, k, v, type(v)))
                    raise ValueError(message)
            else:
                if not (isinstance(v, str) or isinstance(v, int)
                        or isinstance(v, bool) or isinstance(v, float)
                        or isinstance(v, long) or isinstance(v, unicode)
                        or isinstance(v, datetime.date) or v == None):
                    this = type(v)
                    message = 'Please make sure values in "dictionary" are of an appropriate value to add to the database, must be str, float, int or bool : %(k)s is a %(this)s' % locals(
                    )
                    log.critical("%s: in %s we have a %s (%s)" %
                                 (message, k, v, type(v)))
                    raise ValueError(message)

        if not isinstance(createHelperTables, bool):
            message = 'Please make sure "createHelperTables" is a True or False'
            log.critical(message)
            raise TypeError(message)

        # TEST IF TABLE EXISTS
        if not skipChecks:
            tableExists = table_exists.table_exists(dbConn=dbConn,
                                                    log=log,
                                                    dbTableName=dbTableName)
        else:
            tableExists = False

        # CREATE THE TABLE IF IT DOES NOT EXIST
        if tableExists is False:
            sqlQuery = """
                CREATE TABLE `%(dbTableName)s`
                (`primaryId` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'An internal counter',
                `dateCreated` DATETIME NULL DEFAULT CURRENT_TIMESTAMP,
                `dateLastModified` DATETIME NULL DEFAULT CURRENT_TIMESTAMP,
                `updated` tinyint(4) DEFAULT '0',
                PRIMARY KEY (`primaryId`))
                ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=latin1;
            """ % locals()
            writequery(
                log=log,
                sqlQuery=sqlQuery,
                dbConn=dbConn,
            )

    qCreateColumn = ''
    formattedKey = ''
    formattedKeyList = []
    myValues = []

    # ADD EXTRA COLUMNS TO THE DICTIONARY todo: do I need this?
    if dateModified and replace == False:
        dictionary['dateLastModified'] = [
            str(times.get_now_sql_datetime()), "date row was modified"
        ]
        dictionary['updated'] = [0, "this row has been updated"]

    # ITERATE THROUGH THE DICTIONARY AND GENERATE THE TABLE COLUMN WITH THE
    # NAME OF THE KEY, IF IT DOES NOT EXIST
    count = len(dictionary)
    i = 1
    for (key, value) in dictionary.items():
        if (isinstance(value, list) and value[0] is None):
            del dictionary[key]
    # SORT THE DICTIONARY BY KEY
    odictionary = c.OrderedDict(sorted(dictionary.items()))
    for (key, value) in odictionary.iteritems():

        formattedKey = key.replace(" ", "_").replace("-", "_")
        # DEC IS A KEYWORD IN MYSQL - NEED TO CHANGE BEFORE INGEST
        if formattedKey == "dec":
            formattedKey = "decl"
        if formattedKey == "DEC":
            formattedKey = "DECL"

        formattedKeyList.extend([formattedKey])
        if len(key) > 0:
            # CONVERT LIST AND FEEDPARSER VALUES TO YAML (SO I CAN PASS IT AS A
            # STRING TO MYSQL)
            if isinstance(value, list) and (isinstance(value[0], list)):
                value[0] = yaml.dump(value[0])
                value[0] = str(value[0])
            # REMOVE CHARACTERS THAT COLLIDE WITH MYSQL
            # if type(value[0]) == str or type(value[0]) == unicode:
            #     value[0] = value[0].replace('"', """'""")
            # JOIN THE VALUES TOGETHER IN A LIST - EASIER TO GENERATE THE MYSQL
            # COMMAND LATER
            if isinstance(value, str):
                value = value.replace('\\', '\\\\')
                value = value.replace('"', '\\"')
                try:
                    udata = value.decode("utf-8", "ignore")
                    value = udata.encode("ascii", "ignore")
                except:
                    log.error('could not decode value %(value)s' % locals())

                # log.debug('udata: %(udata)s' % locals())

            if isinstance(value, unicode):
                value = value.replace('"', '\\"')
                value = value.encode("ascii", "ignore")

            if isinstance(value, list) and isinstance(value[0], unicode):
                myValues.extend(['%s' % value[0].strip()])
            elif isinstance(value, list):
                myValues.extend(['%s' % (value[0], )])
            else:
                myValues.extend(['%s' % (value, )])

            if returnInsertOnly == False:
                # CHECK IF COLUMN EXISTS YET
                colExists = \
                    "SELECT * FROM information_schema.COLUMNS WHERE TABLE_SCHEMA=DATABASE() AND COLUMN_NAME='" + \
                    formattedKey + "' AND TABLE_NAME='" + dbTableName + "'"
                try:
                    # log.debug('checking if the column '+formattedKey+' exists
                    # in the '+dbTableName+' table')

                    rows = readquery(
                        log=log,
                        sqlQuery=colExists,
                        dbConn=dbConn,
                    )
                except Exception as e:
                    log.error('something went wrong' + str(e) + '\n')

                # IF COLUMN DOESN'T EXIST - GENERATE IT
                if len(rows) == 0:
                    qCreateColumn = """ALTER TABLE `%s` ADD `%s""" % (
                        dbTableName, formattedKey)
                    if not isinstance(value, list):
                        value = [value]
                    if reDatetime.search(str(value[0])):
                        # log.debug('Ok - a datetime string was found')
                        qCreateColumn += '` datetime DEFAULT NULL'
                    elif formattedKey == 'updated_parsed' or formattedKey == 'published_parsed' or formattedKey \
                            == 'feedName' or formattedKey == 'title':
                        qCreateColumn += '` varchar(100) DEFAULT NULL'
                    elif (isinstance(value[0], str) or isinstance(
                            value[0], unicode)) and len(value[0]) < 30:
                        qCreateColumn += '` varchar(100) DEFAULT NULL'
                    elif (isinstance(value[0], str)
                          or isinstance(value[0], unicode)) and len(
                              value[0]) >= 30 and len(value[0]) < 80:
                        qCreateColumn += '` varchar(100) DEFAULT NULL'
                    elif isinstance(value[0], str) or isinstance(
                            value[0], unicode):
                        columnLength = 450 + len(value[0]) * 2
                        qCreateColumn += '` varchar(' + str(
                            columnLength) + ') DEFAULT NULL'
                    elif isinstance(value[0], int) and abs(value[0]) <= 9:
                        qCreateColumn += '` tinyint DEFAULT NULL'
                    elif isinstance(value[0], int):
                        qCreateColumn += '` int DEFAULT NULL'
                    elif isinstance(value[0], float) or isinstance(
                            value[0], long):
                        qCreateColumn += '` double DEFAULT NULL'
                    elif isinstance(value[0], bool):
                        qCreateColumn += '` tinyint DEFAULT NULL'
                    elif isinstance(value[0], list):
                        qCreateColumn += '` varchar(1024) DEFAULT NULL'
                    else:
                        # log.debug('Do not know what format to add this key in
                        # MySQL - removing from dictionary: %s, %s'
                        # % (key, type(value[0])))
                        formattedKeyList.pop()
                        myValues.pop()
                        qCreateColumn = None
                    if qCreateColumn:
                        # ADD COMMENT TO GIVE THE ORIGINAL KEYWORD IF FORMATTED
                        # FOR MYSQL
                        if key != formattedKey:
                            qCreateColumn += " COMMENT 'original keyword: " + \
                                key + """'"""
                        # CREATE THE COLUMN IF IT DOES NOT EXIST
                        try:
                            log.info('creating the ' + formattedKey +
                                     ' column in the ' + dbTableName +
                                     ' table')
                            writequery(log=log,
                                       sqlQuery=qCreateColumn,
                                       dbConn=dbConn)

                        except Exception as e:
                            # log.debug('qCreateColumn: %s' % (qCreateColumn,
                            # ))
                            log.error('could not create the ' + formattedKey +
                                      ' column in the ' + dbTableName +
                                      ' table -- ' + str(e) + '\n')

    if returnInsertOnly == False:
        # GENERATE THE INDEX NAME - THEN CREATE INDEX IF IT DOES NOT YET EXIST
        if len(uniqueKeyList):
            for i in range(len(uniqueKeyList)):
                uniqueKeyList[i] = uniqueKeyList[i].replace(" ", "_").replace(
                    "-", "_")
                if uniqueKeyList[i] == "dec":
                    uniqueKeyList[i] = "decl"
                if uniqueKeyList[i] == "DEC":
                    uniqueKeyList[i] = "DECL"

            indexName = uniqueKeyList[0].replace(" ", "_").replace("-", "_")
            for i in range(len(uniqueKeyList) - 1):
                indexName += '_' + uniqueKeyList[i + 1]

            indexName = indexName.lower().replace("  ", " ").replace(" ", "_")

            sqlQuery = u"""SELECT COUNT(*) FROM INFORMATION_SCHEMA.STATISTICS WHERE TABLE_SCHEMA = DATABASE() AND TABLE_NAME = '""" + \
                dbTableName + """' AND INDEX_NAME = '""" + indexName + """'"""
            rows = readquery(log=log,
                             sqlQuery=sqlQuery,
                             dbConn=dbConn,
                             quiet=False)

            exists = rows[0]['COUNT(*)']
            # log.debug('uniqueKeyList: %s' % (uniqueKeyList,))
            if exists == 0:
                if isinstance(uniqueKeyList, list):
                    uniqueKeyList = ','.join(uniqueKeyList)

                addUniqueKey = 'ALTER TABLE `' + dbTableName + \
                    '` ADD unique ' + indexName + \
                    """ (""" + uniqueKeyList + ')'
                # log.debug('HERE IS THE COMMAND:'+addUniqueKey)
                writequery(log=log, sqlQuery=addUniqueKey, dbConn=dbConn)

    if returnInsertOnly == True and batchInserts == True:
        myKeys = '`,`'.join(formattedKeyList)
        valueString = ("%s, " * len(myValues))[:-2]
        insertCommand = insertVerb + """ INTO `""" + dbTableName + \
            """` (`""" + myKeys + """`, dateCreated) VALUES (""" + \
            valueString + """, NOW())"""
        mv = []
        mv[:] = [None if m == "None" else m for m in myValues]
        valueTuple = tuple(mv)

        dup = ""
        if replace:
            dup = " ON DUPLICATE KEY UPDATE "
            for k, v in zip(formattedKeyList, mv):
                dup = """%(dup)s %(k)s=values(%(k)s),""" % locals()
            dup = """%(dup)s updated=1, dateLastModified=NOW()""" % locals()

        insertCommand = insertCommand + dup

        insertCommand = insertCommand.replace('\\""', '\\" "')
        insertCommand = insertCommand.replace('""', "null")
        insertCommand = insertCommand.replace('!!python/unicode:', '')
        insertCommand = insertCommand.replace('!!python/unicode', '')
        insertCommand = insertCommand.replace('"None"', 'null')

        return insertCommand, valueTuple

    # GENERATE THE INSERT COMMAND - IGNORE DUPLICATE ENTRIES
    myKeys = '`,`'.join(formattedKeyList)
    myValues = '" ,"'.join(myValues)
    # log.debug(myValues+" ------ PRESTRIP")
    # REMOVE SOME CONVERSION NOISE
    myValues = myValues.replace('time.struct_time', '')
    myValues = myValues.replace(
        '- !!python/object/new:feedparser.FeedParserDict', '')
    myValues = myValues.replace(
        '!!python/object/new:feedparser.FeedParserDict', '')
    myValues = myValues.replace('dictitems:', '')
    myValues = myValues.replace('dictitems', '')
    myValues = myValues.replace('!!python/unicode:', '')
    myValues = myValues.replace('!!python/unicode', '')
    myValues = myValues.replace('"None"', 'null')
    # myValues = myValues.replace('"None', 'null')

    if myValues[-4:] != 'null':
        myValues += '"'

    dup = ""
    if replace:
        dupValues = ('"' + myValues).split(" ,")
        dupKeys = formattedKeyList
        dup = dup + " ON DUPLICATE KEY UPDATE "
        for k, v in zip(dupKeys, dupValues):
            dup = """%(dup)s `%(k)s`=%(v)s,""" % locals()

        dup = """%(dup)s updated=IF(""" % locals()
        for k, v in zip(dupKeys, dupValues):
            if v == "null":
                dup = """%(dup)s `%(k)s` is %(v)s AND """ % locals()
            else:
                dup = """%(dup)s `%(k)s`=%(v)s AND """ % locals()
        dup = dup[:-5] + ", 0, 1), dateLastModified=IF("
        for k, v in zip(dupKeys, dupValues):
            if v == "null":
                dup = """%(dup)s `%(k)s` is %(v)s AND """ % locals()
            else:
                dup = """%(dup)s `%(k)s`=%(v)s AND """ % locals()
        dup = dup[:-5] + ", dateLastModified, NOW())"

    # log.debug(myValues+" ------ POSTSTRIP")
    addValue = insertVerb + """ INTO `""" + dbTableName + \
        """` (`""" + myKeys + """`, dateCreated) VALUES (\"""" + \
        myValues + """, NOW()) %(dup)s """ % locals()

    addValue = addValue.replace('\\""', '\\" "')
    addValue = addValue.replace('""', "null")
    addValue = addValue.replace('!!python/unicode:', '')
    addValue = addValue.replace('!!python/unicode', '')
    addValue = addValue.replace('"None"', 'null')
    # log.debug(addValue)

    if returnInsertOnly == True:
        return addValue

    message = ""
    try:
        # log.debug('adding new data to the %s table; query: %s' %
        # (dbTableName, addValue))"
        writequery(log=log, sqlQuery=addValue, dbConn=dbConn)

    except Exception as e:
        log.error("could not add new data added to the table '" + dbTableName +
                  "' : " + str(e) + '\n')

    log.info('completed the ``convert_dictionary_to_mysql_table`` function')
    return None, None
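
When called with ``returnInsertOnly=True`` and ``batchInserts=True`` the returned command uses ``%s`` placeholders, so the natural way to run it is ``cursor.executemany`` with one value tuple per converted dictionary. A usage sketch following on from the docstring example (the pymysql driver and credentials are assumptions; any DB-API driver works the same way):

import pymysql  # hypothetical driver choice

dbConn = pymysql.connect(host="localhost", user="user",
                         passwd="xxx", db="mydb")  # hypothetical credentials

insertCommand, valueTuple = convert_dictionary_to_mysql_table(
    dbConn=dbConn,
    log=log,
    dictionary=dictionary,
    dbTableName="testing_table",
    uniqueKeyList=["uniquekey1", "uniqueKey2"],
    returnInsertOnly=True,
    batchInserts=True
)

# one parameterised statement, many value tuples
cursor = dbConn.cursor()
cursor.executemany(insertCommand, [valueTuple])
dbConn.commit()
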
Example 8
def main(arguments=None):
    """
    *The main function used when ``cl_utils.py`` is run as a single script from the cl, or when installed as a cl command*
    """
    # set up the command-line util settings
    su = tools(
        arguments=arguments,
        docString=__doc__,
        logLevel="WARNING",
        options_first=True,
        projectName="sloancone",
        tunnel=False
    )
    arguments, settings, log, dbConn = su.setup()

    # tab completion for raw_input
    readline.set_completer_delims(' \t\n;')
    readline.parse_and_bind("tab: complete")
    readline.set_completer(tab_complete)

    # unpack remaining cl arguments using `exec` to set up the variable names
    # automatically
    for arg, val in arguments.iteritems():
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        if isinstance(val, str) or isinstance(val, unicode):
            exec(varname + " = '%s'" % (val,))
        else:
            exec(varname + " = %s" % (val,))
        if arg == "--dbConn":
            dbConn = val
        log.debug('%s = %s' % (varname, val,))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info(
        '--- STARTING TO RUN THE cl_utils.py AT %s' %
        (startTime,))

    # set options interactively if user requests
    if "interactiveFlag" in locals() and interactiveFlag:

        # load previous settings
        moduleDirectory = os.path.dirname(__file__) + "/resources"
        pathToPickleFile = "%(moduleDirectory)s/previousSettings.p" % locals()
        try:
            with open(pathToPickleFile):
                pass
            previousSettingsExist = True
        except:
            previousSettingsExist = False
        previousSettings = {}
        if previousSettingsExist:
            previousSettings = pickle.load(open(pathToPickleFile, "rb"))

        # x-raw-input
        # x-boolean-raw-input
        # x-raw-input-with-default-value-from-previous-settings

        # save the most recently used requests
        pickleMeObjects = []
        pickleMe = {}
        theseLocals = locals()
        for k in pickleMeObjects:
            pickleMe[k] = theseLocals[k]
        pickle.dump(pickleMe, open(pathToPickleFile, "wb"))

    # CALL FUNCTIONS/OBJECTS

    # call the worker function
    if search:
        cs = cone_search(
            log=log,
            ra=ra,
            dec=dec,
            searchRadius=float(arcsecRadius),
            nearest=nearestFlag,
            outputFormat=outputFormat,
            galaxyType=galaxyType
        )
        results = cs.get()
        print results

    # covered = True | False | 999 (i.e. not sure)
    if covered:
        check = check_coverage(
            log=log,
            ra=ra,
            dec=dec
        ).get()
        print check

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info('-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --' %
             (endTime, runningTime, ))

    return
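
The ``tab_complete`` completer handed to readline above is not defined in this listing; a minimal filesystem-path completer of the kind usually paired with this setup (an assumption, not the package's own function) could be:

import glob

def tab_complete(text, state):
    # return the state-th filesystem path matching what the user has typed
    matches = glob.glob(text + '*')
    return matches[state] if state < len(matches) else None
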
Example 9
def main(arguments=None):
    """
    *The main function used when ``cl_utils.py`` is run as a single script from the cl, or when installed as a cl command*
    """
    # set up the command-line util settings
    su = tools(
        arguments=arguments,
        docString=__doc__,
        logLevel="WARNING",
        options_first=False,
        projectName="transientNamer"
    )
    arguments, settings, log, dbConn = su.setup()

    # tab completion for raw_input
    readline.set_completer_delims(' \t\n;')
    readline.parse_and_bind("tab: complete")
    readline.set_completer(tab_complete)

    # unpack remaining cl arguments using `exec` to set up the variable names
    # automatically
    for arg, val in arguments.iteritems():
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        if isinstance(val, str) or isinstance(val, unicode):
            exec(varname + " = '%s'" % (val,))
        else:
            exec(varname + " = %s" % (val,))
        if arg == "--dbConn":
            dbConn = val
        log.debug('%s = %s' % (varname, val,))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info(
        '--- STARTING TO RUN THE cl_utils.py AT %s' %
        (startTime,))

    if search or new or cone:
        if ra:
            tns = transientNamer.search(
                log=log,
                ra=ra,
                dec=dec,
                radiusArcsec=arcsecRadius,
                comments=withCommentsFlag
            )
        if name:
            tns = transientNamer.search(
                log=log,
                name=name,
                comments=withCommentsFlag
            )
        if discInLastDays:
            tns = transientNamer.search(
                log=log,
                discInLastDays=discInLastDays,
                comments=withCommentsFlag
            )

        # Recursively create missing directories
        if outputFlag and not os.path.exists(outputFlag):
            os.makedirs(outputFlag)

        if tableNamePrefix:
            sources, phot, spec, files = tns.mysql(
                tableNamePrefix=tableNamePrefix, dirPath=outputFlag)
            numSources = len(sources.split("\n")) - 1
        elif not render or render == "table":
            sources, phot, spec, files = tns.table(dirPath=outputFlag)
            numSources = len(sources.split("\n")) - 4
        elif render == "csv":
            sources, phot, spec, files = tns.csv(dirPath=outputFlag)
            numSources = len(sources.split("\n")) - 1
        elif render == "json":
            sources, phot, spec, files = tns.json(dirPath=outputFlag)
            numSources = len(sources.split("{")) - 1
        elif render == "yaml":
            sources, phot, spec, files = tns.yaml(dirPath=outputFlag)
            numSources = len(sources.split("\n-"))
        elif render == "markdown":
            sources, phot, spec, files = tns.markdown(dirPath=outputFlag)
            numSources = len(sources.split("\n")) - 2

        if numSources == 1:
            print "%(numSources)s transient found" % locals()
        elif numSources > 1:
            print "%(numSources)s transients found" % locals()

        if not outputFlag:
            print "\n# Matched Transients"
            print sources
            print "\n# Transient Photometry"
            print phot
            print "\n# Transient Spectra"
            print spec
            print "\n# Transient Supplementary Files"
            print files
            print "\n# Original TNS Search URL"
            print tns.url
        # CALL FUNCTIONS/OBJECTS

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info('-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --' %
             (endTime, runningTime, ))

    return
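
The render chain above pairs each output format with a method on the ``tns`` search object plus a format-specific offset for counting sources. As a design note only, the method lookup could be table-driven; this sketch preserves the method selection but omits the per-format source counting:

# format name -> tns output method (names as used in the chain above)
renderers = {
    "table": tns.table,
    "csv": tns.csv,
    "json": tns.json,
    "yaml": tns.yaml,
    "markdown": tns.markdown,
}
writer = renderers.get(render) or tns.table  # default to a table, as above
sources, phot, spec, files = writer(dirPath=outputFlag)
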
Example 10
def main(arguments=None):
    """
    *The main function used when `cl_utils.py` is run as a single script from the cl, or when installed as a cl command*
    """
    # set up the command-line util settings
    su = tools(arguments=arguments,
               docString=__doc__,
               logLevel="WARNING",
               options_first=False,
               projectName="sloancone",
               defaultSettingsFile=True)
    arguments, settings, log, dbConn = su.setup()

    # tab completion for raw_input
    readline.set_completer_delims(' \t\n;')
    readline.parse_and_bind("tab: complete")
    readline.set_completer(tab_complete)

    # UNPACK REMAINING CL ARGUMENTS INTO A DICT TO SET UP THE VARIABLE NAMES
    # AUTOMATICALLY
    a = {}
    for arg, val in list(arguments.items()):
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        a[varname] = val
        if arg == "--dbConn":
            dbConn = val
            a["dbConn"] = val
        log.debug('%s = %s' % (
            varname,
            val,
        ))

    search = a["search"]
    covered = a["covered"]
    nearest = a["nearest"]
    fformat = a["format"]
    galaxyType = a["galaxyType"]

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info('--- STARTING TO RUN THE cl_utils.py AT %s' % (startTime, ))

    # set options interactively if user requests
    if "interactiveFlag" in a and a["interactiveFlag"]:

        # load previous settings
        moduleDirectory = os.path.dirname(__file__) + "/resources"
        pathToPickleFile = "%(moduleDirectory)s/previousSettings.p" % locals()
        try:
            with open(pathToPickleFile):
                pass
            previousSettingsExist = True
        except:
            previousSettingsExist = False
        previousSettings = {}
        if previousSettingsExist:
            previousSettings = pickle.load(open(pathToPickleFile, "rb"))

        # x-raw-input
        # x-boolean-raw-input
        # x-raw-input-with-default-value-from-previous-settings

        # save the most recently used requests
        pickleMeObjects = []
        pickleMe = {}
        theseLocals = locals()
        for k in pickleMeObjects:
            pickleMe[k] = theseLocals[k]
        pickle.dump(pickleMe, open(pathToPickleFile, "wb"))

    if a["init"]:
        from os.path import expanduser
        home = expanduser("~")
        filepath = home + "/.config/sloancone/sloancone.yaml"
        try:
            cmd = """open %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass
        try:
            cmd = """start %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass
        return

    # CALL FUNCTIONS/OBJECTS
    # CALL THE WORKER FUNCTION
    if search:
        cs = cone_search(log=log,
                         ra=a["ra"],
                         dec=a["dec"],
                         searchRadius=float(a["arcsecRadius"]),
                         nearest=nearest,
                         outputFormat=fformat,
                         galaxyType=galaxyType)
        results = cs.get()
        print(results)

    # COVERED = TRUE | FALSE | 999 (I.E. NOT SURE)
    if covered:
        check = check_coverage(log=log, ra=a["ra"], dec=a["dec"]).get()
        print(check)

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info(
        '-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --' % (
            endTime,
            runningTime,
        ))

    return
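
This example keeps the unpacked arguments in an explicit dictionary `a`, where most of the other examples `exec` each assignment into the local namespace. A minimal side-by-side sketch of the two idioms, with a made-up two-entry argument dict; the dict-based form is friendlier to linters and avoids code injection via argument values:

arguments = {"<ra>": "23:45:21.2", "--nearest": True}

# exec-based idiom: mangle the docopt key into a name, then bind it
for arg, val in arguments.items():
    if arg[0] == "-":
        varname = arg.replace("-", "") + "Flag"
    else:
        varname = arg.replace("<", "").replace(">", "")
    exec(varname + " = %r" % (val,))  # only binds reliably at module scope in Python 3

# dict-based idiom: same key mangling, but values stay in one mapping
a = {}
for arg, val in arguments.items():
    if arg[0] == "-":
        varname = arg.replace("-", "") + "Flag"
    else:
        varname = arg.replace("<", "").replace(">", "")
    a[varname] = val

print(ra, a["nearestFlag"])  # -> 23:45:21.2 True
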
Example no. 11
def main(arguments=None):
    """
    *The main function used when ``cl_utils.py`` is run as a single script from the cl, or when installed as a cl command*
    """
    # setup the command-line util settings
    su = tools(arguments=arguments,
               docString=__doc__,
               logLevel="WARNING",
               options_first=True,
               projectName="panstamps")
    arguments, settings, log, dbConn = su.setup()

    # unpack remaining cl arguments using `exec` to setup the variable names
    # automatically
    for arg, val in arguments.iteritems():
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        if isinstance(val, str) or isinstance(val, unicode):
            exec(varname + " = '%s'" % (val, ))
        else:
            exec(varname + " = %s" % (val, ))
        if arg == "--dbConn":
            dbConn = val
        log.debug('%s = %s' % (
            varname,
            val,
        ))

    if ra:
        try:
            ra = float(ra)
        except:
            if ":" not in ra:
                log.error(
                    "ERROR: ra must be in decimal degree or sexagesimal format"
                )
                return

    if dec:
        try:
            dec = float(dec)
        except:
            if ":" not in dec:
                log.error(
                    "ERROR: dec must be in decimal degree or sexagesimal format"
                )
                return

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info('--- STARTING TO RUN THE cl_utils.py AT %s' % (startTime, ))

    # BUILD KEYWORD DICT
    kwargs = {}
    kwargs["log"] = log
    kwargs["settings"] = settings
    kwargs["ra"] = ra
    kwargs["dec"] = dec

    # FITS OPTIONS
    kwargs["fits"] = True  # DEFAULT
    if fitsFlag == False and nofitsFlag == True:
        kwargs["fits"] = False

    # JPEG OPTIONS
    kwargs["jpeg"] = False  # DEFAULT
    if jpegFlag == True and nojpegFlag == False:
        kwargs["jpeg"] = True

    # COLOR JPEG OPTIONS
    kwargs["color"] = False  # DEFAULT
    if colorFlag == True and nocolorFlag == False:
        kwargs["color"] = True

    # WIDTH OPTION
    kwargs["arcsecSize"] = 60
    if widthFlag:
        kwargs["arcsecSize"] = float(widthFlag) * 60.

    # CHOOSE A FILTERSET
    kwargs["filterSet"] = 'gri'
    if filtersFlag:
        kwargs["filterSet"] = filtersFlag

    for i in kwargs["filterSet"]:
        if i not in "grizy":
            log.error(
                "ERROR: the requested filter must be in the grizy filter set")
            return

    # WHICH IMAGE TYPE TO DOWNLOAD
    if stack:
        kwargs["imageType"] = "stack"
    if warp:
        kwargs["imageType"] = "warp"
    if closestFlag:
        kwargs["imageType"] = "warp"

    # MJD WINDOW
    kwargs["mjdStart"] = mjdStart
    kwargs["mjdEnd"] = mjdEnd
    kwargs["window"] = False

    try:
        kwargs["window"] = int(closestFlag)
    except:
        pass

    if not kwargs["window"]:
        if mjd and closestFlag == "before":
            kwargs["mjdEnd"] = mjd
        elif mjd and closestFlag == "after":
            kwargs["mjdStart"] = mjd
    else:
        if mjd and kwargs["window"] < 0:
            kwargs["mjdEnd"] = mjd
        elif mjd and kwargs["window"] > 0:
            kwargs["mjdStart"] = mjd

    # DOWNLOAD LOCATION
    if downloadFolderFlag:
        home = expanduser("~")
        downloadFolderFlag = downloadFolderFlag.replace("~", home)
    kwargs["downloadDirectory"] = downloadFolderFlag

    # xt-kwarg_key_and_value

    # DOWNLOAD THE IMAGES
    images = downloader(**kwargs)
    fitsPaths, jpegPaths, colorPath = images.get()
    jpegPaths += colorPath

    # POST-DOWNLOAD PROCESS IMAGES
    kwargs = {}
    kwargs["log"] = log
    kwargs["settings"] = settings
    # WIDTH OPTION
    kwargs["arcsecSize"] = 60
    if widthFlag:
        kwargs["arcsecSize"] = float(widthFlag) * 60.

    # ANNOTATE JPEG OPTIONS
    kwargs["crosshairs"] = True  # DEFAULT
    kwargs["scale"] = True
    if annotateFlag == False and noannotateFlag == True:
        kwargs["crosshairs"] = False  # DEFAULT
        kwargs["scale"] = False

    # INVERT OPTIONS
    kwargs["invert"] = False  # DEFAULT
    if invertFlag == True and noinvertFlag == False:
        kwargs["invert"] = True

    # GREYSCALE OPTIONS
    kwargs["greyscale"] = False  # DEFAULT
    if greyscaleFlag == True and nogreyscaleFlag == False:
        kwargs["greyscale"] = True

    # TRANSIENT DOT OPTIONS
    kwargs["transient"] = False  # DEFAULT
    if transientFlag == True and notransientFlag == False:
        kwargs["transient"] = True

    for j in jpegPaths:
        kwargs["imagePath"] = j

        # kwargs["transient"] = False

        # kwargs["invert"] = False
        # kwargs["greyscale"] = False
        oneImage = image(**kwargs)
        oneImage.get()

        # CALL FUNCTIONS/OBJECTS

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info(
        '-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --' % (
            endTime,
            runningTime,
        ))

    return
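
The `--closest` flag in the panstamps examples is overloaded: it can be the string "before"/"after" or a signed integer window in days. A minimal sketch of that branch as a standalone helper (the function name and return shape are hypothetical):

def mjd_window(mjd, closest, mjdStart=None, mjdEnd=None):
    # an integer value means "window of N days relative to mjd" ...
    try:
        window = int(closest)
    except (TypeError, ValueError):
        window = False
    if not window:
        # ... otherwise "before"/"after" pins one end of the range to mjd
        if mjd and closest == "before":
            mjdEnd = mjd
        elif mjd and closest == "after":
            mjdStart = mjd
    else:
        if mjd and window < 0:
            mjdEnd = mjd    # negative window: images taken before mjd
        elif mjd and window > 0:
            mjdStart = mjd  # positive window: images taken after mjd
    return mjdStart, mjdEnd, window

print(mjd_window(58000.0, "before"))  # -> (None, 58000.0, False)
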
Example no. 12
def main(arguments=None):
    """
    *The main function used when ``cl_utils.py`` is run as a single script from the cl, or when installed as a cl command*
    """

    # setup the command-line util settings
    su = tools(
        arguments=arguments,
        docString=__doc__,
        logLevel="WARNING",
        options_first=False,
        projectName="breaker"
    )
    arguments, settings, log, dbConn = su.setup()

    # unpack remaining cl arguments using `exec` to setup the variable names
    # automatically
    for arg, val in arguments.iteritems():
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        if isinstance(val, str) or isinstance(val, unicode):
            exec(varname + " = '%s'" % (val,))
        else:
            exec(varname + " = %s" % (val,))
        if arg == "--dbConn":
            dbConn = val
        log.debug('%s = %s' % (varname, val,))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info(
        '--- STARTING TO RUN THE cl_utils.py AT %s' %
        (startTime,))

    if init:
        from os.path import expanduser
        home = expanduser("~")
        filepath = home + "/.config/breaker/breaker.yaml"
        try:
            cmd = """open %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass
        try:
            cmd = """start %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass

    if not far:
        far = 1e-5

    if gwid and gwid[:2] == "GW":
        for g in settings["gravitational waves"]:
            if settings["gravitational waves"][g]["human-name"] == gwid.strip():
                gwid = g
    if wFlag and wFlag[:2] == "GW":
        for g in settings["gravitational waves"]:
            if settings["gravitational waves"][g]["human-name"] == wFlag.strip():
                wFlag = g

    # CALL FUNCTIONS/OBJECTS
    if update:
        pointingsFlag = True
        if nopointingsFlag:
            pointingsFlag = False
        u = update_ps1_atlas_footprint_tables(
            log=log,
            settings=settings,
            updateNed=updateNedFlag,
            updateAll=allFlag,
            updatePointings=pointingsFlag
        )
        u.get()
    if plot and history:
        p = plot_wave_observational_timelines(
            log=log,
            settings=settings,
            plotType="history"
        )
        p.get()
    if plot and timeline:
        if not pFlag:
            pFlag = "mercator"

        if fFlag:
            filters = list(fFlag)
        else:
            filters = False

        p = plot_wave_observational_timelines(
            log=log,
            settings=settings,
            gwid=wFlag,
            plotType="timeline",
            allPlots=allFlag,
            telescope=tFlag,
            projection=pFlag,
            filters=filters,
            probabilityCut=True
        )
        p.get()
    if plot and sources:
        p = plot_wave_matched_source_maps(
            log=log,
            settings=settings,
            gwid=gwid
        )
        p.get()

    if faker:
        f = generate_faker_catalogue(
            log=log,
            settings=settings,
            ps1ExpId=ps1ExpId,
            gwid=False
        )
        f.get()
    if stats:
        s = survey_footprint(
            log=log,
            settings=settings,
            gwid=gwid,
            telescope=telescope
        )
        s.get()
    if listen and inLastNMins:
        timeNowMjd = mjdNow(
            log=log
        ).get_mjd()
        startMJD = float(timeNowMjd) - float(inLastNMins) / (60 * 60 * 24.)
        this = mlisten(
            log=log,
            settings=settings,
            #label="EM_READY | EM_Selected | ADVOK",
            label="",
            farThreshold=far,
            startMJD=float(startMJD),
            endMJD=float(timeNowMjd) + 1.
        )
        this.get_maps()
    if listen and mjdStart:
        this = mlisten(
            log=log,
            settings=settings,
            # label="EM_READY | EM_Selected | ADVOK",
            label="",
            farThreshold=far,
            startMJD=float(mjdStart),
            endMJD=float(mjdEnd)
        )
        this.get_maps()
    if listen and daemonFlag:
        if sec:
            daemon = float(sec)
        else:
            daemon = True
        this = mlisten(
            log=log,
            settings=settings,
            # label="EM_READY | EM_Selected | ADVOK",
            label="",
            farThreshold=far,
            daemon=daemon
        )
        this.get_maps()

    if skymap:
        if exposuresFlag:
            databaseConnRequired = True
        else:
            databaseConnRequired = False

        plotter = plot_wave_observational_timelines(
            log=log,
            settings=settings,
            databaseConnRequired=databaseConnRequired
        )

        if exposuresFlag:
            plotParameters, ps1Transients, ps1Pointings, atlasPointings, atlasTransients = plotter.get_gw_parameters_from_settings(
                gwid=gwid,
                inFirstDays=(0, 31)
            )
        else:
            ps1Transients = []
            atlasTransients = []
            ps1Pointings = []
            atlasPointings = []

        ps1Transients = []
        atlasTransients = []

        if not cFlag:
            cFlag = 0.
        else:
            cFlag = float(cFlag)

        if defaultoutputFlag:
            outputDirectory = False
        else:
            outputDirectory = "."

        plotter.generate_probability_plot(
            gwid=gwid,
            ps1Transients=ps1Transients,
            atlasTransients=atlasTransients,
            ps1Pointings=ps1Pointings,
            atlasPointings=atlasPointings,
            pathToProbMap=pathToLVMap,
            fileFormats=["pdf", "png"],
            outputDirectory=outputDirectory,
            projection="mollweide",
            plotType="timeline",
            folderName="all_sky_plots",
            fitsImage=False,
            allSky=True,
            center=cFlag
        )

        plotter.generate_probability_plot(
            gwid=gwid,
            pathToProbMap=pathToLVMap,
            fileFormats=["pdf", "png"],
            outputDirectory=outputDirectory,
            projection="cartesian",
            plotType="timeline",
            folderName="all_sky_plots",
            fitsImage=True,
            allSky=True,
            center=cFlag
        )

    if contour:
        from breaker.transients import annotator
        an = annotator(
            log=log,
            settings=settings,
            gwid=gwid
        )

        from astrocalc.coords import unit_conversion
        # ASTROCALC UNIT CONVERTER OBJECT
        converter = unit_conversion(
            log=log
        )
        ra = converter.ra_sexegesimal_to_decimal(
            ra=ra
        )
        dec = converter.dec_sexegesimal_to_decimal(
            dec=dec
        )
        transients = {"cl": (ra, dec)}
        transientNames, probs = an.annotate(transients)
        percentage = probs[0]
        print "The transient lies within the inner %(percentage)s%% likelihood contour of event %(gwid)s" % locals()

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info('-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --' %
             (endTime, runningTime, ))

    return
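
The `gwid` normalisation above maps a human-readable wave name (e.g. "GW150914") back to the internal key used in the settings file. A minimal sketch with a made-up settings fragment (the key G184098 is illustrative, not taken from a real settings file):

settings = {"gravitational waves": {
    "G184098": {"human-name": "GW150914"},
}}

gwid = "GW150914"
if gwid and gwid[:2] == "GW":
    for g in settings["gravitational waves"]:
        if settings["gravitational waves"][g]["human-name"] == gwid.strip():
            gwid = g

print(gwid)  # -> G184098
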
Example no. 13
def main(arguments=None):
    """
    *The main function used when ``cl_utils.py`` is run as a single script from the cl, or when installed as a cl command*
    """
    # setup the command-line util settings
    su = tools(arguments=arguments,
               docString=__doc__,
               logLevel="DEBUG",
               options_first=False,
               projectName="HMpTy")
    arguments, settings, log, dbConn = su.setup()

    # unpack remaining cl arguments using `exec` to setup the variable names
    # automatically
    for arg, val in arguments.items():
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        if isinstance(val, str):
            exec(varname + " = '%s'" % (val, ))
        else:
            exec(varname + " = %s" % (val, ))
        if arg == "--dbConn":
            dbConn = val
        log.debug('%s = %s' % (
            varname,
            val,
        ))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info('--- STARTING TO RUN THE cl_utils.py AT %s' % (startTime, ))

    # CALL FUNCTIONS/OBJECTS
    if index:
        add_htm_ids_to_mysql_database_table(raColName=raCol,
                                            declColName=decCol,
                                            tableName=tableName,
                                            dbConn=dbConn,
                                            log=log,
                                            primaryIdColumnName=primaryIdCol,
                                            reindex=forceFlag)

    if search:
        cs = conesearch(log=log,
                        dbConn=dbConn,
                        tableName=tableName,
                        columns=False,
                        ra=ra,
                        dec=dec,
                        radiusArcsec=float(radius),
                        separations=True,
                        distinct=False,
                        sqlWhere=False)
        matchIndies, matches = cs.search()
        if not renderFlag:
            print(matches.table())
        elif renderFlag == "json":
            print(matches.json())
        elif renderFlag == "csv":
            print(matches.csv())
        elif renderFlag == "yaml":
            print(matches.yaml())
        elif renderFlag == "md":
            print(matches.markdown())
        elif renderFlag == "table":
            print(matches.markdown())
        elif renderFlag == "mysql":
            print(matches.mysql(tableName=resultsTable))

    if level:
        from HMpTy import HTM
        mesh = HTM(depth=int(level), log=log)

        htmids = mesh.lookup_id(ra, dec)
        print(htmids[0])

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info(
        '-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --' % (
            endTime,
            runningTime,
        ))

    return
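
The if/elif render chain above (repeated verbatim in a later HMpTy example) is really a dispatch table. A minimal sketch, with a hypothetical stub standing in for the object returned by `cs.search()`; the `mysql` renderer, which also needs a table name, is omitted:

class MatchesStub:
    # hypothetical stand-in for the match-list object returned by cs.search()
    def table(self): return "ascii table"
    def json(self): return "[]"
    def csv(self): return "ra,dec"
    def yaml(self): return "- ra: 23.4"
    def markdown(self): return "| ra | dec |"

def render(matches, renderFlag=None):
    dispatch = {
        "json": matches.json,
        "csv": matches.csv,
        "yaml": matches.yaml,
        "md": matches.markdown,
        "table": matches.markdown,  # "table" and "md" share a renderer
    }
    return dispatch.get(renderFlag, matches.table)()

print(render(MatchesStub(), "csv"))  # -> ra,dec
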
def main(arguments=None):
    """
    *The main function used when ``astrometry_corrector.py`` is run as a single script from the cl, or when installed as a cl command*
    """
    ########## IMPORTS ##########
    ## STANDARD LIB ##
    ## THIRD PARTY ##
    ## LOCAL APPLICATION ##

    su = tools(
        arguments=arguments,
        docString=__doc__,
        logLevel="INFO"
    )
    arguments, settings, log, dbConn = su.setup()

    # unpack remaining cl arguments using `exec` to setup the variable names
    # automatically
    for arg, val in arguments.iteritems():
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        if isinstance(val, str) or isinstance(val, unicode):
            exec(varname + " = '%s'" % (val,))
        else:
            exec(varname + " = %s" % (val,))
        if arg == "--dbConn":
            dbConn = val
        log.debug('%s = %s' % (varname, val,))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info(
        '--- STARTING TO RUN THE astrometry_corrector.py AT %s' %
        (startTime,))

    if parameterFilename and pFlag and create:
        create_parameter_file(
            log=log,
            parameterFilename=parameterFilename,
        )

    if not create and pFlag and fFlag and not nFlag:
        astrometry_corrector_from_yaml(
            log=log,
            fitsFile=fitsFile,
            yamlFile=parameterFilename
        )

    if not create and pFlag and fFlag and nFlag:
        astrometry_corrector_from_yaml(
            log=log,
            fitsFile=fitsFile,
            yamlFile=parameterFilename,
            hdu=hduNumber
        )

    # call the worker function
    # x-if-settings-or-database-credientials
    # astrometry_corrector(
    #     log=log,
    # )

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info(
        '-- FINISHED ATTEMPT TO RUN THE astrometry_corrector.py AT %s (RUNTIME: %s) --' %
        (endTime, runningTime, ))

    return
Example no. 15
def main(arguments=None):
    """
    *The main function used when ``cl_utils.py`` is run as a single script from the cl, or when installed as a cl command*
    """
    # setup the command-line util settings
    su = tools(arguments=arguments,
               docString=__doc__,
               logLevel="DEBUG",
               options_first=False,
               projectName="tastic")
    arguments, settings, log, dbConn = su.setup()

    # unpack remaining cl arguments using `exec` to setup the variable names
    # automatically
    for arg, val in arguments.iteritems():
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        if varname == "import":
            varname = "iimport"
        if isinstance(val, str) or isinstance(val, unicode):
            exec(varname + " = '%s'" % (val, ))
        else:
            exec(varname + " = %s" % (val, ))
        if arg == "--dbConn":
            dbConn = val
        log.debug('%s = %s' % (
            varname,
            val,
        ))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info('--- STARTING TO RUN THE cl_utils.py AT %s' % (startTime, ))

    if init:
        from os.path import expanduser
        home = expanduser("~")
        filepath = home + "/.config/tastic/tastic.yaml"
        try:
            cmd = """open %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass
        try:
            cmd = """start %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass

    # CALL FUNCTIONS/OBJECTS
    if sort or archive:

        ws = workspace(log=log,
                       settings=settings,
                       fileOrWorkspacePath=pathToFileOrWorkspace)
    if sort:
        ws.sort()
    if archive:
        ws.archive_done()

    if sync:
        tp = syncc(log=log,
                   settings=settings,
                   workspaceRoot=pathToWorkspace,
                   workspaceName=workspaceName,
                   syncFolder=pathToSyncFolder,
                   editorialRootPath=editorialRootPath,
                   includeFileTags=fileTagsFlag)
        tp.sync()

    if reminders:
        r = reminderss(log=log, settings=settings)
        r.import_list(listName=listName, pathToTaskpaperDoc=pathToTaskpaperDoc)

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info(
        '-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --' % (
            endTime,
            runningTime,
        ))

    return
Example no. 16
def main(arguments=None):
    """
    *The main function used when `cl_utils.py` is run as a single script from the cl, or when installed as a cl command*
    """
    # setup the command-line util settings
    su = tools(arguments=arguments,
               docString=__doc__,
               logLevel="WARNING",
               options_first=False,
               distributionName="qub-sherlock",
               projectName="sherlock",
               defaultSettingsFile=True)
    arguments, settings, log, dbConn = su.setup()

    # tab completion for raw_input
    readline.set_completer_delims(' \t\n;')
    readline.parse_and_bind("tab: complete")
    readline.set_completer(tab_complete)

    # UNPACK REMAINING CL ARGUMENTS USING `EXEC` TO SETUP THE VARIABLE NAMES
    # AUTOMATICALLY
    a = {}
    for arg, val in list(arguments.items()):
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        a[varname] = val
        if arg == "--dbConn":
            dbConn = val
            a["dbConn"] = val
        log.debug('%s = %s' % (
            varname,
            val,
        ))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info('--- STARTING TO RUN THE cl_utils.py AT %s' % (startTime, ))

    # set options interactively if user requests
    if "interactiveFlag" in a and a["interactiveFlag"]:

        # load previous settings
        moduleDirectory = os.path.dirname(__file__) + "/resources"
        pathToPickleFile = "%(moduleDirectory)s/previousSettings.p" % locals()
        try:
            with open(pathToPickleFile):
                pass
            previousSettingsExist = True
        except:
            previousSettingsExist = False
        previousSettings = {}
        if previousSettingsExist:
            previousSettings = pickle.load(open(pathToPickleFile, "rb"))

        # x-raw-input
        # x-boolean-raw-input
        # x-raw-input-with-default-value-from-previous-settings

        # save the most recently used requests
        pickleMeObjects = []
        pickleMe = {}
        theseLocals = locals()
        for k in pickleMeObjects:
            pickleMe[k] = theseLocals[k]
        pickle.dump(pickleMe, open(pathToPickleFile, "wb"))

    if a["init"]:
        from os.path import expanduser
        home = expanduser("~")
        filepath = home + "/.config/sherlock/sherlock.yaml"
        try:
            cmd = """open %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass
        try:
            cmd = """start %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass
        return

    init = a["init"]
    match = a["match"]
    dbmatch = a["dbmatch"]
    clean = a["clean"]
    wiki = a["wiki"]
    iimport = a["import"]
    ned = a["ned"]
    cat = a["cat"]
    stream = a["stream"]
    info = a["info"]
    ra = a["ra"]
    dec = a["dec"]
    radiusArcsec = a["radiusArcsec"]
    cat_name = a["cat_name"]
    stream_name = a["stream_name"]
    skipNedUpdateFlag = a["skipNedUpdateFlag"]
    skipMagUpdateFlag = a["skipMagUpdateFlag"]
    settingsFlag = a["settingsFlag"]
    verboseFlag = a["verboseFlag"]
    updateFlag = a["updateFlag"]

    # CALL FUNCTIONS/OBJECTS
    if match or dbmatch:
        if verboseFlag:
            verbose = 2
        else:
            verbose = 1

        if skipNedUpdateFlag:
            updateNed = False
        else:
            updateNed = True

        if skipMagUpdateFlag:
            updatePeakMags = False
        else:
            updatePeakMags = True

        classifier = transient_classifier.transient_classifier(
            log=log,
            settings=settings,
            ra=ra,
            dec=dec,
            name=False,
            verbose=verbose,
            update=updateFlag,
            updateNed=updateNed,
            updatePeakMags=updatePeakMags)
        classifier.classify()

    if clean:
        cleaner = database_cleaner(log=log, settings=settings)
        cleaner.clean()
    if wiki:
        updateWiki = update_wiki_pages(log=log, settings=settings)
        updateWiki.update()

    if iimport and ned:
        ned = nedStreamImporter(log=log,
                                settings=settings,
                                coordinateList=["%(ra)s %(dec)s" % locals()],
                                radiusArcsec=radiusArcsec)
        ned.ingest()
    if iimport and cat:

        if cat_name == "veron":
            catalogue = veronImporter(log=log,
                                      settings=settings,
                                      pathToDataFile=pathToDataFile,
                                      version=cat_version,
                                      catalogueName=cat_name)
            catalogue.ingest()

        if "ned_d" in cat_name:
            catalogue = nedImporter(log=log,
                                    settings=settings,
                                    pathToDataFile=pathToDataFile,
                                    version=cat_version,
                                    catalogueName=cat_name)
            catalogue.ingest()
    if iimport and stream:
        if "ifs" in stream_name:
            stream = ifsImporter(log=log, settings=settings)
            stream.ingest()
    if not init and not match and not clean and not wiki and not iimport and ra:

        classifier = transient_classifier.transient_classifier(
            log=log,
            settings=settings,
            ra=ra,
            dec=dec,
            name=False,
            verbose=verboseFlag)
        classifier.classify()

    if info:
        print("sherlock-catalogues")
        wiki = update_wiki_pages(log=log, settings=settings)
        table = list(wiki._get_table_infos(trimmed=True))

        dataSet = list_of_dictionaries(log=log, listOfDictionaries=table)
        tableData = dataSet.reST(filepath=None)
        print(tableData)
        print()

        print("Crossmatch Streams")
        table = list(wiki._get_stream_view_infos(trimmed=True))
        dataSet = list_of_dictionaries(log=log, listOfDictionaries=table)
        tableData = dataSet.reST(filepath=None)
        print(tableData)
        print()

        print("Views on Catalogues and Streams")

        table = list(wiki._get_view_infos(trimmed=True))
        dataSet = list_of_dictionaries(log=log, listOfDictionaries=table)
        tableData = dataSet.reST(filepath=None)
        print(tableData)

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info(
        '-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --' % (
            endTime,
            runningTime,
        ))

    return
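
The three flag translations above just invert or rescale boolean CLI flags before they reach the classifier, so they compress to one-liners. A minimal sketch with made-up flag values:

verboseFlag, skipNedUpdateFlag, skipMagUpdateFlag = True, False, True

verbose = 2 if verboseFlag else 1        # bump verbosity one level
updateNed = not skipNedUpdateFlag        # NED update is opt-out
updatePeakMags = not skipMagUpdateFlag   # peak-mag update is opt-out

print(verbose, updateNed, updatePeakMags)  # -> 2 True False
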
Example no. 17
def main(arguments=None):
    """
    *The main function used when ``cl_utils.py`` is run as a single script from the cl, or when installed as a cl command*
    """
    from astrocalc.coords import unit_conversion
    # setup the command-line util settings
    su = tools(
        arguments=arguments,
        docString=__doc__,
        logLevel="CRITICAL",
        options_first=True,
        projectName="astrocalc"
    )
    arguments, settings, log, dbConn = su.setup()

    # tab completion for raw_input
    readline.set_completer_delims(' \t\n;')
    readline.parse_and_bind("tab: complete")
    readline.set_completer(tab_complete)

    # unpack remaining cl arguments using `exec` to setup the variable names
    # automatically
    for arg, val in arguments.iteritems():
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        if isinstance(val, str) or isinstance(val, unicode):
            exec(varname + " = '%s'" % (val,))
        else:
            exec(varname + " = %s" % (val,))
        if arg == "--dbConn":
            dbConn = val
        log.debug('%s = %s' % (varname, val,))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info(
        '--- STARTING TO RUN THE cl_utils.py AT %s' %
        (startTime,))

    # set options interactively if user requests
    if "interactiveFlag" in locals() and interactiveFlag:

        # load previous settings
        moduleDirectory = os.path.dirname(__file__) + "/resources"
        pathToPickleFile = "%(moduleDirectory)s/previousSettings.p" % locals()
        try:
            with open(pathToPickleFile):
                pass
            previousSettingsExist = True
        except:
            previousSettingsExist = False
        previousSettings = {}
        if previousSettingsExist:
            previousSettings = pickle.load(open(pathToPickleFile, "rb"))

        # x-raw-input
        # x-boolean-raw-input
        # x-raw-input-with-default-value-from-previous-settings

        # save the most recently used requests
        pickleMeObjects = []
        pickleMe = {}
        theseLocals = locals()
        for k in pickleMeObjects:
            pickleMe[k] = theseLocals[k]
        pickle.dump(pickleMe, open(pathToPickleFile, "wb"))

    # CALL FUNCTIONS/OBJECTS
    if coordflip:

        if cartesianFlag:
            converter = unit_conversion(
                log=log
            )
            x, y, z = converter.ra_dec_to_cartesian(
                ra="23 45 21.23232",
                dec="+01:58:5.45341"
            )
            print x, y, z
            return

        try:
            ra = float(ra)
            dec = float(dec)
            degree = True
        except Exception, e:
            degree = False

        if degree is True:
            converter = unit_conversion(
                log=log
            )
            try:
                ra = converter.ra_decimal_to_sexegesimal(
                    ra=ra,
                    delimiter=":"
                )
                dec = converter.dec_decimal_to_sexegesimal(
                    dec=dec,
                    delimiter=":"
                )
            except Exception, e:
                print e
                sys.exit(0)

            print ra, dec
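
The coordflip branch above (truncated in this example) first tries to parse the inputs as decimal degrees and then converts in whichever direction is needed. A minimal sketch of the full round trip, using only `unit_conversion` methods that appear in these examples:

from astrocalc.coords import unit_conversion

def coordflip(ra, dec, log):
    converter = unit_conversion(log=log)
    try:
        ra, dec = float(ra), float(dec)
        degree = True
    except ValueError:
        degree = False
    if degree:
        # decimal degrees in -> sexagesimal out
        ra = converter.ra_decimal_to_sexegesimal(ra=ra, delimiter=":")
        dec = converter.dec_decimal_to_sexegesimal(dec=dec, delimiter=":")
    else:
        # sexagesimal in -> decimal degrees out
        ra = converter.ra_sexegesimal_to_decimal(ra=ra)
        dec = converter.dec_sexegesimal_to_decimal(dec=dec)
    return ra, dec
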
Example no. 18
            H0=float(hcFlag)
        )
        print "Distance Modulus: " + str(dists["dmod"]) + " mag"
        print "Luminousity Distance: " + str(dists["dl_mpc"]) + " Mpc"
        print "Angular Size Scale: " + str(dists["da_scale"]) + " kpc/arcsec"
        print "Angular Size Distance: " + str(dists["da_mpc"]) + " Mpc"
        print "Comoving Radial Distance: " + str(dists["dcmr_mpc"]) + " Mpc"

    if dist and mpcFlag:
        from astrocalc.distances import converter
        c = converter(log=log)
        z = c.distance_to_redshift(
            mpc=float(distVal)
        )
        print "z = %(z)s" % locals()

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info('-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --' %
             (endTime, runningTime, ))

    return


if __name__ == '__main__':
    main()
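
The fragment above prints the standard cosmological distance measures from a `dists` dictionary and, in the Mpc branch, inverts a distance back to a redshift. A minimal sketch of that reverse lookup, using only the call shown in the fragment (the 500 Mpc value is a placeholder):

import logging
from astrocalc.distances import converter

log = logging.getLogger(__name__)
c = converter(log=log)
z = c.distance_to_redshift(mpc=500.0)  # distance in Mpc -> redshift
print("z = %(z)s" % locals())
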
Example no. 19
def main(arguments=None):
    """
    *The main function used when `cl_utils.py` is run as a single script from the cl, or when installed as a cl command*
    """
    # setup the command-line util settings
    su = tools(
        arguments=arguments,
        docString=__doc__,
        logLevel="WARNING",
        options_first=False,
        projectName="HMpTy",
        defaultSettingsFile=True
    )
    arguments, settings, log, dbConn = su.setup()

    # tab completion for raw_input
    readline.set_completer_delims(' \t\n;')
    readline.parse_and_bind("tab: complete")
    readline.set_completer(tab_complete)

    # UNPACK REMAINING CL ARGUMENTS USING `EXEC` TO SETUP THE VARIABLE NAMES
    # AUTOMATICALLY
    a = {}
    for arg, val in list(arguments.items()):
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        a[varname] = val
        if arg == "--dbConn":
            dbConn = val
            a["dbConn"] = val
        log.debug('%s = %s' % (varname, val,))

    hostFlag = a["hostFlag"]
    userFlag = a["userFlag"]
    passwdFlag = a["passwdFlag"]
    dbNameFlag = a["dbNameFlag"]
    tableName = a["tableName"]
    index = a["index"]
    htmid = a["htmid"]
    primaryIdCol = a["primaryIdCol"]
    raCol = a["raCol"]
    decCol = a["decCol"]
    ra = a["ra"]
    radius = a["radius"]
    level = a["level"]
    forceFlag = a["forceFlag"]
    renderFlag = a["renderFlag"]
    search = a["search"]

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info(
        '--- STARTING TO RUN THE cl_utils.py AT %s' %
        (startTime,))

    # set options interactively if user requests
    if "interactiveFlag" in a and a["interactiveFlag"]:

        # load previous settings
        moduleDirectory = os.path.dirname(__file__) + "/resources"
        pathToPickleFile = "%(moduleDirectory)s/previousSettings.p" % locals()
        try:
            with open(pathToPickleFile):
                pass
            previousSettingsExist = True
        except:
            previousSettingsExist = False
        previousSettings = {}
        if previousSettingsExist:
            previousSettings = pickle.load(open(pathToPickleFile, "rb"))

        # x-raw-input
        # x-boolean-raw-input
        # x-raw-input-with-default-value-from-previous-settings

        # save the most recently used requests
        pickleMeObjects = []
        pickleMe = {}
        theseLocals = locals()
        for k in pickleMeObjects:
            pickleMe[k] = theseLocals[k]
        pickle.dump(pickleMe, open(pathToPickleFile, "wb"))

    if a["init"]:
        from os.path import expanduser
        home = expanduser("~")
        filepath = home + "/.config/HMpTy/HMpTy.yaml"
        try:
            cmd = """open %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass
        try:
            cmd = """start %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass
        return

    # CALL FUNCTIONS/OBJECTS
    if index:
        add_htm_ids_to_mysql_database_table(
            raColName=raCol,
            declColName=decCol,
            tableName=tableName,
            dbConn=dbConn,
            log=log,
            primaryIdColumnName=primaryIdCol,
            reindex=forceFlag,
            dbSettings=dbSettings
        )

    if search:
        cs = conesearch(
            log=log,
            dbConn=dbConn,
            tableName=tableName,
            columns=False,
            ra=ra,
            dec=dec,
            radiusArcsec=float(radius),
            separations=True,
            distinct=False,
            sqlWhere=False
        )
        matchIndies, matches = cs.search()
        if not renderFlag:
            print(matches.table())
        elif renderFlag == "json":
            print(matches.json())
        elif renderFlag == "csv":
            print(matches.csv())
        elif renderFlag == "yaml":
            print(matches.yaml())
        elif renderFlag == "md":
            print(matches.markdown())
        elif renderFlag == "table":
            print(matches.markdown())
        elif renderFlag == "mysql":
            print(matches.mysql(tableName=resultsTable))

    if level:
        from HMpTy import HTM
        mesh = HTM(
            depth=int(level),
            log=log
        )

        htmids = mesh.lookup_id(ra, dec)
        print(htmids[0])

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info('-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --' %
             (endTime, runningTime, ))

    return
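
The `--level` branch above computes the HTM trixel id of a coordinate at a given mesh depth. A minimal standalone sketch using the same calls, with a plain standard-library logger assumed in place of the one returned by `su.setup()`:

import logging
from HMpTy import HTM

log = logging.getLogger(__name__)
mesh = HTM(depth=16, log=log)            # depth == the <level> argument
htmids = mesh.lookup_id(23.4321, -8.25)  # decimal-degree ra, dec
print(htmids[0])                         # trixel id of that point at depth 16
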
def main(arguments=None):
    """
    *The main function used when ``execute_mysql_script.py`` is run as a single script from the cl, or when installed as a cl command*
    """
    ########## IMPORTS ##########
    ## STANDARD LIB ##
    ## THIRD PARTY ##
    ## LOCAL APPLICATION ##

    ## ACTIONS BASED ON WHICH ARGUMENTS ARE RECEIVED ##
    # PRINT COMMAND-LINE USAGE IF NO ARGUMENTS PASSED
    # setup the command-line util settings
    from fundamentals import tools, times
    su = tools(
        arguments=arguments,
        docString=__doc__,
        logLevel="DEBUG"
    )
    arguments, settings, log, dbConn = su.setup()

    # unpack remaining cl arguments using `exec` to setup the variable names
    # automatically
    for arg, val in arguments.iteritems():
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        if isinstance(val, str) or isinstance(val, unicode):
            exec(varname + " = '%s'" % (val,))
        else:
            exec(varname + " = %s" % (val,))
        if arg == "--dbConn":
            dbConn = val
        log.debug('%s = %s' % (varname, val,))

    # Check for the force option
    if "force" not in locals() or force is not True:
        force = False

    # SETUP A DATABASE CONNECTION BASED ON WHAT ARGUMENTS HAVE BEEN PASSED
    dbConn = False
    if 'settings' in locals() and "database settings" in settings:
        host = settings["database settings"]["host"]
        user = settings["database settings"]["user"]
        passwd = settings["database settings"]["password"]
        dbName = settings["database settings"]["db"]
        dbConn = True
    elif "host" in locals() and "dbName" in locals():
        # SETUP DB CONNECTION
        dbConn = True
        host = arguments["--host"]
        user = arguments["--user"]
        passwd = arguments["--passwd"]
        dbName = arguments["--dbName"]
    if dbConn:
        import pymysql as ms
        dbConn = ms.connect(
            host=host,
            user=user,
            passwd=passwd,
            db=dbName,
            use_unicode=True,
            charset='utf8'
        )
        dbConn.autocommit(True)
        log.debug('dbConn: %s' % (dbConn,))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info(
        '--- STARTING TO RUN THE execute_mysql_script.py AT %s' %
        (startTime,))

    # call the worker function
    execute_mysql_script(
        log=log,
        user=user,
        passwd=passwd,
        db=dbName,
        host=host,
        pathToMysqlScript=pathToMysqlScript,
        force=force
    )

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info(
        '-- FINISHED ATTEMPT TO RUN THE execute_mysql_script.py AT %s (RUNTIME: %s) --' %
        (endTime, runningTime, ))

    return
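
The connection logic above prefers credentials from the settings file and only falls back to the --host/--user/--passwd/--dbName options. A hedged sketch of the settings structure those lookups imply (key names are taken from the code; the values are placeholders):

settings = {
    "database settings": {
        "host": "localhost",
        "user": "myuser",
        "password": "mypass",
        "db": "mydatabase",
    }
}
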
Example no. 22
def main(arguments=None):
    """
    *The main function used when ``cl_utils.py`` is run as a single script from the cl, or when installed as a cl command*
    """
    # setup the command-line util settings
    su = tools(
        arguments=arguments,
        docString=__doc__,
        logLevel="DEBUG",
        options_first=False,
        projectName="headjack"
    )
    arguments, settings, log, dbConn = su.setup()

    # tab completion for raw_input
    readline.set_completer_delims(' \t\n;')
    readline.parse_and_bind("tab: complete")
    readline.set_completer(tab_complete)

    # unpack remaining cl arguments using `exec` to setup the variable names
    # automatically
    for arg, val in arguments.iteritems():
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        if isinstance(val, str) or isinstance(val, unicode):
            exec(varname + " = '%s'" % (val,))
        else:
            exec(varname + " = %s" % (val,))
        if arg == "--dbConn":
            dbConn = val
        log.debug('%s = %s' % (varname, val,))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info(
        '--- STARTING TO RUN THE cl_utils.py AT %s' %
        (startTime,))

    # set options interactively if user requests
    if "interactiveFlag" in locals() and interactiveFlag:

        # load previous settings
        moduleDirectory = os.path.dirname(__file__) + "/resources"
        pathToPickleFile = "%(moduleDirectory)s/previousSettings.p" % locals()
        try:
            with open(pathToPickleFile):
                pass
            previousSettingsExist = True
        except:
            previousSettingsExist = False
        previousSettings = {}
        if previousSettingsExist:
            previousSettings = pickle.load(open(pathToPickleFile, "rb"))

        # x-raw-input
        # x-boolean-raw-input
        # x-raw-input-with-default-value-from-previous-settings

        # save the most recently used requests
        pickleMeObjects = []
        pickleMe = {}
        theseLocals = locals()
        for k in pickleMeObjects:
            pickleMe[k] = theseLocals[k]
        pickle.dump(pickleMe, open(pathToPickleFile, "wb"))

    # CALL FUNCTIONS/OBJECTS
    if read and sendToKindle:
        from headjack.read import sendToKindle
        sender = sendToKindle(
            log=log,
            settings=settings,
            dbConn=dbConn
        )
        sender.send()

    # CALL FUNCTIONS/OBJECTS
    if read and convert and kindleAnnotations:
        from headjack.read import convertKindleNB
        converter = convertKindleNB(
            log=log,
            settings=settings,
            dbConn=dbConn
        )
        converter.convert()

    if media and stage:
        from headjack.archiver import docs
        stager = docs(
            log=log,
            settings=settings,
            dbConn=dbConn
        )
        stager.stash()

    if media and archive:
        from headjack.archiver import docs
        stager = docs(
            log=log,
            settings=settings,
            dbConn=dbConn
        )
        stager.archive()
    if web2epub:
        from headjack.read import generate_web_article_epubs
        epubs = generate_web_article_epubs(
            log=log,
            settings=settings,
            dbConn=dbConn
        )
        epubs.create()

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()

    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info('-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --' %
             (endTime, runningTime, ))

    return
Example no. 23
def main(arguments=None):
    """
    *The main function used when `cl_utils.py` is run as a single script from the cl, or when installed as a cl command*
    """
    # setup the command-line util settings
    su = tools(arguments=arguments,
               docString=__doc__,
               logLevel="WARNING",
               options_first=False,
               projectName="atelParser",
               defaultSettingsFile=True)
    arguments, settings, log, dbConn = su.setup()

    # tab completion for raw_input
    readline.set_completer_delims(' \t\n;')
    readline.parse_and_bind("tab: complete")
    readline.set_completer(tab_complete)

    # UNPACK REMAINING CL ARGUMENTS USING `EXEC` TO SETUP THE VARIABLE NAMES
    # AUTOMATICALLY
    a = {}
    for arg, val in list(arguments.items()):
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        a[varname] = val
        if arg == "--dbConn":
            dbConn = val
            a["dbConn"] = val
        log.debug('%s = %s' % (
            varname,
            val,
        ))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info('--- STARTING TO RUN THE cl_utils.py AT %s' % (startTime, ))

    # set options interactively if user requests
    if "interactiveFlag" in a and a["interactiveFlag"]:

        # load previous settings
        moduleDirectory = os.path.dirname(__file__) + "/resources"
        pathToPickleFile = "%(moduleDirectory)s/previousSettings.p" % locals()
        try:
            with open(pathToPickleFile):
                pass
            previousSettingsExist = True
        except:
            previousSettingsExist = False
        previousSettings = {}
        if previousSettingsExist:
            previousSettings = pickle.load(open(pathToPickleFile, "rb"))

        # x-raw-input
        # x-boolean-raw-input
        # x-raw-input-with-default-value-from-previous-settings

        # save the most recently used requests
        pickleMeObjects = []
        pickleMe = {}
        theseLocals = locals()
        for k in pickleMeObjects:
            pickleMe[k] = theseLocals[k]
        pickle.dump(pickleMe, open(pathToPickleFile, "wb"))

    if a["init"]:
        from os.path import expanduser
        home = expanduser("~")
        filepath = home + "/.config/atelParser/atelParser.yaml"
        try:
            cmd = """open %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass
        try:
            cmd = """start %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass
        return

    count = a["count"]
    download = a["download"]
    parse = a["parse"]
    reparseFlag = a["reparseFlag"]

    # CALL FUNCTIONS/OBJECTS
    if download:
        from atelParser import download
        atels = download(log=log, settings=settings)
        atelsToDownload = atels.get_list_of_atels_still_to_download()
        atels.download_list_of_atels(atelsToDownload)

    if count:
        from atelParser import download
        atels = download(log=log, settings=settings)
        latestNumber = atels.get_latest_atel_number()
        from datetime import datetime, date, time
        now = datetime.now()
        now = now.strftime("%Y/%m/%d %H:%M:%S")
        print("%(latestNumber)s ATels have been reported as of %(now)s" %
              locals())

    if parse:
        from atelParser import mysql
        parser = mysql(log=log, settings=settings, reParse=reparseFlag)
        parser.atels_to_database()
        parser.parse_atels()
        parser.populate_htm_columns()

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info(
        '-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --' % (
            endTime,
            runningTime,
        ))

    return
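
Taken together, the download and parse branches above form a small pipeline: fetch any missing ATels, ingest them, parse out names and coordinates, then index them spatially. A minimal end-to-end sketch using only calls from the example; `log` and `settings` are stubbed here but would normally come from `su.setup()`:

import logging
from atelParser import download, mysql

log = logging.getLogger(__name__)
settings = {}  # placeholder; normally the parsed settings file from su.setup()

atels = download(log=log, settings=settings)
todo = atels.get_list_of_atels_still_to_download()
atels.download_list_of_atels(todo)

parser = mysql(log=log, settings=settings, reParse=False)
parser.atels_to_database()      # ingest the raw ATel pages
parser.parse_atels()            # extract names and coordinates
parser.populate_htm_columns()   # add HTM ids for spatial queries
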
Example no. 24
def main(arguments=None):
    """
    *The main function used when ``cl_utils.py`` is run as a single script from the cl, or when installed as a cl command*
    """
    # setup the command-line util settings
    su = tools(arguments=arguments,
               docString=__doc__,
               logLevel="WARNING",
               options_first=False,
               projectName="polyglot")
    arguments, settings, log, dbConn = su.setup()

    # unpack remaining cl arguments using `exec` to setup the variable names
    # automatically
    for arg, val in arguments.iteritems():
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        if isinstance(val, str) or isinstance(val, unicode):
            exec(varname + " = '%s'" % (val, ))
        else:
            exec(varname + " = %s" % (val, ))
        if arg == "--dbConn":
            dbConn = val
        log.debug('%s = %s' % (
            varname,
            val,
        ))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info('--- STARTING TO RUN THE cl_utils.py AT %s' % (startTime, ))

    # for k, v in locals().iteritems():
    #     print k, v

    if not destinationFolder:
        destinationFolder = os.getcwd()
    if not filenameFlag:
        filenameFlag = False
    if not cleanFlag:
        readability = False
    else:
        readability = True

    if init:
        from os.path import expanduser
        home = expanduser("~")
        filepath = home + "/.config/polyglot/polyglot.yaml"
        try:
            cmd = """open %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass
        try:
            cmd = """start %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass

    if pdf and url:
        filepath = printpdf.printpdf(log=log,
                                     settings=settings,
                                     url=url,
                                     folderpath=destinationFolder,
                                     title=filenameFlag,
                                     append=False,
                                     readability=readability).get()

    if html and url:

        cleaner = htmlCleaner.htmlCleaner(
            log=log,
            settings=settings,
            url=url,
            outputDirectory=destinationFolder,
            title=filenameFlag,  # SET TO FALSE TO USE WEBPAGE TITLE,
            style=cleanFlag,  # add polyglot's styling to the HTML document
            metadata=True,  # include metadata in generated HTML (e.g. title),
            h1=True  # include title as H1 at the top of the doc
        )
        filepath = cleaner.clean()

    if epub:
        if url:
            iinput = url
        else:
            iinput = docx
        from polyglot import ebook
        epub = ebook(log=log,
                     settings=settings,
                     urlOrPath=iinput,
                     title=filenameFlag,
                     bookFormat="epub",
                     outputDirectory=destinationFolder)
        filepath = epub.get()

    if mobi:
        if url:
            iinput = url
        else:
            iinput = docx
        from polyglot import ebook
        mobi = ebook(
            log=log,
            settings=settings,
            urlOrPath=iinput,
            title=filenameFlag,
            bookFormat="mobi",
            outputDirectory=destinationFolder,
        )
        filepath = mobi.get()

    if kindle:
        if url:
            iinput = url
        else:
            iinput = docx
        from polyglot import kindle
        sender = kindle(log=log,
                        settings=settings,
                        urlOrPath=iinput,
                        title=filenameFlag)
        success = sender.send()

    if kindleNB2MD:
        basename = os.path.basename(notebook)
        extension = os.path.splitext(basename)[1]
        filenameNoExtension = os.path.splitext(basename)[0]
        if destinationFolder:
            filepath = destinationFolder + "/" + filenameNoExtension + ".md"
        else:
            filepath = notebook.replace("." + extension, ".md")
        from polyglot.markdown import kindle_notebook
        nb = kindle_notebook(log=log,
                             kindleExportPath=notebook,
                             outputPath=filepath)
        nb.convert()

    if openFlag:
        try:
            cmd = """open %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass
        try:
            cmd = """start %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info(
        '-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --' % (
            endTime,
            runningTime,
        ))

    return
def convert_dictionary_to_mysql_table(
        log,
        dictionary,
        dbTableName,
        uniqueKeyList=[],
        dbConn=False,
        createHelperTables=False,
        dateModified=False,
        returnInsertOnly=False,
        replace=False,
        batchInserts=True,
        reDatetime=False,
        skipChecks=False,
        dateCreated=True):
    """convert dictionary to mysql table

    **Key Arguments:**
        - ``log`` -- logger
        - ``dictionary`` -- python dictionary
        - ``dbConn`` -- the db connection
        - ``dbTableName`` -- name of the table you wish to add the data to (or create if it does not exist)
        - ``uniqueKeyList`` -- a list of the column names that need to be combined to create the primary key
        - ``createHelperTables`` -- create some helper tables with the main table, detailing original keywords etc
        - ``returnInsertOnly`` -- returns only the insert command (does not execute it)
        - ``dateModified`` -- add a modification date and updated flag to the mysql table
        - ``replace`` -- use replace instead of mysql insert statements (useful when updates are required)
        - ``batchInserts`` -- if returning insert statements, return the insert command and a separate tuple of values (suitable for parameterised, batched execution)
        - ``reDatetime`` -- a precompiled regular expression matching datetimes (passing this in cuts down on execution time as the pattern doesn't have to be recompiled on every call during multiple iterations of ``convert_dictionary_to_mysql_table``; see the example at the end of the usage section below)
        - ``skipChecks`` -- skip reliability checks. Less robust but a little faster.
        - ``dateCreated`` -- add a timestamp for dateCreated?

    **Return:**
        - ``returnInsertOnly`` -- the insert statement if requested

    **Usage:**

        To add a python dictionary to a database table, creating the table and/or columns if they don't yet exist:

        .. code-block:: python

            from fundamentals.mysql import convert_dictionary_to_mysql_table
            dictionary = {"a newKey": "cool", "and another": "super cool",
                      "uniquekey1": "cheese", "uniqueKey2": "burgers"}

            convert_dictionary_to_mysql_table(
                dbConn=dbConn,
                log=log,
                dictionary=dictionary,
                dbTableName="testing_table",
                uniqueKeyList=["uniquekey1", "uniqueKey2"],
                dateModified=False,
                returnInsertOnly=False,
                replace=True
            )

        Or just return the insert statement with a list of value tuples, i.e. do not execute the command on the database:

        .. code-block:: python

            insertCommand, valueTuple = convert_dictionary_to_mysql_table(
                dbConn=dbConn,
                log=log,
                dictionary=dictionary,
                dbTableName="testing_table",
                uniqueKeyList=["uniquekey1", "uniqueKey2"],
                dateModified=False,
                returnInsertOnly=True,
                replace=False,
                batchInserts=True
            )

            print insertCommand, valueTuple

            # OUT: 'INSERT IGNORE INTO `testing_table`
            # (a_newKey,and_another,dateCreated,uniqueKey2,uniquekey1) VALUES
            # (%s, %s, %s, %s, %s)', ('cool', 'super cool',
            # '2016-06-21T12:08:59', 'burgers', 'cheese')
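
        The returned command and value tuple can then be executed whenever convenient; a minimal sketch, assuming ``dbConn`` is a MySQLdb-style DB-API connection (the cursor calls below are illustrative, not part of the original example):

        .. code-block:: python

            # execute the parameterised insert built above
            cursor = dbConn.cursor()
            cursor.execute(insertCommand, valueTuple)
            dbConn.commit()
            cursor.close()

        Collecting many value tuples and passing them to ``cursor.executemany`` with the same ``insertCommand`` amortises the round-trip cost across a batch.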

        You can also return a list of single insert statements using ``batchInserts = False``. Using ``replace = True`` will also add instructions about how to replace duplicate entries in the database table if found:

        .. code-block:: python

            inserts = convert_dictionary_to_mysql_table(
                dbConn=dbConn,
                log=log,
                dictionary=dictionary,
                dbTableName="testing_table",
                uniqueKeyList=["uniquekey1", "uniqueKey2"],
                dateModified=False,
                returnInsertOnly=True,
                replace=True,
                batchInserts=False
            )

            print inserts

            # OUT: INSERT INTO `testing_table` (a_newKey,and_another,dateCreated,uniqueKey2,uniquekey1)
            # VALUES ("cool" ,"super cool" ,"2016-09-14T13:12:08" ,"burgers" ,"cheese")
            # ON DUPLICATE KEY UPDATE  a_newKey="cool", and_another="super
            # cool", dateCreated="2016-09-14T13:12:08", uniqueKey2="burgers",
            # uniquekey1="cheese"
    """

    log.debug('starting the ``convert_dictionary_to_mysql_table`` function')

    if not reDatetime:
        reDatetime = re.compile('^[0-9]{4}-[0-9]{2}-[0-9]{2}T')

    if not replace:
        insertVerb = "INSERT"
    else:
        insertVerb = "INSERT IGNORE"

    if returnInsertOnly == False:
        # TEST THE ARGUMENTS
        if str(type(dbConn).__name__) != "Connection":
            message = 'Please use a valid MySQL DB connection.'
            log.critical(message)
            raise TypeError(message)

        if not isinstance(dictionary, dict):
            message = 'Please make sure "dictionary" argument is a dict type.'
            log.critical(message)
            raise TypeError(message)

        if not isinstance(uniqueKeyList, list):
            message = 'Please make sure "uniqueKeyList" is a list'
            log.critical(message)
            raise TypeError(message)

        for i in uniqueKeyList:
            if i not in dictionary.keys():
                message = 'Please make sure values in "uniqueKeyList" are present in the "dictionary" you are trying to convert'
                log.critical(message)
                raise ValueError(message)

        for k, v in dictionary.iteritems():
            # log.debug('k: %s, v: %s' % (k, v,))
            if isinstance(v, list) and len(v) != 2:
                message = 'Please make sure the list values in "dictionary" are 2 items in length'
                log.critical("%s: in %s we have a %s (%s)" %
                             (message, k, v, type(v)))
                raise ValueError(message)
            if isinstance(v, list):
                if not (isinstance(v[0], str) or isinstance(v[0], int) or isinstance(v[0], bool) or isinstance(v[0], float) or isinstance(v[0], long) or isinstance(v[0], datetime.date) or v[0] is None):
                    message = 'Please make sure values in "dictionary" are of an appropriate value to add to the database, must be str, float, int or bool'
                    log.critical("%s: in %s we have a %s (%s)" %
                                 (message, k, v, type(v)))
                    raise ValueError(message)
            else:
                if not (isinstance(v, str) or isinstance(v, int) or isinstance(v, bool) or isinstance(v, float) or isinstance(v, long) or isinstance(v, unicode) or isinstance(v, datetime.date) or v is None):
                    this = type(v)
                    message = 'Please make sure values in "dictionary" are of an appropriate value to add to the database, must be str, float, int or bool : %(k)s is a %(this)s' % locals(
                    )
                    log.critical("%s: in %s we have a %s (%s)" %
                                 (message, k, v, type(v)))
                    raise ValueError(message)

        if not isinstance(createHelperTables, bool):
            message = 'Please make sure "createHelperTables" is a True or False'
            log.critical(message)
            raise TypeError(message)

        # TEST IF TABLE EXISTS
        if not skipChecks:
            tableExists = table_exists.table_exists(
                dbConn=dbConn,
                log=log,
                dbTableName=dbTableName
            )
        else:
            tableExists = False

        # CREATE THE TABLE IF IT DOES NOT EXIST
        if tableExists is False:
            sqlQuery = """
                CREATE TABLE IF NOT EXISTS `%(dbTableName)s`
                (`primaryId` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'An internal counter',
                `dateCreated` DATETIME NULL DEFAULT CURRENT_TIMESTAMP,
                `dateLastModified` DATETIME NULL DEFAULT CURRENT_TIMESTAMP,
                `updated` tinyint(4) DEFAULT '0',
                PRIMARY KEY (`primaryId`))
                ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=latin1;
            """ % locals()
            writequery(
                log=log,
                sqlQuery=sqlQuery,
                dbConn=dbConn,

            )

    qCreateColumn = ''
    formattedKey = ''
    formattedKeyList = []
    myValues = []

    # ADD EXTRA COLUMNS TO THE DICTIONARY todo: do I need this?
    if dateModified:
        dictionary['dateLastModified'] = [
            str(times.get_now_sql_datetime()), "date row was modified"]
        if replace == False:
            dictionary['updated'] = [0, "this row has been updated"]
        else:
            dictionary['updated'] = [1, "this row has been updated"]

    # ITERATE THROUGH THE DICTIONARY AND GENERATE THE TABLE COLUMN WITH THE
    # NAME OF THE KEY, IF IT DOES NOT EXIST
    count = len(dictionary)
    i = 1
    for (key, value) in dictionary.items():
        if (isinstance(value, list) and value[0] is None):
            del dictionary[key]
    # SORT THE DICTIONARY BY KEY
    odictionary = c.OrderedDict(sorted(dictionary.items()))
    for (key, value) in odictionary.iteritems():

        formattedKey = key.replace(" ", "_").replace("-", "_")
        # `DEC` IS A RESERVED KEYWORD IN MYSQL - NEED TO CHANGE BEFORE INGEST
        if formattedKey == "dec":
            formattedKey = "decl"
        if formattedKey == "DEC":
            formattedKey = "DECL"

        formattedKeyList.extend([formattedKey])
        if len(key) > 0:
            # CONVERT LIST AND FEEDPARSER VALUES TO YAML (SO I CAN PASS IT AS A
            # STRING TO MYSQL)
            if isinstance(value, list) and (isinstance(value[0], list)):
                value[0] = yaml.dump(value[0])
                value[0] = str(value[0])
            # REMOVE CHARACTERS THAT COLLIDE WITH MYSQL
            # if type(value[0]) == str or type(value[0]) == unicode:
            #     value[0] = value[0].replace('"', """'""")
            # JOIN THE VALUES TOGETHER IN A LIST - EASIER TO GENERATE THE MYSQL
            # COMMAND LATER
            if isinstance(value, str):
                value = value.replace('\\', '\\\\')
                value = value.replace('"', '\\"')
                try:
                    udata = value.decode("utf-8", "ignore")
                    value = udata.encode("ascii", "ignore")
                except:
                    log.error('could not decode value %(value)s' % locals())

                # log.debug('udata: %(udata)s' % locals())

            if isinstance(value, unicode):
                value = value.replace('"', '\\"')
                value = value.encode("ascii", "ignore")

            if isinstance(value, list) and isinstance(value[0], unicode):
                myValues.extend(['%s' % value[0].strip()])
            elif isinstance(value, list):
                myValues.extend(['%s' % (value[0], )])
            else:
                myValues.extend(['%s' % (value, )])

            if returnInsertOnly == False:
                # CHECK IF COLUMN EXISTS YET
                colExists = \
                    "SELECT * FROM information_schema.COLUMNS WHERE TABLE_SCHEMA=DATABASE() AND COLUMN_NAME='" + \
                    formattedKey + "' AND TABLE_NAME='" + dbTableName + """'"""
                rows = []
                try:
                    # log.debug('checking if the column '+formattedKey+' exists
                    # in the '+dbTableName+' table')

                    rows = readquery(
                        log=log,
                        sqlQuery=colExists,
                        dbConn=dbConn,
                    )
                except Exception as e:
                    log.error('something went wrong: ' + str(e) + '\n')

                # IF COLUMN DOESN'T EXIST - GENERATE IT
                if len(rows) == 0:
                    qCreateColumn = """ALTER TABLE `%s` ADD `%s""" % (
                        dbTableName, formattedKey)
                    if not isinstance(value, list):
                        value = [value]
                    if reDatetime.search(str(value[0])):
                        # log.debug('Ok - a datetime string was found')
                        qCreateColumn += '` datetime DEFAULT NULL'
                    elif formattedKey == 'updated_parsed' or formattedKey == 'published_parsed' or formattedKey \
                            == 'feedName' or formattedKey == 'title':
                        qCreateColumn += '` varchar(100) DEFAULT NULL'
                    elif (isinstance(value[0], str) or isinstance(value[0], unicode)) and len(value[0]) < 30:
                        qCreateColumn += '` varchar(100) DEFAULT NULL'
                    elif (isinstance(value[0], str) or isinstance(value[0], unicode)) and len(value[0]) >= 30 and len(value[0]) < 80:
                        qCreateColumn += '` varchar(100) DEFAULT NULL'
                    elif isinstance(value[0], str) or isinstance(value[0], unicode):
                        columnLength = 450 + len(value[0]) * 2
                        qCreateColumn += '` varchar(' + str(
                            columnLength) + ') DEFAULT NULL'
                    elif isinstance(value[0], int) and abs(value[0]) <= 9:
                        qCreateColumn += '` tinyint DEFAULT NULL'
                    elif isinstance(value[0], int):
                        qCreateColumn += '` int DEFAULT NULL'
                    elif isinstance(value[0], float) or isinstance(value[0], long):
                        qCreateColumn += '` double DEFAULT NULL'
                    elif isinstance(value[0], bool):
                        qCreateColumn += '` tinyint DEFAULT NULL'
                    elif isinstance(value[0], list):
                        qCreateColumn += '` varchar(1024) DEFAULT NULL'
                    else:
                        # log.debug('Do not know what format to add this key in
                        # MySQL - removing from dictionary: %s, %s'
                        # % (key, type(value[0])))
                        formattedKeyList.pop()
                        myValues.pop()
                        qCreateColumn = None
                    if qCreateColumn:
                        # ADD COMMENT TO GIVE THE ORIGINAL KEYWORD IF FORMATTED FOR
                        # MYSQL
                        if key != formattedKey:
                            qCreateColumn += " COMMENT 'original keyword: " + \
                                key + """'"""
                        # CREATE THE COLUMN IF IT DOES NOT EXIST
                        try:
                            log.info('creating the ' +
                                     formattedKey + ' column in the ' + dbTableName + ' table')
                            writequery(
                                log=log,
                                sqlQuery=qCreateColumn,
                                dbConn=dbConn
                            )

                        except Exception as e:
                            # log.debug('qCreateColumn: %s' % (qCreateColumn,
                            # ))
                            log.error('could not create the ' + formattedKey + ' column in the ' + dbTableName
                                      + ' table -- ' + str(e) + '\n')

    if returnInsertOnly == False:
        # GENERATE THE INDEX NAME - THEN CREATE INDEX IF IT DOES NOT YET EXIST
        if len(uniqueKeyList):
            for i in range(len(uniqueKeyList)):
                uniqueKeyList[i] = uniqueKeyList[
                    i].replace(" ", "_").replace("-", "_")
                if uniqueKeyList[i] == "dec":
                    uniqueKeyList[i] = "decl"
                if uniqueKeyList[i] == "DEC":
                    uniqueKeyList[i] = "DECL"

            indexName = uniqueKeyList[0].replace(" ", "_").replace("-", "_")
            for i in range(len(uniqueKeyList) - 1):
                indexName += '_' + uniqueKeyList[i + 1]

            indexName = indexName.lower().replace("  ", " ").replace(" ", "_")

            sqlQuery = u"""SELECT COUNT(*) FROM INFORMATION_SCHEMA.STATISTICS WHERE TABLE_SCHEMA = DATABASE() AND TABLE_NAME = '""" + \
                dbTableName + """' AND INDEX_NAME = '""" + indexName + """'"""
            rows = readquery(
                log=log,
                sqlQuery=sqlQuery,
                dbConn=dbConn,
                quiet=False
            )

            exists = rows[0]['COUNT(*)']
            # log.debug('uniqueKeyList: %s' % (uniqueKeyList,))
            if exists == 0:
                if isinstance(uniqueKeyList, list):
                    uniqueKeyList = ','.join(uniqueKeyList)

                addUniqueKey = 'ALTER TABLE `' + dbTableName + \
                    '` ADD unique ' + indexName + \
                    """ (""" + uniqueKeyList + ')'
                # log.debug('HERE IS THE COMMAND:'+addUniqueKey)
                writequery(
                    log=log,
                    sqlQuery=addUniqueKey,
                    dbConn=dbConn
                )

    if returnInsertOnly == True and batchInserts == True:
        myKeys = '`,`'.join(formattedKeyList)
        valueString = ("%s, " * len(myValues))[:-2]
        insertCommand = insertVerb + """ INTO `""" + dbTableName + \
            """` (`""" + myKeys + """`, dateCreated) VALUES (""" + \
            valueString + """, NOW())"""
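        # SWAP THE STRING "None" FOR A REAL None SO IT IS INSERTED AS A SQL NULL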
        mv = []
        mv[:] = [None if m == "None" else m for m in myValues]
        valueTuple = tuple(mv)

        dup = ""
        if replace:
            dup = " ON DUPLICATE KEY UPDATE "
            for k, v in zip(formattedKeyList, mv):
                dup = """%(dup)s %(k)s=values(%(k)s),""" % locals()

        insertCommand = insertCommand + dup

        insertCommand = insertCommand.replace('\\""', '\\" "')
        insertCommand = insertCommand.replace('""', "null")
        insertCommand = insertCommand.replace('!!python/unicode:', '')
        insertCommand = insertCommand.replace('!!python/unicode', '')
        insertCommand = insertCommand.replace('"None"', 'null')

        if not dateCreated:
            insertCommand = insertCommand.replace(
                ", dateCreated)", ")").replace(", NOW())", ")")

        return insertCommand, valueTuple

    # GENERATE THE INSERT COMMAND - IGNORE DUPLICATE ENTRIES
    myKeys = '`,`'.join(formattedKeyList)
    myValues = '" ,"'.join(myValues)
    # log.debug(myValues+" ------ PRESTRIP")
    # REMOVE SOME CONVERSION NOISE
    myValues = myValues.replace('time.struct_time', '')
    myValues = myValues.replace(
        '- !!python/object/new:feedparser.FeedParserDict', '')
    myValues = myValues.replace(
        '!!python/object/new:feedparser.FeedParserDict', '')
    myValues = myValues.replace('dictitems:', '')
    myValues = myValues.replace('dictitems', '')
    myValues = myValues.replace('!!python/unicode:', '')
    myValues = myValues.replace('!!python/unicode', '')
    myValues = myValues.replace('"None"', 'null')
    # myValues = myValues.replace('"None', 'null')

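    # CLOSE THE FINAL QUOTED VALUE, UNLESS IT WAS CONVERTED TO null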
    if myValues[-4:] != 'null':
        myValues += '"'

    dup = ""
    if replace:
        dupValues = ('"' + myValues).split(" ,")
        dupKeys = formattedKeyList
        dup = dup + " ON DUPLICATE KEY UPDATE "
        for k, v in zip(dupKeys, dupValues):
            dup = """%(dup)s `%(k)s`=%(v)s,""" % locals()

        if dateModified:
            dup = """%(dup)s updated=IF(""" % locals()
            for k, v in zip(dupKeys, dupValues):
                if v == "null":
                    dup = """%(dup)s `%(k)s` is %(v)s AND """ % locals()
                else:
                    dup = """%(dup)s `%(k)s`=%(v)s AND """ % locals()
            dup = dup[:-5] + ", 0, 1), dateLastModified=IF("
            for k, v in zip(dupKeys, dupValues):
                if v == "null":
                    dup = """%(dup)s `%(k)s` is %(v)s AND """ % locals()
                else:
                    dup = """%(dup)s `%(k)s`=%(v)s AND """ % locals()
            dup = dup[:-5] + ", dateLastModified, NOW())"
        else:
            dup = dup[:-1]

    # log.debug(myValues+" ------ POSTSTRIP")
    addValue = insertVerb + """ INTO `""" + dbTableName + \
        """` (`""" + myKeys + """`, dateCreated) VALUES (\"""" + \
        myValues + """, NOW()) %(dup)s """ % locals()

    if not dateCreated:
        addValue = addValue.replace(
            ", dateCreated)", ")").replace(", NOW())", ")", 1)

    addValue = addValue.replace('\\""', '\\" "')
    addValue = addValue.replace('""', "null")
    addValue = addValue.replace('!!python/unicode:', '')
    addValue = addValue.replace('!!python/unicode', '')
    addValue = addValue.replace('"None"', 'null')
    # log.debug(addValue)

    if returnInsertOnly == True:
        return addValue

    message = ""
    try:
        # log.debug('adding new data to the %s table; query: %s' %
        # (dbTableName, addValue))"
        writequery(
            log=log,
            sqlQuery=addValue,
            dbConn=dbConn
        )

    except Exception as e:
        log.error("could not add new data added to the table '" +
                  dbTableName + "' : " + str(e) + '\n')

    log.debug('completed the ``convert_dictionary_to_mysql_table`` function')
    return None, None
def main(arguments=None):
    """
    *The main function used when ``shift_coordinates.py`` is run as a single script from the cl, or when installed as a cl command*
    """
    from dryxPython import astrotools as dat
    # setup the command-line util settings
    su = tools(
        arguments=arguments,
        docString=__doc__,
        logLevel="WARNING",
        options_first=False
    )
    arguments, settings, log, dbConn = su.setup()

    # tab completion for raw_input
    readline.set_completer_delims(' \t\n;')
    readline.parse_and_bind("tab: complete")
    readline.set_completer(tab_complete)

    # unpack remaining cl arguments using `exec` to setup the variable names
    # automatically
    for arg, val in arguments.iteritems():
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        if isinstance(val, str) or isinstance(val, unicode):
            exec(varname + " = '%s'" % (val,))
        else:
            exec(varname + " = %s" % (val,))
        if arg == "--dbConn":
            dbConn = val
        log.debug('%s = %s' % (varname, val,))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info(
        '--- STARTING TO RUN THE shift_coordinates.py AT %s' %
        (startTime,))

    # set options interactively if user requests
    # if interactiveFlag:
    # x-raw-input
    # x-boolean-raw-input
    #     pass

    # call the worker function
    # x-if-settings-or-database-credientials
    newCoords = shift_coordinates(
        log=log,
        ra=ra,
        dec=dec,
        north=float(north),
        east=float(east)
    )

    raNew, decNew = newCoords.get()

    raSex = dat.ra_to_sex(
        ra=raNew,
        delimiter=':'
    )
    decSex = dat.dec_to_sex(
        dec=decNew,
        delimiter=':'
    )

    print """%(raNew)6.4f, %(decNew)6.4f (%(raSex)s, %(decSex)s)""" % locals()

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info('-- FINISHED ATTEMPT TO RUN THE shift_coordinates.py AT %s (RUNTIME: %s) --' %
             (endTime, runningTime, ))

    return
Example n. 27
def main(arguments=None):
    """
    *The main function used when ``cl_utils.py`` is run as a single script from the cl, or when installed as a cl command*
    """
    # setup the command-line util settings

    dev_flag = False

    su = tools(arguments=arguments,
               docString=__doc__,
               logLevel="DEBUG",
               options_first=False,
               projectName="rockAtlas")
    arguments, settings, log, dbConn = su.setup()

    # unpack remaining cl arguments using `exec` to setup the variable names
    # automatically
    for arg, val in arguments.iteritems():
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        if varname == "import":
            varname = "iimport"
        if isinstance(val, str) or isinstance(val, unicode):
            exec(varname + " = '%s'" % (val, ))
        else:
            exec(varname + " = %s" % (val, ))
        if arg == "--dbConn":
            dbConn = val
        log.debug('%s = %s' % (
            varname,
            val,
        ))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info('--- STARTING TO RUN THE cl_utils.py AT %s' % (startTime, ))

    if init:
        from os.path import expanduser
        home = expanduser("~")
        filepath = home + "/.config/rockAtlas/rockAtlas.yaml"
        try:
            cmd = """open %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass
        try:
            cmd = """start %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass

    # CALL FUNCTIONS/OBJECTS
    if bookkeeping:
        from rockAtlas.bookkeeping import bookkeeper
        bk = bookkeeper(log=log, settings=settings, fullUpdate=fullFlag)
        bk.clean_all()

    if astorb:
        from rockAtlas.orbital_elements import astorb
        oe = astorb(log=log, settings=settings)
        oe.refresh()

    if pyephem:

        from rockAtlas.positions import pyephemPositions
        pyeph = pyephemPositions(log=log, settings=settings, dev_flag=dev_flag)
        pyeph.get(singleSnapshot=oneFlag)

    if orbfit:
        from rockAtlas.positions import orbfitPositions
        oe = orbfitPositions(log=log, settings=settings, dev_flag=dev_flag)
        oe.get(singleExposure=oneFlag)

    if cache:
        from rockAtlas.phot import download
        data = download(log=log, settings=settings, dev_flag=dev_flag)
        data.get(days=days)

    if dophot:
        from rockAtlas.phot import dophotMatch
        dp = dophotMatch(log=log, settings=settings)
        dp.get()

    if cycle:
        from rockAtlas.phot import download
        from rockAtlas.positions import pyephemPositions
        from rockAtlas.positions import orbfitPositions
        from rockAtlas.phot import dophotMatch
        from fundamentals.mysql import readquery

        # INITIAL ACTIONS
        # SETUP ALL DATABASE CONNECTIONS
        from rockAtlas import database
        db = database(log=log, settings=settings)
        dbConns, dbVersions = db.connect()
        atlas3DbConn = dbConns["atlas3"]
        atlas4DbConn = dbConns["atlas4"]
        atlasMoversDBConn = dbConns["atlasMovers"]

        while True:

            if dev_flag:
                o = " and dev_flag = 1"
            else:
                o = " "

            sqlQuery = u"""
                select distinct floor(mjd) from (
select mjd from atlas_exposures where dophot_match = 0 %(o)s
union all
select mjd from day_tracker where processed = 0 %(o)s) as a;
            """ % locals()
            rows = readquery(log=log,
                             sqlQuery=sqlQuery,
                             dbConn=atlasMoversDBConn,
                             quiet=False)

            if len(rows) == 0:
                if dev_flag:
                    print "Processing of the ATLAS development dataset is now complete and up to date"
                else:
                    print "Processing of ATLAS data is now complete and up to date"
                break

            start_time = time.time()

            data = download(log=log, settings=settings, dev_flag=dev_flag)
            data.get(days=days)

            print "%d seconds to download ATLAS cache of %s days\n" % (
                time.time() - start_time, days)
            start_time = time.time()

            pyeph = pyephemPositions(log=log,
                                     settings=settings,
                                     dev_flag=dev_flag)
            pyeph.get()

            print "%d seconds to generate pyephem snapshots\n" % (time.time() -
                                                                  start_time, )
            start_time = time.time()

            oe = orbfitPositions(log=log, settings=settings, dev_flag=dev_flag)
            oe.get()

            print "%d seconds to generate orbfit positions\n" % (time.time() -
                                                                 start_time, )
            start_time = time.time()

            dp = dophotMatch(log=log, settings=settings)
            dp.get()

            print "%d seconds to extract dophot measurements\n" % (
                time.time() - start_time, )
            start_time = time.time()

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info(
        '-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --' % (
            endTime,
            runningTime,
        ))

    return
Example n. 28
def main(arguments=None):
    """
    *The main function used when ``cl_utils.py`` is run as a single script from the cl, or when installed as a cl command*
    """
    # setup the command-line util settings
    su = tools(
        arguments=arguments,
        docString=__doc__,
        logLevel="DEBUG",
        options_first=False,
        projectName="HMpTy"
    )
    arguments, settings, log, dbConn = su.setup()

    # unpack remaining cl arguments using `exec` to setup the variable names
    # automatically
    for arg, val in arguments.iteritems():
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        if isinstance(val, str) or isinstance(val, unicode):
            exec(varname + " = '%s'" % (val,))
        else:
            exec(varname + " = %s" % (val,))
        if arg == "--dbConn":
            dbConn = val
        log.debug('%s = %s' % (varname, val,))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info(
        '--- STARTING TO RUN THE cl_utils.py AT %s' %
        (startTime,))

    # CALL FUNCTIONS/OBJECTS
    if index:
        add_htm_ids_to_mysql_database_table(
            raColName=raCol,
            declColName=decCol,
            tableName=tableName,
            dbConn=dbConn,
            log=log,
            primaryIdColumnName=primaryIdCol,
            reindex=forceFlag
        )

    if search:
        cs = conesearch(
            log=log,
            dbConn=dbConn,
            tableName=tableName,
            columns=False,
            ra=ra,
            dec=dec,
            radiusArcsec=float(radius),
            separations=True,
            distinct=False,
            sqlWhere=False
        )
        matchIndies, matches = cs.search()
        if not renderFlag:
            print matches.table()
        elif renderFlag == "json":
            print matches.json()
        elif renderFlag == "csv":
            print matches.csv()
        elif renderFlag == "yaml":
            print matches.yaml()
        elif renderFlag == "md":
            print matches.markdown()
        elif renderFlag == "table":
            print matches.markdown()
        elif renderFlag == "mysql":
            print matches.mysql(tableName=resultsTable)

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info('-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --' %
             (endTime, runningTime, ))

    return
Example n. 29
def main(arguments=None):
    """
    *The main function used when ``cl_utils.py`` is run as a single script from the cl, or when installed as a cl command*
    """
    # setup the command-line util settings
    su = tools(arguments=arguments,
               docString=__doc__,
               logLevel="WARNING",
               options_first=False,
               projectName="polygot")
    arguments, settings, log, dbConn = su.setup()

    # unpack remaining cl arguments using `exec` to setup the variable names
    # automatically
    for arg, val in arguments.iteritems():
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        if isinstance(val, str) or isinstance(val, unicode):
            exec(varname + " = '%s'" % (val, ))
        else:
            exec(varname + " = %s" % (val, ))
        if arg == "--dbConn":
            dbConn = val
        log.debug('%s = %s' % (
            varname,
            val,
        ))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info('--- STARTING TO RUN THE cl_utils.py AT %s' % (startTime, ))

    # for k, v in locals().iteritems():
    #     print k, v

    if not destinationFolder:
        destinationFolder = os.getcwd()
    if not filenameFlag:
        filenameFlag = False
    if not cleanFlag:
        readability = False
    else:
        readability = True

    if pdf:
        filepath = printpdf.printpdf(log=log,
                                     settings=settings,
                                     url=url,
                                     folderpath=destinationFolder,
                                     title=filenameFlag,
                                     append=False,
                                     readability=readability).get()

    if html:

        cleaner = htmlCleaner.htmlCleaner(
            log=log,
            settings=settings,
            url=url,
            outputDirectory=destinationFolder,
            title=filenameFlag,  # SET TO FALSE TO USE WEBPAGE TITLE,
            style=cleanFlag,  # add polygot's styling to the HTML document
            metadata=True,  # include metadata in generated HTML (e.g. title),
            h1=True  # include title as H1 at the top of the doc
        )
        filepath = cleaner.clean()

    if openFlag:
        try:
            cmd = """open %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass
        try:
            cmd = """start %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info(
        '-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --' % (
            endTime,
            runningTime,
        ))

    return
Example n. 30
def main(arguments=None):
    """
    *The main function used when `cl_utils.py` is run as a single script from the cl, or when installed as a cl command*
    """
    # setup the command-line util settings
    su = tools(arguments=arguments,
               docString=__doc__,
               logLevel="WARNING",
               options_first=False,
               projectName="marshallEngine",
               defaultSettingsFile=True)
    arguments, settings, log, dbConn = su.setup()

    # tab completion for raw_input
    readline.set_completer_delims(' \t\n;')
    readline.parse_and_bind("tab: complete")
    readline.set_completer(tab_complete)

    # UNPACK REMAINING CL ARGUMENTS USING `EXEC` TO SETUP THE VARIABLE NAMES
    # AUTOMATICALLY
    a = {}
    for arg, val in list(arguments.items()):
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        a[varname] = val
        if arg == "--dbConn":
            dbConn = val
            a["dbConn"] = val
        log.debug('%s = %s' % (
            varname,
            val,
        ))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info('--- STARTING TO RUN THE cl_utils.py AT %s' % (startTime, ))

    init = a["init"]
    clean = a["clean"]
    iimport = a["import"]
    lightcurve = a["lightcurve"]
    transientBucketId = a["transientBucketId"]
    survey = a["survey"]
    withInLastDay = a["withInLastDay"]
    settingsFlag = a["settingsFlag"]

    # set options interactively if user requests
    if "interactiveFlag" in a and a["interactiveFlag"]:

        # load previous settings
        moduleDirectory = os.path.dirname(__file__) + "/resources"
        pathToPickleFile = "%(moduleDirectory)s/previousSettings.p" % locals()
        try:
            with open(pathToPickleFile):
                pass
            previousSettingsExist = True
        except:
            previousSettingsExist = False
        previousSettings = {}
        if previousSettingsExist:
            previousSettings = pickle.load(open(pathToPickleFile, "rb"))

        # x-raw-input
        # x-boolean-raw-input
        # x-raw-input-with-default-value-from-previous-settings

        # save the most recently used requests
        pickleMeObjects = []
        pickleMe = {}
        theseLocals = locals()
        for k in pickleMeObjects:
            pickleMe[k] = theseLocals[k]
        pickle.dump(pickleMe, open(pathToPickleFile, "wb"))

    if a["init"]:
        from os.path import expanduser
        home = expanduser("~")
        filepath = home + "/.config/marshallEngine/marshallEngine.yaml"
        try:
            cmd = """open %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass
        try:
            cmd = """start %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass
        return

    # CALL FUNCTIONS/OBJECTS
    # DEFAULT VALUES
    if not withInLastDay:
        withInLastDay = 30

    # CALL FUNCTIONS/OBJECTS
    if clean:
        # RESCUE ORPHANED TRANSIENTS - NO MASTER ID FLAG
        print("rescuing orphaned transients")
        from fundamentals.mysql import writequery

        procedureNames = [
            "update_transients_with_no_masteridflag",
            "insert_new_transients_into_transientbucketsummaries",
            "resurrect_objects", "update_sherlock_xmatch_counts",
            "update_inbox_auto_archiver", "update_transient_akas"
        ]

        # CALL EACH PROCEDURE
        for p in procedureNames:
            sqlQuery = "CALL `%(p)s`();" % locals()
            writequery(
                log=log,
                sqlQuery=sqlQuery,
                dbConn=dbConn,
            )

        # UPDATE THE TRANSIENT BUCKET SUMMARY TABLE IN THE MARSHALL DATABASE
        from marshallEngine.housekeeping import update_transient_summaries
        updater = update_transient_summaries(log=log,
                                             settings=settings,
                                             dbConn=dbConn).update()

    if iimport:
        if survey.lower() == "panstarrs":
            from marshallEngine.feeders.panstarrs.data import data
            from marshallEngine.feeders.panstarrs import images
        if survey.lower() == "atlas":
            from marshallEngine.feeders.atlas.data import data
            from marshallEngine.feeders.atlas import images
        if survey.lower() == "useradded":
            from marshallEngine.feeders.useradded.data import data
            from marshallEngine.feeders.useradded import images
        if survey.lower() == "tns":
            from marshallEngine.feeders.tns.data import data
            from marshallEngine.feeders.tns import images
        if survey.lower() == "ztf":
            from marshallEngine.feeders.ztf.data import data
            from marshallEngine.feeders.ztf import images
        ingester = data(log=log, settings=settings,
                        dbConn=dbConn).ingest(withinLastDays=withInLastDay)
        cacher = images(log=log, settings=settings,
                        dbConn=dbConn).cache(limit=3000)

        from marshallEngine.services import panstarrs_location_stamps
        ps_stamp = panstarrs_location_stamps(log=log,
                                             settings=settings,
                                             dbConn=dbConn).get()

    if lightcurve:
        from marshallEngine.lightcurves import marshall_lightcurves
        lc = marshall_lightcurves(log=log,
                                  dbConn=dbConn,
                                  settings=settings,
                                  transientBucketIds=transientBucketId)
        filepath = lc.plot()
        print(
            "The lightcurve plot for transient %(transientBucketId)s can be found here: %(filepath)s"
            % locals())

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info(
        '-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --' % (
            endTime,
            runningTime,
        ))

    return
Example n. 31
def main(arguments=None):
    """
    *The main function used when ``cl_utils.py`` is run as a single script from the cl, or when installed as a cl command*
    """
    su = tools(
        arguments=arguments,
        docString=__doc__,
        logLevel="WARNING",
        options_first=False
    )
    arguments, settings, log, dbConn = su.setup()

    readline.set_completer_delims(' \t\n;')
    readline.parse_and_bind("tab: complete")
    readline.set_completer(tab_complete)

    # unpack remaining cl arguments using `exec` to setup the variable names
    # automatically
    for arg, val in arguments.iteritems():
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        if isinstance(val, str) or isinstance(val, unicode):
            exec(varname + " = '%s'" % (val,))
        else:
            exec(varname + " = %s" % (val,))
        if arg == "--dbConn":
            dbConn = val
        log.debug('%s = %s' % (varname, val,))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info(
        '--- STARTING TO RUN THE cl_utils.py AT %s' %
        (startTime,))

    privateFlag = True
    # set options interactively if user requests
    if interactiveFlag:

        if interactiveFlag == "create":
            create = True

        location = ""
        while location != "g" and location != "b":
            location = raw_input(
                "github or bitbucket [g/b]? \n  >  ")
        if location == "g":
            location = "github"
        else:
            location = "bitbucket"

        # load previous settings
        moduleDirectory = os.path.dirname(__file__) + "/resources"
        pathToPickleFile = "%(moduleDirectory)s/%(location)s.p" % locals()
        try:
            with open(pathToPickleFile):
                pass
            previousSettingsExist = True
        except:
            previousSettingsExist = False
        previousSettings = {}
        if previousSettingsExist:
            previousSettings = pickle.load(open(pathToPickleFile, "rb"))

        if "pathToHostDirectory" in previousSettings:
            default = previousSettings["pathToHostDirectory"]
            pathToHostDirectory = raw_input(
                "path to the host directory? (%(default)s)\n  >  " % locals())
            if not len(pathToHostDirectory):
                pathToHostDirectory = default
        else:
            pathToHostDirectory = raw_input(
                "path to the host directory?\n  >  " % locals())

        if "yamlSettingsFlag" in previousSettings:
            default = previousSettings["yamlSettingsFlag"]
            yamlSettingsFlag = raw_input(
                "path to the credentials file? (%(default)s)\n  >  " % locals())
            if not len(yamlSettingsFlag):
                yamlSettingsFlag = default
        else:
            yamlSettingsFlag = raw_input(
                "path to the credentials file?\n  >  " % locals())

        if "projectName" in previousSettings:
            default = previousSettings["projectName"]
            projectName = raw_input(
                "name of new git repo? (%(default)s)\n  >  " % locals())
            if not len(projectName):
                projectName = default
        else:
            projectName = raw_input(
                "name of new git repo?\n  >  " % locals())

        while branchesFlag != "y" and branchesFlag != "n":
            branchesFlag = raw_input(
                "create dev and bug branches [y/n]? \n  >  ")
        if branchesFlag == "y":
            branchesFlag = True
        else:
            branchesFlag = False

        while privateFlag != "y" and privateFlag != "n":
            privateFlag = raw_input(
                "private repo [y/n]? \n  >  ")
        if privateFlag == "y":
            privateFlag = True
        else:
            privateFlag = False

        if "strapline" in previousSettings:
            default = previousSettings["strapline"]
            strapline = raw_input(
                "give a short description of the project (%(default)s)\n  >  " % locals())
            if not len(strapline):
                strapline = default
        else:
            strapline = raw_input(
                "give a short description of the project\n  >  " % locals())

        while wikiFlag != "y" and wikiFlag != "n":
            wikiFlag = raw_input(
                "add a wiki [y/n]? \n  >  ")
        if wikiFlag == "y":
            wiki = ""
            while wiki != "y" and wiki != "n":
                wiki = raw_input(
                    "do you want to make a separate repo for wiki/issues [y/n]? \n  >  ")
            if wiki == "y":
                wiki = "seperate"
            else:
                wiki = "same"
        else:
            wiki = False

        # save the most recently used requests
        pickleMeObjects = [
            "pathToHostDirectory", "projectName", "strapline", "yamlSettingsFlag"]
        pickleMe = {}
        theseLocals = locals()
        for k in pickleMeObjects:
            pickleMe[k] = theseLocals[k]
        pickle.dump(pickleMe, open(pathToPickleFile, "wb"))

    else:
        if wikiFlag:
            wiki = seperateOrSame
        else:
            wiki = False

    # Create command ...
    if create and pathToHostDirectory and projectName:
        create_project_folder(
            log=log,
            pathToHostDirectory=pathToHostDirectory,
            wiki=wiki,
            projectName=projectName
        )

        pathToProjectRoot = """%(pathToHostDirectory)s/%(projectName)s""" % locals(
        )

        create_local_git_repo(
            log=log,
            pathToProjectRoot=pathToProjectRoot,
            branches=branchesFlag
        )

        wikiUrl = False
        if location == "bb" or location == "bitbucket":
            repoUrl = add_git_repo_to_bitbucket(
                log=log,
                pathToProject=pathToProjectRoot,
                strapline=strapline,
                pathToCredentials=yamlSettingsFlag,
                private=privateFlag,
                wiki=wiki
            )
            if wiki:
                thisWiki = clone_bitbucket_repo_wiki(
                    log=log,
                    projectName=projectName,
                    pathToHostDirectory=pathToHostDirectory,
                    branches=branchesFlag,
                    strapline=strapline,
                    wiki=wiki,
                    pathToCredentials=yamlSettingsFlag
                )
                wikiUrl, pathToWikiRoot = thisWiki.get()
                add_git_repo_to_tower(
                    log=log,
                    pathToProjectRoot=pathToWikiRoot
                )
                if wikiUrl:
                    webbrowser.open_new_tab(wikiUrl)
        elif location == "gh" or location == "github":
            repoUrl = add_git_repo_to_github(
                log=log,
                pathToProject=pathToProjectRoot,
                strapline=strapline,
                private=privateFlag,
                pathToCredentials=yamlSettingsFlag,
                wiki=wiki
            )

            if wiki:
                thisWiki = clone_github_repo_wiki(
                    log=log,
                    projectName=projectName,
                    pathToHostDirectory=pathToHostDirectory,
                    branches=branchesFlag,
                    strapline=strapline,
                    wiki=wiki,
                    pathToCredentials=yamlSettingsFlag
                )
                wikiUrl, pathToWikiRoot = thisWiki.get()
                add_git_repo_to_tower(
                    log=log,
                    pathToProjectRoot=pathToWikiRoot
                )
                if wikiUrl:
                    webbrowser.open_new_tab(wikiUrl)

        add_git_repo_to_tower(
            log=log,
            pathToProjectRoot=pathToProjectRoot
        )

        open_repo_in_sublime(
            log=log,
            pathToProjectRoot=pathToProjectRoot
        )

        ## open in webbrowser
        webbrowser.open_new_tab(repoUrl)

    # hook commands ...
    if hook:
        if location == "bb" or location == "bitbucket":
            add_hook_to_bitbucket_repo(
                log,
                repoName=projectName,
                hookDomain=domainName,
                pathToCredentials=yamlSettingsFlag
            )
        elif location == "gh" or location == "github":
            add_hook_to_github_repo(
                log,
                repoName=projectName,
                hookDomain=domainName,
                pathToCredentials=yamlSettingsFlag
            )
        open_webhook_list_in_browser(
            log=log,
            location=location,
            projectName=projectName
        )

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info(
        '-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --' %
        (endTime, runningTime, ))

    return
Example n. 32
def main(arguments=None):
    """
    *The main function used when ``cl_utils.py`` is run as a single script from the cl, or when installed as a cl command*
    """
    su = tools(arguments=arguments,
               docString=__doc__,
               logLevel="WARNING",
               options_first=False)
    arguments, settings, log, dbConn = su.setup()

    readline.set_completer_delims(' \t\n;')
    readline.parse_and_bind("tab: complete")
    readline.set_completer(tab_complete)

    # unpack remaining cl arguments using `exec` to setup the variable names
    # automatically
    for arg, val in arguments.iteritems():
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        if isinstance(val, str) or isinstance(val, unicode):
            exec(varname + " = '%s'" % (val, ))
        else:
            exec(varname + " = %s" % (val, ))
        if arg == "--dbConn":
            dbConn = val
        log.debug('%s = %s' % (
            varname,
            val,
        ))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info('--- STARTING TO RUN THE cl_utils.py AT %s' % (startTime, ))

    privateFlag = True
    # set options interactively if user requests
    if interactiveFlag:

        if interactiveFlag == "create":
            create = True

        location = ""
        while location != "g" and location != "b":
            location = raw_input("github or bitbucket [g/b]? \n  >  ")
        if location == "g":
            location = "github"
        else:
            location = "bitbucket"

        # load previous settings
        moduleDirectory = os.path.dirname(__file__) + "/resources"
        pathToPickleFile = "%(moduleDirectory)s/%(location)s.p" % locals()
        try:
            with open(pathToPickleFile):
                pass
            previousSettingsExist = True
        except:
            previousSettingsExist = False
        previousSettings = {}
        if previousSettingsExist:
            previousSettings = pickle.load(open(pathToPickleFile, "rb"))

        if "pathToHostDirectory" in previousSettings:
            default = previousSettings["pathToHostDirectory"]
            pathToHostDirectory = raw_input(
                "path to the host directory? (%(default)s)\n  >  " % locals())
            if not len(pathToHostDirectory):
                pathToHostDirectory = default
        else:
            pathToHostDirectory = raw_input(
                "path to the host directory?\n  >  " % locals())

        if "yamlSettingsFlag" in previousSettings:
            default = previousSettings["yamlSettingsFlag"]
            yamlSettingsFlag = raw_input(
                "path to the credentials file? (%(default)s)\n  >  " %
                locals())
            if not len(yamlSettingsFlag):
                yamlSettingsFlag = default
        else:
            yamlSettingsFlag = raw_input(
                "path to the credentials file?\n  >  " % locals())

        if "projectName" in previousSettings:
            default = previousSettings["projectName"]
            projectName = raw_input(
                "name of new git repo? (%(default)s)\n  >  " % locals())
            if not len(projectName):
                projectName = default
        else:
            projectName = raw_input("name of new git repo?\n  >  " % locals())

        while branchesFlag != "y" and branchesFlag != "n":
            branchesFlag = raw_input(
                "create dev and bug branches [y/n]? \n  >  ")
        if branchesFlag == "y":
            branchesFlag = True
        else:
            branchesFlag = False

        while privateFlag != "y" and privateFlag != "n":
            privateFlag = raw_input("private repo [y/n]? \n  >  ")
        if privateFlag == "y":
            privateFlag = True
        else:
            privateFlag = False

        if "strapline" in previousSettings:
            default = previousSettings["strapline"]
            strapline = raw_input(
                "give a short description of the project (%(default)s)\n  >  "
                % locals())
            if not len(strapline):
                strapline = default
        else:
            strapline = raw_input(
                "give a short description of the project\n  >  " % locals())

        while wikiFlag != "y" and wikiFlag != "n":
            wikiFlag = raw_input("add a wiki [y/n]? \n  >  ")
        if wikiFlag == "y":
            wiki = ""
            while wiki != "y" and wiki != "n":
                wiki = raw_input(
                    "do you want to make a seperate repo for wiki/issues [y/n]? \n  >  "
                )
            if wiki == "y":
                wiki = "seperate"
            else:
                wiki = "same"
        else:
            wiki = False

        # save the most recently used requests
        pickleMeObjects = [
            "pathToHostDirectory", "projectName", "strapline",
            "yamlSettingsFlag"
        ]
        pickleMe = {}
        theseLocals = locals()
        for k in pickleMeObjects:
            pickleMe[k] = theseLocals[k]
        pickle.dump(pickleMe, open(pathToPickleFile, "wb"))

    else:
        if wikiFlag:
            wiki = seperateOrSame
        else:
            wiki = False

    # Create command ...
    if create and pathToHostDirectory and projectName:
        create_project_folder(log=log,
                              pathToHostDirectory=pathToHostDirectory,
                              wiki=wiki,
                              projectName=projectName)

        pathToProjectRoot = """%(pathToHostDirectory)s/%(projectName)s""" % locals(
        )

        create_local_git_repo(log=log,
                              pathToProjectRoot=pathToProjectRoot,
                              branches=branchesFlag)

        wikiUrl = False
        if location == "bb" or location == "bitbucket":
            repoUrl = add_git_repo_to_bitbucket(
                log=log,
                pathToProject=pathToProjectRoot,
                strapline=strapline,
                pathToCredentials=yamlSettingsFlag,
                private=privateFlag,
                wiki=wiki)
            if wiki:
                thisWiki = clone_bitbucket_repo_wiki(
                    log=log,
                    projectName=projectName,
                    pathToHostDirectory=pathToHostDirectory,
                    branches=branchesFlag,
                    strapline=strapline,
                    wiki=wiki,
                    pathToCredentials=yamlSettingsFlag)
                wikiUrl, pathToWikiRoot = thisWiki.get()
                add_git_repo_to_tower(log=log,
                                      pathToProjectRoot=pathToWikiRoot)
                if wikiUrl:
                    webbrowser.open_new_tab(wikiUrl)
        elif location == "gh" or location == "github":
            repoUrl = add_git_repo_to_github(
                log=log,
                pathToProject=pathToProjectRoot,
                strapline=strapline,
                private=privateFlag,
                pathToCredentials=yamlSettingsFlag,
                wiki=wiki)

            if wiki:
                thisWiki = clone_github_repo_wiki(
                    log=log,
                    projectName=projectName,
                    pathToHostDirectory=pathToHostDirectory,
                    branches=branchesFlag,
                    strapline=strapline,
                    wiki=wiki,
                    pathToCredentials=yamlSettingsFlag)
                wikiUrl, pathToWikiRoot = thisWiki.get()
                add_git_repo_to_tower(log=log,
                                      pathToProjectRoot=pathToWikiRoot)
                if wikiUrl:
                    webbrowser.open_new_tab(wikiUrl)

        add_git_repo_to_tower(log=log, pathToProjectRoot=pathToProjectRoot)

        open_repo_in_sublime(log=log, pathToProjectRoot=pathToProjectRoot)

        ## open in webbrowser
        webbrowser.open_new_tab(repoUrl)

    # hook commands ...
    if hook:
        if location == "bb" or location == "bitbucket":
            add_hook_to_bitbucket_repo(log,
                                       repoName=projectName,
                                       hookDomain=domainName,
                                       pathToCredentials=yamlSettingsFlag)
        elif location == "gh" or location == "github":
            add_hook_to_github_repo(log,
                                    repoName=projectName,
                                    hookDomain=domainName,
                                    pathToCredentials=yamlSettingsFlag)
        open_webhook_list_in_browser(log=log,
                                     location=location,
                                     projectName=projectName)

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info(
        '-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --' % (
            endTime,
            runningTime,
        ))

    return
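The interactive branch above persists the last-used answers to a pickle file and reloads them as prompt defaults on the next run. The same pattern as a small, self-contained helper, a sketch assuming only the standard library:

import os
import pickle

def load_previous_settings(pathToPickleFile):
    """Return the previously pickled settings dict, or {} on first run."""
    if os.path.exists(pathToPickleFile):
        with open(pathToPickleFile, "rb") as f:
            return pickle.load(f)
    return {}

def save_settings(pathToPickleFile, settings):
    """Pickle the settings dict so the next run can offer them as defaults."""
    with open(pathToPickleFile, "wb") as f:
        pickle.dump(settings, f)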
Example n. 33
def main(arguments=None):
    """
    The main function used when ``cl_utils.py`` is run as a single script from the cl, or when installed as a cl command

    .. todo ::

        - update key arguments values and definitions with defaults
        - update return values and definitions
        - update usage examples and text
        - update docstring text
        - check sublime snippet exists
        - clip any useful text to docs mindmap
        - regenerate the docs and check rendering of this docstring
    """
    # setup the command-line util settings

    su = tools(arguments=arguments,
               docString=__doc__,
               logLevel="WARNING",
               options_first=False,
               projectName="sherlock",
               distributionName="qub-sherlock")
    arguments, settings, log, dbConn = su.setup()

    # unpack remaining cl arguments using `exec` to setup the variable names
    # automatically
    for arg, val in arguments.iteritems():
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
            if varname == "import":
                varname = "iimport"
        if isinstance(val, str) or isinstance(val, unicode):
            exec(varname + " = '%s'" % (val, ))
        else:
            exec(varname + " = %s" % (val, ))
        if arg == "--dbConn":
            dbConn = val
        log.debug('%s = %s' % (
            varname,
            val,
        ))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.debug('--- STARTING TO RUN THE cl_utils.py AT %s' % (startTime, ))

    # call the worker function
    # x-if-settings-or-database-credientials
    if init:
        from os.path import expanduser
        home = expanduser("~")
        filepath = home + "/.config/sherlock/sherlock.yaml"
        cmd = """open %(filepath)s""" % locals()
        p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        try:
            cmd = """open %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass
        try:
            cmd = """start %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass

    if match or dbmatch:
        if verboseFlag:
            verbose = 2
        else:
            verbose = 1

        if skipNedUpdateFlag:
            updateNed = False
        else:
            updateNed = True

        if skipMagUpdateFlag:
            updatePeakMags = False
        else:
            updatePeakMags = True

        classifier = transient_classifier.transient_classifier(
            log=log,
            settings=settings,
            ra=ra,
            dec=dec,
            name=False,
            verbose=verbose,
            update=updateFlag,
            updateNed=updateNed,
            updatePeakMags=updatePeakMags)
        classifier.classify()

    if clean:
        cleaner = database_cleaner(log=log, settings=settings)
        cleaner.clean()
    if wiki:
        updateWiki = update_wiki_pages(log=log, settings=settings)
        updateWiki.update()

    if iimport and ned:
        ned = nedStreamImporter(log=log,
                                settings=settings,
                                coordinateList=["%(ra)s %(dec)s" % locals()],
                                radiusArcsec=radiusArcsec)
        ned.ingest()
    if iimport and cat:

        if cat_name == "veron":
            catalogue = veronImporter(log=log,
                                      settings=settings,
                                      pathToDataFile=pathToDataFile,
                                      version=cat_version,
                                      catalogueName=cat_name)
            catalogue.ingest()

        if "ned_d" in cat_name:
            catalogue = nedImporter(log=log,
                                    settings=settings,
                                    pathToDataFile=pathToDataFile,
                                    version=cat_version,
                                    catalogueName=cat_name)
            catalogue.ingest()
    if iimport and stream:
        if "marshall" in stream_name:
            stream = marshallImporter(
                log=log,
                settings=settings,
            )
            stream.ingest()
        if "ifs" in stream_name:
            stream = ifsImporter(log=log, settings=settings)
            stream.ingest()
    if not init and not match and not clean and not wiki and not iimport and ra:

        classifier = transient_classifier.transient_classifier(
            log=log,
            settings=settings,
            ra=ra,
            dec=dec,
            name=False,
            verbose=verboseFlag)
        classifier.classify()

    if info:
        print "sherlock-catalogues"
        wiki = update_wiki_pages(log=log, settings=settings)
        table = list(wiki._get_table_infos(trimmed=True))

        dataSet = list_of_dictionaries(log=log, listOfDictionaries=table)
        tableData = dataSet.reST(filepath=None)
        print tableData
        print

        print "Crossmatch Streams"
        table = list(wiki._get_stream_view_infos(trimmed=True))
        dataSet = list_of_dictionaries(log=log, listOfDictionaries=table)
        tableData = dataSet.reST(filepath=None)
        print tableData
        print

        print "Views on Catalogues and Streams"

        table = list(wiki._get_view_infos(trimmed=True))
        dataSet = list_of_dictionaries(log=log, listOfDictionaries=table)
        tableData = dataSet.reST(filepath=None)
        print tableData

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.debug(
        '-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --' % (
            endTime,
            runningTime,
        ))

    return
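The `init` block above fires both `open` (macOS) and `start` (Windows) and relies on one of them failing quietly. A more explicit sketch of the same idea, assuming only the standard library (`xdg-open` added for Linux):

import subprocess
import sys

def open_in_default_app(filepath):
    """Open a file with the platform's default application."""
    if sys.platform == "darwin":
        subprocess.call(["open", filepath])
    elif sys.platform.startswith("win"):
        # `start` is a cmd.exe builtin, so it needs a shell
        subprocess.call("start %s" % filepath, shell=True)
    else:
        subprocess.call(["xdg-open", filepath])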
Example n. 34
def main(arguments=None):
    """
    *The main function used when ``cl_utils.py`` is run as a single script from the cl, or when installed as a cl command*
    """
    # setup the command-line util settings
    su = tools(
        arguments=arguments,
        docString=__doc__,
        logLevel="WARNING",
        options_first=False,
        projectName="polyglot"
    )
    arguments, settings, log, dbConn = su.setup()

    # unpack remaining cl arguments using `exec` to setup the variable names
    # automatically
    for arg, val in arguments.iteritems():
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        if isinstance(val, str) or isinstance(val, unicode):
            exec(varname + " = '%s'" % (val,))
        else:
            exec(varname + " = %s" % (val,))
        if arg == "--dbConn":
            dbConn = val
        log.debug('%s = %s' % (varname, val,))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info(
        '--- STARTING TO RUN THE cl_utils.py AT %s' %
        (startTime,))

    # for k, v in locals().iteritems():
    #     print k, v

    if not destinationFolder:
        destinationFolder = os.getcwd()
    if not filenameFlag:
        filenameFlag = False
    if not cleanFlag:
        readability = False
    else:
        readability = True

    if init:
        from os.path import expanduser
        home = expanduser("~")
        filepath = home + "/.config/polyglot/polyglot.yaml"
        try:
            cmd = """open %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass
        try:
            cmd = """start %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass

    if pdf and url:
        filepath = printpdf.printpdf(
            log=log,
            settings=settings,
            url=url,
            folderpath=destinationFolder,
            title=filenameFlag,
            append=False,
            readability=readability
        ).get()

    if html and url:

        cleaner = htmlCleaner.htmlCleaner(
            log=log,
            settings=settings,
            url=url,
            outputDirectory=destinationFolder,
            title=filenameFlag,  # SET TO FALSE TO USE WEBPAGE TITLE,
            style=cleanFlag,  # add polyglot's styling to the HTML document
            metadata=True,  # include metadata in generated HTML (e.g. title),
            h1=True  # include title as H1 at the top of the doc
        )
        filepath = cleaner.clean()

    if epub:
        if url:
            iinput = url
        else:
            iinput = docx
        from polyglot import ebook
        epub = ebook(
            log=log,
            settings=settings,
            urlOrPath=iinput,
            title=filenameFlag,
            bookFormat="epub",
            outputDirectory=destinationFolder
        )
        filepath = epub.get()

    if mobi:
        if url:
            iinput = url
        else:
            iinput = docx
        from polyglot import ebook
        mobi = ebook(
            log=log,
            settings=settings,
            urlOrPath=iinput,
            title=filenameFlag,
            bookFormat="mobi",
            outputDirectory=destinationFolder,
        )
        filepath = mobi.get()

    if kindle:
        if url:
            iinput = url
        else:
            iinput = docx
        from polyglot import kindle
        sender = kindle(
            log=log,
            settings=settings,
            urlOrPath=iinput,
            title=filenameFlag
        )
        success = sender.send()

    if kindleNB2MD:
        basename = os.path.basename(notebook)
        extension = os.path.splitext(basename)[1]
        filenameNoExtension = os.path.splitext(basename)[0]
        if destinationFolder:
            filepath = destinationFolder + "/" + filenameNoExtension + ".md"
        else:
            # splitext's extension already includes the leading dot
            filepath = notebook.replace(extension, ".md")
        from polyglot.markdown import kindle_notebook
        nb = kindle_notebook(
            log=log,
            kindleExportPath=notebook,
            outputPath=filepath
        )
        nb.convert()

    if openFlag:
        try:
            cmd = """open %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass
        try:
            cmd = """start %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info('-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --' %
             (endTime, runningTime, ))

    return
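The `epub` and `mobi` branches above are identical apart from the `bookFormat` argument, so they fold naturally into one helper. A sketch using polyglot's `ebook` class exactly as the snippet does:

from polyglot import ebook

def build_ebook(log, settings, urlOrPath, title, bookFormat, outputDirectory):
    """Render a URL or docx file to the requested ebook format."""
    book = ebook(
        log=log,
        settings=settings,
        urlOrPath=urlOrPath,
        title=title,
        bookFormat=bookFormat,  # "epub" or "mobi"
        outputDirectory=outputDirectory
    )
    return book.get()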
Example n. 35
def main(arguments=None):
    """
    *The main function used when ``convert_spectrum_fits_to_ascii.py`` is run as a single script from the cl, or when installed as a cl command*
    """
    # setup the command-line util settings
    su = tools(
        arguments=arguments,
        docString=__doc__,
        logLevel="WARNING",
        options_first=False,
        projectName=False
    )
    arguments, settings, log, dbConn = su.setup()

    # tab completion for raw_input
    readline.set_completer_delims(' \t\n;')
    readline.parse_and_bind("tab: complete")
    readline.set_completer(tab_complete)

    # unpack remaining cl arguments using `exec` to setup the variable names
    # automatically
    for arg, val in arguments.iteritems():
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        if isinstance(val, str) or isinstance(val, unicode):
            exec(varname + " = '%s'" % (val,))
        else:
            exec(varname + " = %s" % (val,))
        if arg == "--dbConn":
            dbConn = val
        log.debug('%s = %s' % (varname, val,))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info(
        '--- STARTING TO RUN THE convert_spectrum_fits_to_ascii.py AT %s' %
        (startTime,))

    # set options interactively if user requests
    if "interactiveFlag" in locals() and interactiveFlag:

        # load previous settings
        moduleDirectory = os.path.dirname(__file__) + "/resources"
        pathToPickleFile = "%(moduleDirectory)s/previousSettings.p" % locals()
        try:
            with open(pathToPickleFile):
                pass
            previousSettingsExist = True
        except:
            previousSettingsExist = False
        previousSettings = {}
        if previousSettingsExist:
            previousSettings = pickle.load(open(pathToPickleFile, "rb"))

        # x-raw-input
        # x-boolean-raw-input
        # x-raw-input-with-default-value-from-previous-settings

        # save the most recently used requests
        pickleMeObjects = []
        pickleMe = {}
        theseLocals = locals()
        for k in pickleMeObjects:
            pickleMe[k] = theseLocals[k]
        pickle.dump(pickleMe, open(pathToPickleFile, "wb"))

    # call the worker function
    # x-if-settings-or-database-credientials
    convert_spectrum_fits_to_ascii(
        log=log,
        fitsFilePath=pathToFits
    ).get()

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info('-- FINISHED ATTEMPT TO RUN THE convert_spectrum_fits_to_ascii.py AT %s (RUNTIME: %s) --' %
             (endTime, runningTime, ))

    return
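Several of these mains register a `tab_complete` callback with readline without showing its definition. A minimal filesystem-path completer that satisfies readline's `set_completer` protocol, offered here as an illustrative sketch:

import glob
import os
import readline

def tab_complete(text, state):
    """Return the state-th filesystem path matching the current token."""
    matches = glob.glob(os.path.expanduser(text) + "*")
    return matches[state] if state < len(matches) else None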
Example n. 36
def main(arguments=None):
    """
    *The main function used when ``cl_utils.py`` is run as a single script from the cl, or when installed as a cl command*
    """
    # setup the command-line util settings
    su = tools(arguments=arguments,
               docString=__doc__,
               logLevel="WARNING",
               options_first=True,
               projectName="inoreader")
    arguments, settings, log, dbConn = su.setup()

    # tab completion for raw_input
    readline.set_completer_delims(' \t\n;')
    readline.parse_and_bind("tab: complete")
    readline.set_completer(tab_complete)

    # unpack remaining cl arguments using `exec` to setup the variable names
    # automatically
    for arg, val in arguments.iteritems():
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        if isinstance(val, str) or isinstance(val, unicode):
            exec(varname + " = '%s'" % (val, ))
        else:
            exec(varname + " = %s" % (val, ))
        if arg == "--dbConn":
            dbConn = val
        log.debug('%s = %s' % (
            varname,
            val,
        ))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info('--- STARTING TO RUN THE cl_utils.py AT %s' % (startTime, ))

    # set options interactively if user requests
    if "interactiveFlag" in locals() and interactiveFlag:

        # load previous settings
        moduleDirectory = os.path.dirname(__file__) + "/resources"
        pathToPickleFile = "%(moduleDirectory)s/previousSettings.p" % locals()
        try:
            with open(pathToPickleFile):
                pass
            previousSettingsExist = True
        except:
            previousSettingsExist = False
        previousSettings = {}
        if previousSettingsExist:
            previousSettings = pickle.load(open(pathToPickleFile, "rb"))

        # x-raw-input
        # x-boolean-raw-input
        # x-raw-input-with-default-value-from-previous-settings

        # save the most recently used requests
        pickleMeObjects = []
        pickleMe = {}
        theseLocals = locals()
        for k in pickleMeObjects:
            pickleMe[k] = theseLocals[k]
        pickle.dump(pickleMe, open(pathToPickleFile, "wb"))

    # call the worker function
    # x-if-settings-or-database-credientials
    if sub:
        rss = add_subscription(log,
                               settings=settings,
                               settingsFilePath=settingsFile,
                               rssUrl=feedUrl,
                               folder=folder)
        rss.get()
    if articles:
        if favouriteFlag:
            unreadOrStarred = "starred"
        elif unreadFlag:
            unreadOrStarred = "unread"
        else:
            # default so the variable is always defined for the call below
            unreadOrStarred = "unread"
        articles = get_articles(log,
                                settings=settings,
                                settingsFilePath=settingsFile,
                                stream=stream,
                                maxArticles=maxArticles,
                                unreadOrStarred=unreadOrStarred)
        articles = articles.get()

        if len(articles) == 0:
            print "No articles found"
        else:
            for article in articles:
                title = article["title"]
                thisId = article["id"]
                print "%(title)s (%(thisId)s)" % locals()

    if tag:
        tag = edit_tags(log,
                        settings=settings,
                        settingsFilePath=settingsFile,
                        removeTag=tagToRemove,
                        addTag=tagToAdd,
                        articleIdList=articleId)
        tag.get()

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info(
        '-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --' % (
            endTime,
            runningTime,
        ))

    return
Example n. 37
def main(arguments=None):
    """
    *The main function used when ``cl_utils.py`` is run as a single script from the cl, or when installed as a cl command*
    """

    # setup the command-line util settings
    su = tools(
        arguments=arguments,
        docString=__doc__,
        logLevel="WARNING",
        options_first=False,
        projectName="rockfinder",
        defaultSettingsFile=True
    )
    arguments, settings, log, dbConn = su.setup()

    # unpack remaining cl arguments using `exec` to setup the variable names
    # automatically
    for arg, val in arguments.iteritems():
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        if isinstance(val, str) or isinstance(val, unicode):
            exec(varname + " = '%s'" % (val,))
        else:
            exec(varname + " = %s" % (val,))
        if arg == "--dbConn":
            dbConn = val
        log.debug('%s = %s' % (varname, val,))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info(
        '--- STARTING TO RUN THE cl_utils.py AT %s' %
        (startTime,))

    # CALL FUNCTIONS/OBJECTS

    if init:
        from os.path import expanduser
        home = expanduser("~")
        filepath = home + "/.config/rockfinder/rockfinder.yaml"
        cmd = """open %(filepath)s""" % locals()
        p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        try:
            cmd = """open %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass
        try:
            cmd = """start %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass
        return

    if where and orbfitFlag:
        from rockfinder import orbfit_ephemeris
        eph = orbfit_ephemeris(
            log=log,
            objectId=objectId,
            mjd=mjd,
            obscode=obscode,
            settings=settings,
            verbose=extraFlag
        )
    else:
        from rockfinder import jpl_horizons_ephemeris
        eph = jpl_horizons_ephemeris(
            log=log,
            objectId=objectId,
            mjd=mjd,
            obscode=obscode,
            verbose=extraFlag
        )

    dataSet = list_of_dictionaries(
        log=log,
        listOfDictionaries=eph
    )
    # xfundamentals-render-list-of-dictionaries

    output = dataSet.table(filepath=None)
    if csv:
        output = dataSet.csv(filepath=None)
    elif json:
        output = dataSet.json(filepath=None)
    elif yaml:
        output = dataSet.yaml(filepath=None)
    elif md:
        output = dataSet.markdown(filepath=None)
    elif rst:
        output = dataSet.reST(filepath=None)

    print output

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info('-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --' %
             (endTime, runningTime, ))

    return
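The `if csv / elif json / ...` chain above selects one renderer method on the `list_of_dictionaries` object. The same dispatch, table-driven, as a sketch (method names are taken from the snippet; the `flags` dict is illustrative):

# (flag, renderer method) pairs in the same priority order as the snippet
RENDERERS = [
    ("csv", "csv"),
    ("json", "json"),
    ("yaml", "yaml"),
    ("md", "markdown"),
    ("rst", "reST"),
]

def render(dataSet, flags):
    """Render with the first requested format, else the default table."""
    for flag, methodName in RENDERERS:
        if flags.get(flag):
            return getattr(dataSet, methodName)(filepath=None)
    return dataSet.table(filepath=None)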
Example n. 38
def main(arguments=None):
    """
    *The main function used when ``cl_utils.py`` is run as a single script from the cl, or when installed as a cl command*
    """

    # setup the command-line util settings
    su = tools(arguments=arguments,
               docString=__doc__,
               logLevel="WARNING",
               options_first=False,
               projectName="rockfinder",
               defaultSettingsFile=True)
    arguments, settings, log, dbConn = su.setup()

    # unpack remaining cl arguments using `exec` to setup the variable names
    # automatically
    for arg, val in arguments.iteritems():
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        if isinstance(val, str) or isinstance(val, unicode):
            exec(varname + " = '%s'" % (val, ))
        else:
            exec(varname + " = %s" % (val, ))
        if arg == "--dbConn":
            dbConn = val
        log.debug('%s = %s' % (
            varname,
            val,
        ))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info('--- STARTING TO RUN THE cl_utils.py AT %s' % (startTime, ))

    # CALL FUNCTIONS/OBJECTS

    if init:
        from os.path import expanduser
        home = expanduser("~")
        filepath = home + "/.config/rockfinder/rockfinder.yaml"
        cmd = """open %(filepath)s""" % locals()
        p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        try:
            cmd = """open %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass
        try:
            cmd = """start %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass
        return

    if where and orbfitFlag:
        from rockfinder import orbfit_ephemeris
        eph = orbfit_ephemeris(log=log,
                               objectId=objectId,
                               mjd=mjd,
                               obscode=obscode,
                               settings=settings,
                               verbose=extraFlag)
    else:
        from rockfinder import jpl_horizons_ephemeris
        eph = jpl_horizons_ephemeris(log=log,
                                     objectId=objectId,
                                     mjd=mjd,
                                     obscode=obscode,
                                     verbose=extraFlag)

    dataSet = list_of_dictionaries(log=log, listOfDictionaries=eph)
    # xfundamentals-render-list-of-dictionaries

    output = dataSet.table(filepath=None)
    if csv:
        output = dataSet.csv(filepath=None)
    elif json:
        output = dataSet.json(filepath=None)
    elif yaml:
        output = dataSet.yaml(filepath=None)
    elif md:
        output = dataSet.markdown(filepath=None)
    elif rst:
        output = dataSet.reST(filepath=None)
    print(output)

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info(
        '-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --' % (
            endTime,
            runningTime,
        ))

    return
Example n. 39
def main(arguments=None):
    """
    *The main function used when ``cl_utils.py`` is run as a single script from the cl, or when installed as a cl command*
    """
    # setup the command-line util settings
    su = tools(
        arguments=arguments,
        docString=__doc__,
        logLevel="WARNING",
        options_first=False,
        projectName="dropterm"
    )
    arguments, settings, log, dbConn = su.setup()

    # tab completion for raw_input
    readline.set_completer_delims(' \t\n;')
    readline.parse_and_bind("tab: complete")
    readline.set_completer(tab_complete)

    # unpack remaining cl arguments using `exec` to setup the variable names
    # automatically
    for arg, val in arguments.iteritems():
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        if isinstance(val, str) or isinstance(val, unicode):
            val = val.replace("'", "\\'")
            exec(varname + " = '%s'" % (val,))
        else:
            exec(varname + " = %s" % (val,))
        if arg == "--dbConn":
            dbConn = val
        log.debug('%s = %s' % (varname, val,))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info(
        '--- STARTING TO RUN THE cl_utils.py AT %s' %
        (startTime,))

    # set options interactively if user requests
    if "interactiveFlag" in locals() and interactiveFlag:

        # load previous settings
        moduleDirectory = os.path.dirname(__file__) + "/resources"
        pathToPickleFile = "%(moduleDirectory)s/previousSettings.p" % locals()
        try:
            with open(pathToPickleFile):
                pass
            previousSettingsExist = True
        except:
            previousSettingsExist = False
        previousSettings = {}
        if previousSettingsExist:
            previousSettings = pickle.load(open(pathToPickleFile, "rb"))

        # x-raw-input
        # x-boolean-raw-input
        # x-raw-input-with-default-value-from-previous-settings

        # save the most recently used requests
        pickleMeObjects = []
        pickleMe = {}
        theseLocals = locals()
        for k in pickleMeObjects:
            pickleMe[k] = theseLocals[k]
        pickle.dump(pickleMe, open(pathToPickleFile, "wb"))

    # call the worker function
    if authenticate == True:
        this = authenticate_dropterm.authenticate_dropterm(
            log=log,
            settings=settings,
            pathToSettings=pathToSettingsFile
        )
    if move == True and contents == False:
        this = move_files_or_folders.move_files_or_folders(
            log=log,
            settings=settings,
            source=sourcePath,
            destination=destinationPath
        )
        this.move()
    if move == True and contents == True:
        this = move_files_or_folders.move_files_or_folders(
            log=log,
            settings=settings,
            source=sourcePath,
            destination=destinationPath,
            moveFolderContents=True
        )
        this.move()
    if copy == True and contents == False:

        this = copy_files_or_folders.copy_files_or_folders(
            log=log,
            settings=settings,
            source=sourcePath,
            destination=destinationPath
        )
        this.copy()
    if copy == True and contents == True:
        this = copy_files_or_folders.copy_files_or_folders(
            log=log,
            settings=settings,
            source=sourcePath,
            destination=destinationPath,
            copyFolderContents=True
        )
        this.copy()
    if ls == True:
        this = list_directory_contents(
            log=log,
            settings=settings,
            directoryToList=directoryPath
        )
        filePaths, fileNames, shareUrls, meta = this.get()
        for f in fileNames:
            print f
    if share == True:
        # invert so that short=True unless the flag was passed on the cl
        lFlag = not lFlag
        this = share_file.share_file(
            log=log,
            settings=settings,
            filePath=filePath,
            short=lFlag
        )
        url = this.get()
        print url
    if mkdir == True:
        this = mkdirectory.mkdir(
            log=log,
            settings=settings,
            dropboxPath=dirPath
        ).get()
    if touch == True:
        this = touch_file.touch_file(
            log=log,
            settings=settings,
            destination=destinationPath
        ).touch()

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info('-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --' %
             (endTime, runningTime, ))

    return
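The four move/copy branches above differ only in the `contents` boolean, which can be passed straight through, assuming `moveFolderContents`/`copyFolderContents` default to False when omitted (as the snippet implies). A condensed sketch for the move case:

if move:
    this = move_files_or_folders.move_files_or_folders(
        log=log,
        settings=settings,
        source=sourcePath,
        destination=destinationPath,
        moveFolderContents=contents  # False moves the folder itself
    )
    this.move()
# the copy case collapses the same way with copyFolderContents=contents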
Example n. 40
def main(arguments=None):
    """
    *The main function used when ``cl_utils.py`` is run as a single script from the cl, or when installed as a cl command*
    """
    # setup the command-line util settings
    su = tools(arguments=arguments, docString=__doc__, logLevel="DEBUG", options_first=False, projectName="projector")
    arguments, settings, log, dbConn = su.setup()

    # tab completion for raw_input
    readline.set_completer_delims(" \t\n;")
    readline.parse_and_bind("tab: complete")
    readline.set_completer(tab_complete)

    # unpack remaining cl arguments using `exec` to setup the variable names
    # automatically
    for arg, val in arguments.iteritems():
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        if isinstance(val, str) or isinstance(val, unicode):
            exec (varname + " = '%s'" % (val,))
        else:
            exec (varname + " = %s" % (val,))
        if arg == "--dbConn":
            dbConn = val
        log.debug("%s = %s" % (varname, val))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info("--- STARTING TO RUN THE cl_utils.py AT %s" % (startTime,))

    # set options interactively if user requests
    if "interactiveFlag" in locals() and interactiveFlag:

        # load previous settings
        moduleDirectory = os.path.dirname(__file__) + "/resources"
        pathToPickleFile = "%(moduleDirectory)s/previousSettings.p" % locals()
        try:
            with open(pathToPickleFile):
                pass
            previousSettingsExist = True
        except:
            previousSettingsExist = False
        previousSettings = {}
        if previousSettingsExist:
            previousSettings = pickle.load(open(pathToPickleFile, "rb"))

        # x-raw-input
        # x-boolean-raw-input
        # x-raw-input-with-default-value-from-previous-settings

        # save the most recently used requests
        pickleMeObjects = []
        pickleMe = {}
        theseLocals = locals()
        for k in pickleMeObjects:
            pickleMe[k] = theseLocals[k]
        pickle.dump(pickleMe, open(pathToPickleFile, "wb"))

    if home:
        userId = "*****@*****.**"
        stack = "home projects"
        dropboxPath = "/notes/thespacedoctor-wiki/projects/"
    elif work:
        userId = "*****@*****.**"
        stack = "work projects"
        dropboxPath = "/notes/astronotes-wiki/projects/"

    # CALL FUNCTIONS/OBJECTS
    if create:
        gmail_utils(log=log, settings=settings, userId=userId).create_label("projects/" + projectName)
        evernote_utils(log=log, settings=settings).create_notebook(nbName=projectName, stack=stack)
        mkdir(log=log, settings=settings, dropboxPath=dropboxPath + projectName).get()
        this = touch_file(log=log, settings=settings, destination=dropboxPath + projectName + ".remove").touch()

        # try:
        #     devonthink_indexer(
        #         log=log,
        #         settings=settings,
        #     ).get()

        #     time.sleep(3)
        #     devonthink_indexer(
        #         log=log,
        #         settings=settings,
        #     ).get()
        # except:
        #     pass

    if remote:
        setup_projects_from_file_input(log=log, settings=settings)

    if auth:
        evernote_utils(log=log, settings=settings).authenticate()

    # if dt:
    #     devonthink_indexer(
    #         log=log,
    #         settings=settings,
    #     ).get()

    if ls:
        gmail_utils(log=log, settings=settings, userId=userId).list_live_projects(archive=archiveFlag)

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info("-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --" % (endTime, runningTime))

    return
Example n. 41
def main(arguments=None):
    """
    *The main function used when `cl_utils.py` is run as a single script from the cl, or when installed as a cl command*
    """
    from astrocalc.coords import unit_conversion
    # setup the command-line util settings
    su = tools(arguments=arguments,
               docString=__doc__,
               logLevel="CRITICAL",
               options_first=True,
               projectName="astrocalc",
               defaultSettingsFile=True)
    arguments, settings, log, dbConn = su.setup()

    # tab completion for raw_input
    readline.set_completer_delims(' \t\n;')
    readline.parse_and_bind("tab: complete")
    readline.set_completer(tab_complete)

    # UNPACK REMAINING CL ARGUMENTS USING `EXEC` TO SETUP THE VARIABLE NAMES
    # AUTOMATICALLY
    a = {}
    for arg, val in list(arguments.items()):
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        a[varname] = val
        if arg == "--dbConn":
            dbConn = val
            a["dbConn"] = val
        log.debug('%s = %s' % (
            varname,
            val,
        ))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info('--- STARTING TO RUN THE cl_utils.py AT %s' % (startTime, ))

    # set options interactively if user requests
    if "interactiveFlag" in a and a["interactiveFlag"]:

        # load previous settings
        moduleDirectory = os.path.dirname(__file__) + "/resources"
        pathToPickleFile = "%(moduleDirectory)s/previousSettings.p" % locals()
        try:
            with open(pathToPickleFile):
                pass
            previousSettingsExist = True
        except:
            previousSettingsExist = False
        previousSettings = {}
        if previousSettingsExist:
            previousSettings = pickle.load(open(pathToPickleFile, "rb"))

        # x-raw-input
        # x-boolean-raw-input
        # x-raw-input-with-default-value-from-previous-settings

        # save the most recently used requests
        pickleMeObjects = []
        pickleMe = {}
        theseLocals = locals()
        for k in pickleMeObjects:
            pickleMe[k] = theseLocals[k]
        pickle.dump(pickleMe, open(pathToPickleFile, "wb"))

    coordflip = a["coordflip"]
    sep = a["sep"]
    timeflip = a["timeflip"]
    trans = a["trans"]
    now = a["now"]
    dist = a["dist"]
    ra = a["ra"]
    ra1 = a["ra1"]
    ra2 = a["ra2"]
    dec = a["dec"]
    dec1 = a["dec1"]
    dec2 = a["dec2"]
    datetime = a["datetime"]
    north = a["north"]
    east = a["east"]
    distVal = a["distVal"]
    hVal = a["hcFlag"]
    OmegaMatter = a["wmFlag"]
    OmegaVacuum = a["wvFlag"]
    mpcFlag = a["mpcFlag"]
    redshiftFlag = a["redshiftFlag"]
    cartesianFlag = a["cartesianFlag"]

    # CALL FUNCTIONS/OBJECTS
    if coordflip:

        if cartesianFlag:
            converter = unit_conversion(log=log)
            x, y, z = converter.ra_dec_to_cartesian(ra="23 45 21.23232",
                                                    dec="+01:58:5.45341")
            print(x, y, z)
            return

        try:
            ra = float(ra)
            dec = float(dec)
            degree = True
        except Exception as e:
            degree = False

        if degree is True:
            converter = unit_conversion(log=log)
            try:
                ra = converter.ra_decimal_to_sexegesimal(ra=ra, delimiter=":")
                dec = converter.dec_decimal_to_sexegesimal(dec=dec,
                                                           delimiter=":")
            except Exception as e:
                print(e)
                sys.exit(0)

            print(ra, dec)
        else:
            converter = unit_conversion(log=log)
            try:
                ra = converter.ra_sexegesimal_to_decimal(ra=ra)
                dec = converter.dec_sexegesimal_to_decimal(dec=dec)
            except Exception as e:
                print(e)
                sys.exit(0)
            print(ra, dec)

    if sep:
        from astrocalc.coords import separations
        calculator = separations(
            log=log,
            ra1=ra1,
            dec1=dec1,
            ra2=ra2,
            dec2=dec2,
        )
        angularSeparation, north, east = calculator.get()
        print("""%(angularSeparation)s arcsec (%(north)s N, %(east)s E)""" %
              locals())

    if timeflip:
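        # heuristic: an input that parses as a float and does not begin with
        # "0", "1" or "2" (a year's leading digit) is treated as an MJD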
        try:
            inputMjd = float(datetime)
            if datetime[0] not in ["0", "1", "2"]:
                inputMjd = True
            else:
                inputMjd = False
        except:
            inputMjd = False
        from astrocalc.times import conversions
        converter = conversions(log=log)

        if inputMjd == False:
            try:
                mjd = converter.ut_datetime_to_mjd(utDatetime=datetime)
                print(mjd)
            except Exception as e:
                print(e)
        else:
            try:
                utDate = converter.mjd_to_ut_datetime(mjd=datetime)
                print(utDate)
            except Exception as e:
                print(e)

    if trans:
        # TRANSLATE COORDINATES ACROSS SKY
        from astrocalc.coords import translate
        newRa, newDec = translate(log=log,
                                  ra=ra,
                                  dec=dec,
                                  northArcsec=float(north),
                                  eastArcsec=float(east)).get()
        from astrocalc.coords import unit_conversion
        converter = unit_conversion(log=log)
        ra = converter.ra_decimal_to_sexegesimal(ra=newRa, delimiter=":")
        dec = converter.dec_decimal_to_sexegesimal(dec=newDec, delimiter=":")

        print("%(newRa)s, %(newDec)s (%(ra)s, %(dec)s)" % locals())

    if now:
        from astrocalc.times import now
        mjd = now(log=log).get_mjd()
        print(mjd)

    if dist and redshiftFlag:
        from astrocalc.distances import converter
        c = converter(log=log)
        if not hcFlag:
            hcFlag = 70.
        if not wmFlag:
            wmFlag = 0.3
        if not wvFlag:
            wvFlag = 0.7
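        # flat-LCDM defaults: H0 = 70 km/s/Mpc, Omega_M = 0.3, Omega_vac = 0.7;
        # the returned "dmod" is the distance modulus mu = 5*log10(dL / 10pc)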
        dists = c.redshift_to_distance(z=float(distVal),
                                       WM=float(wmFlag),
                                       WV=float(wvFlag),
                                       H0=float(hcFlag))
        print("Distance Modulus: " + str(dists["dmod"]) + " mag")
        print("Luminousity Distance: " + str(dists["dl_mpc"]) + " Mpc")
        print("Angular Size Scale: " + str(dists["da_scale"]) + " kpc/arcsec")
        print("Angular Size Distance: " + str(dists["da_mpc"]) + " Mpc")
        print("Comoving Radial Distance: " + str(dists["dcmr_mpc"]) + " Mpc")

    if dist and mpcFlag:
        from astrocalc.distances import converter
        c = converter(log=log)
        z = c.distance_to_redshift(mpc=float(distVal))
        print("z = %(z)s" % locals())

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info(
        '-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --' % (
            endTime,
            runningTime,
        ))

    return
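For reference, the arithmetic behind the decimal-to-sexagesimal right-ascension conversion used above, as a self-contained sketch (astrocalc's own implementation may differ in rounding and edge cases):

def ra_decimal_to_sexagesimal(ra_deg, delimiter=":"):
    """Convert right ascension from decimal degrees to HH:MM:SS.SSS.

    360 degrees of RA span 24 hours, i.e. 15 degrees per hour.
    """
    hours = ra_deg / 15.0
    h = int(hours)
    m = int((hours - h) * 60)
    s = (hours - h) * 3600 - m * 60
    return "%02d%s%02d%s%06.3f" % (h, delimiter, m, delimiter, s)

# e.g. ra_decimal_to_sexagesimal(356.33847)  ->  "23:45:21.233"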
Example n. 42
def main(arguments=None):
    """
    *The main function used when ``cl_utils.py`` is run as a single script from the cl, or when installed as a cl command*
    """
    # setup the command-line util settings
    su = tools(
        arguments=arguments,
        docString=__doc__,
        logLevel="WARNING",
        options_first=False,
        projectName="qubits"
    )
    arguments, settings, log, dbConn = su.setup()

    # unpack remaining cl arguments using `exec` to setup the variable names
    # automatically
    for arg, val in arguments.iteritems():
        if arg[0] == "-":
            varname = arg.replace("-", "") + "Flag"
        else:
            varname = arg.replace("<", "").replace(">", "")
        if varname == "import":
            varname = "iimport"
        if isinstance(val, str) or isinstance(val, unicode):
            exec(varname + " = '%s'" % (val,))
        else:
            exec(varname + " = %s" % (val,))
        if arg == "--dbConn":
            dbConn = val
        log.debug('%s = %s' % (varname, val,))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info(
        '--- STARTING TO RUN THE cl_utils.py AT %s' %
        (startTime,))

    if init:
        from . import workspace
        ws = workspace(
            log=log,
            pathToWorkspace=pathToWorkspace
        )
        ws.setup()
        return

    # IMPORT THE SIMULATION SETTINGS
    (allSettings,
     programSettings,
     limitingMags,
     sampleNumber,
     peakMagnitudeDistributions,
     explosionDaysFromSettings,
     extendLightCurveTail,
     relativeSNRates,
     lowerRedshiftLimit,
     upperRedshiftLimit,
     redshiftResolution,
     restFrameFilter,
     kCorrectionTemporalResolution,
     kCorPolyOrder,
     kCorMinimumDataPoints,
     extinctionType,
     extinctionConstant,
     hostExtinctionDistributions,
     galacticExtinctionDistribution,
     surveyCadenceSettings,
     snLightCurves,
     surveyArea,
     CCSNRateFraction,
     transientToCCSNRateFraction,
     extraSurveyConstraints,
     lightCurvePolyOrder,
     logLevel) = cu.read_in_survey_parameters(
        log,
        pathToSettingsFile=pathToSettingsFile
    )

    logFilePath = pathToOutputDirectory + "/qubits.log"
    del log
    log = _set_up_command_line_tool(
        level=str(logLevel),
        logFilePath=logFilePath
    )

    # dbConn, log = cu.settings(
    #     pathToSettingsFile=pathToSettingsFile,
    #     dbConn=False,
    #     log=True
    # )

    ## START LOGGING ##
    startTime = dcu.get_now_sql_datetime()
    log.info('--- STARTING TO RUN THE qubits AT %s' % (startTime,))

    resultsDict = {}

    pathToOutputPlotDirectory = pathToOutputDirectory + "/plots/"
    dcu.dryx_mkdir(
        log,
        directoryPath=pathToOutputPlotDirectory
    )

    pathToResultsFolder = pathToOutputDirectory + "/results/"
    dcu.dryx_mkdir(
        log,
        directoryPath=pathToResultsFolder
    )

    if not programSettings['Extract Lightcurves from Spectra'] and not programSettings['Generate KCorrection Database'] and not programSettings['Run the Simulation'] and not programSettings['Compile and Plot Results']:
        print "All stages of the simulatation have been switched off. Please switch on at least one stage of the simulation under the 'Programming Settings' in the settings file `%(pathToSettingsFile)s`" % locals()

    # GENERATE THE DATA FOR SIMULATIONS
    if programSettings['Extract Lightcurves from Spectra']:
        log.info('generating the Lightcurves')
        dg.generate_model_lightcurves(
            log=log,
            pathToSpectralDatabase=pathToSpectralDatabase,
            pathToOutputDirectory=pathToOutputDirectory,
            pathToOutputPlotDirectory=pathToOutputPlotDirectory,
            explosionDaysFromSettings=explosionDaysFromSettings,
            extendLightCurveTail=extendLightCurveTail,
            polyOrder=lightCurvePolyOrder
        )
        print "The lightcurve file can be found here: %(pathToOutputDirectory)stransient_light_curves.yaml" % locals()
        print "The lightcurve plots can be found in %(pathToOutputPlotDirectory)s" % locals()

    if programSettings['Generate KCorrection Database']:
        log.info('generating the kcorrection data')
        dg.generate_kcorrection_listing_database(
            log,
            pathToOutputDirectory=pathToOutputDirectory,
            pathToSpectralDatabase=pathToSpectralDatabase,
            restFrameFilter=restFrameFilter,
            temporalResolution=kCorrectionTemporalResolution,
            redshiftResolution=redshiftResolution,
            redshiftLower=lowerRedshiftLimit,
            redshiftUpper=upperRedshiftLimit + redshiftResolution)
        log.info('generating the kcorrection polynomials')
        dg.generate_kcorrection_polynomial_database(
            log,
            pathToOutputDirectory=pathToOutputDirectory,
            restFrameFilter=restFrameFilter,
            kCorPolyOrder=kCorPolyOrder,  # ORDER OF THE POLYNOMIAL TO FIT
            kCorMinimumDataPoints=kCorMinimumDataPoints,
            redshiftResolution=redshiftResolution,
            redshiftLower=lowerRedshiftLimit,
            redshiftUpper=upperRedshiftLimit + redshiftResolution,
            plot=programSettings['Generate KCorrection Plots'])

        print "The k-correction database has been generated here: %(pathToOutputDirectory)sk_corrections" % locals()
        if programSettings['Generate KCorrection Plots']:
            print "The k-correction polynomial plots can also be found in %(pathToOutputDirectory)sk_corrections" % locals()

    if programSettings['Run the Simulation']:
        # CREATE THE OBSERVABLE UNIVERSE!
        log.info('generating the redshift array')
        redshiftArray = u.random_redshift_array(
            log,
            sampleNumber,
            lowerRedshiftLimit,
            upperRedshiftLimit,
            redshiftResolution=redshiftResolution,
            pathToOutputPlotDirectory=pathToOutputPlotDirectory,
            plot=programSettings['Plot Simulation Helper Plots'])
        resultsDict['Redshifts'] = redshiftArray.tolist()

        log.info('generating the SN type array')
        snTypesArray = u.random_sn_types_array(
            log,
            sampleNumber,
            relativeSNRates,
            pathToOutputPlotDirectory=pathToOutputPlotDirectory,
            plot=programSettings['Plot Simulation Helper Plots'])
        resultsDict['SN Types'] = snTypesArray.tolist()

        log.info('generating peak magnitudes for the SNe')
        peakMagnitudesArray = u.random_peak_magnitudes(
            log,
            peakMagnitudeDistributions,
            snTypesArray,
            plot=programSettings['Plot Simulation Helper Plots'])

        log.info('generating the SN host extinctions array')
        hostExtinctionArray = u.random_host_extinction(
            log,
            sampleNumber,
            extinctionType,
            extinctionConstant,
            hostExtinctionDistributions,
            plot=programSettings['Plot Simulation Helper Plots'])

        log.info('generating the SN galactic extinctions array')
        galacticExtinctionArray = u.random_galactic_extinction(
            log,
            sampleNumber,
            extinctionType,
            extinctionConstant,
            galacticExtinctionDistribution,
            plot=programSettings['Plot Simulation Helper Plots'])

        log.info('generating the raw lightcurves for the SNe')
        rawLightCurveDict = u.generate_numpy_polynomial_lightcurves(
            log,
            snLightCurves=snLightCurves,
            pathToOutputDirectory=pathToOutputDirectory,
            pathToOutputPlotDirectory=pathToOutputPlotDirectory,
            plot=programSettings['Plot Simulation Helper Plots'])

        log.info('generating the k-correction array for the SNe')
        kCorrectionArray = u.build_kcorrection_array(
            log,
            redshiftArray,
            snTypesArray,
            snLightCurves,
            pathToOutputDirectory=pathToOutputDirectory,
            plot=programSettings['Plot Simulation Helper Plots'])

        log.info('generating the observed lightcurves for the SNe')
        observedFrameLightCurveInfo, peakAppMagList = u.convert_lightcurves_to_observered_frame(
            log,
            snLightCurves=snLightCurves,
            rawLightCurveDict=rawLightCurveDict,
            redshiftArray=redshiftArray,
            snTypesArray=snTypesArray,
            peakMagnitudesArray=peakMagnitudesArray,
            kCorrectionArray=kCorrectionArray,
            hostExtinctionArray=hostExtinctionArray,
            galacticExtinctionArray=galacticExtinctionArray,
            restFrameFilter=restFrameFilter,
            pathToOutputDirectory=pathToOutputDirectory,
            pathToOutputPlotDirectory=pathToOutputPlotDirectory,
            polyOrder=lightCurvePolyOrder,
            plot=programSettings['Plot Simulation Helper Plots'])

        log.info('generating the survey observation cadence')
        cadenceDictionary = ss.survey_cadence_arrays(
            log,
            surveyCadenceSettings,
            pathToOutputDirectory=pathToOutputDirectory,
            pathToOutputPlotDirectory=pathToOutputPlotDirectory,
            plot=programSettings['Plot Simulation Helper Plots'])

        log.info('determining if the SNe are discoverable by the survey')
        discoverableList = ss.determine_if_sne_are_discoverable(
            log,
            redshiftArray=redshiftArray,
            limitingMags=limitingMags,
            observedFrameLightCurveInfo=observedFrameLightCurveInfo,
            pathToOutputDirectory=pathToOutputDirectory,
            pathToOutputPlotDirectory=pathToOutputPlotDirectory,
            plot=programSettings['Plot Simulation Helper Plots'])

        log.info(
            'determining the day (if any) on which each SN first becomes discoverable by the survey')
        ripeDayList = ss.determine_when_sne_are_ripe_for_discovery(
            log,
            redshiftArray=redshiftArray,
            limitingMags=limitingMags,
            discoverableList=discoverableList,
            observedFrameLightCurveInfo=observedFrameLightCurveInfo,
            plot=programSettings['Plot Simulation Helper Plots'])

        # log.info('determining the day when each discovered SN fades below the survey limiting mags')
        # disappearDayList = ss.determine_when_discovered_sne_disappear(
        #     log,
        #     redshiftArray=redshiftArray,
        #     limitingMags=limitingMags,
        #     ripeDayList=ripeDayList,
        #     observedFrameLightCurveInfo=observedFrameLightCurveInfo,
        #     plot=programSettings['Plot Simulation Helper Plots'])

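        # CROSS-MATCH THE 'RIPE' DAYS AGAINST THE SURVEY CADENCE AND ANY
        # EXTRA SURVEY CONSTRAINTS TO DECIDE WHICH SNE ARE ACTUALLY DISCOVERED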
        log.info('determining if and when each SN is discovered by the survey')
        lightCurveDiscoveryDayList, surveyDiscoveryDayList, snCampaignLengthList = ss.determine_if_sne_are_discovered(
            log,
            limitingMags=limitingMags,
            ripeDayList=ripeDayList,
            cadenceDictionary=cadenceDictionary,
            observedFrameLightCurveInfo=observedFrameLightCurveInfo,
            extraSurveyConstraints=extraSurveyConstraints,
            plot=programSettings['Plot Simulation Helper Plots'])

        resultsDict['Discoveries Relative to Peak Magnitudes'] = lightCurveDiscoveryDayList
        resultsDict['Discoveries Relative to Survey Year'] = surveyDiscoveryDayList
        resultsDict['Campaign Length'] = snCampaignLengthList
        resultsDict['Cadence Dictionary'] = cadenceDictionary
        resultsDict['Peak Apparent Magnitudes'] = peakAppMagList

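        # MERGE THE INPUT SETTINGS WITH THE RESULTS (A PYTHON 2 DICT MERGE;
        # resultsDict VALUES WIN ON ANY CLASHING KEYS) AND DUMP EVERYTHING TO
        # A TIMESTAMPED YAML FILE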
        now = datetime.now()
        now = now.strftime("%Y%m%dt%H%M%S")
        fileName = pathToOutputDirectory + \
            "/simulation_results_%s.yaml" % (now,)
        stream = open(fileName, 'w')
        yamlContent = dict(allSettings.items() + resultsDict.items())
        yaml.dump(yamlContent, stream, default_flow_style=False)
        stream.close()

        print "The simulation output file can be found here: %(fileName)s. Remember to update your settings file 'Simulation Results File Used for Plots' parameter with this filename before compiling the results." % locals()
        if programSettings['Plot Simulation Helper Plots']:
            print "The simulation helper-plots found in %(pathToOutputPlotDirectory)s" % locals()

    # COMPILE AND PLOT THE RESULTS
    if programSettings['Compile and Plot Results']:
        pathToYamlFile = pathToOutputDirectory + "/" + \
            programSettings['Simulation Results File Used for Plots']
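        # READ THE SIMULATION RESULTS BACK FROM THE YAML FILE AND START A
        # MARKDOWN REPORT SEEDED WITH THE SURVEY SETTINGS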
        result_log = r.log_the_survey_settings(log, pathToYamlFile)
        snSurveyDiscoveryTimes, lightCurveDiscoveryTimes, snTypes, redshifts, cadenceDictionary, peakAppMagList, snCampaignLengthList = r.import_results(
            log, pathToYamlFile)
        snRatePlotLink, totalRate, tooFaintRate, shortCampaignRate = r.determine_sn_rate(
            log,
            lightCurveDiscoveryTimes,
            snSurveyDiscoveryTimes,
            redshifts,
            surveyCadenceSettings=surveyCadenceSettings,
            lowerRedshiftLimit=lowerRedshiftLimit,
            upperRedshiftLimit=upperRedshiftLimit,
            redshiftResolution=redshiftResolution,
            surveyArea=surveyArea,
            CCSNRateFraction=CCSNRateFraction,
            transientToCCSNRateFraction=transientToCCSNRateFraction,
            peakAppMagList=peakAppMagList,
            snCampaignLengthList=snCampaignLengthList,
            extraSurveyConstraints=extraSurveyConstraints,
            pathToOutputPlotFolder=pathToOutputPlotDirectory)
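        # APPEND THE HEADLINE DISCOVERY RATES TO THE MARKDOWN REPORT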
        result_log += """
## Results ##

This simulated survey discovered a total of **%s** transients per year. An extra **%s** transients were detected but deemed too faint to secure a positive transient identification, and a further **%s** transients were detected but an observational campaign of more than **%s** days could not be completed to ensure identification. See below for the various output plots.

        """ % (totalRate, tooFaintRate, shortCampaignRate, extraSurveyConstraints["Observable for at least ? number of days"])
        cadenceWheelLink = r.plot_cadence_wheel(
            log,
            cadenceDictionary,
            pathToOutputPlotFolder=pathToOutputPlotDirectory)
        result_log += """%s""" % (cadenceWheelLink,)
        discoveryMapLink = r.plot_sn_discovery_map(
            log,
            snSurveyDiscoveryTimes,
            peakAppMagList,
            snCampaignLengthList,
            redshifts,
            extraSurveyConstraints,
            pathToOutputPlotFolder=pathToOutputPlotDirectory)
        result_log += """%s""" % (discoveryMapLink,)
        ratioMapLink = r.plot_sn_discovery_ratio_map(
            log,
            snSurveyDiscoveryTimes,
            redshifts,
            peakAppMagList,
            snCampaignLengthList,
            extraSurveyConstraints,
            pathToOutputPlotFolder=pathToOutputPlotDirectory)
        result_log += """%s""" % (ratioMapLink,)
        result_log += """%s""" % (snRatePlotLink,)

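        # WRITE THE MARKDOWN REPORT TO A TIMESTAMPED FILE AND CONVERT IT TO
        # HTML FOR VIEWING IN A BROWSER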
        now = datetime.now()
        now = now.strftime("%Y%m%dt%H%M%S")
        mdLogPath = pathToResultsFolder + \
            "simulation_result_log_%s.md" % (now,)
        mdLog = open(mdLogPath, 'w')
        mdLog.write(result_log)
        mdLog.close()

        dmd.convert_to_html(
            log=log,
            pathToMMDFile=mdLogPath,
            css="amblin"
        )

        print "Results can be found here: %(pathToResultsFolder)s" % locals()
        html = mdLogPath.replace(".md", ".html")
        print "Open this file in your browser: %(html)s" % locals()

    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()
    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info('-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --' %
             (endTime, runningTime, ))

    return