Ejemplo n.º 1
0
def build_master_table(input_table,
                       output_table,
                       ftype="fitacf",
                       coords="mlt",
                       config_filename="../mysql_dbconfig_files/config.ini",
                       section="midlat",
                       input_dbname=None,
                       output_dbname=None):
    """ Combines all the ten-min median filtered gridded iscat data into
    one master table.
    The results are stored in a different db file.

    Parameters
    ----------
    input_table : str
        A table name from input_dbname
    output_table : str
        A table name from output_dbname
    ftype : str
        SuperDARN file type
    coords : str
        Coordinates in which the binning process took place.
        Default to "mlt", can be "geo" as well.
    config_filename : str
        Name and path of the configuration file
    section : str, default to "midlat"
        Section of database configuration
    input_dbname : str, default to None
        Name of the MySQL db where ten-min median data are stored.
    output_dbname : str, default to None
        Name of the master db

    Returns
    -------
    Nothing

    """

    import numpy as np
    import datetime as dt
    from mysql.connector import MySQLConnection
    from month_to_season import get_season_by_month
    import sys
    sys.path.append("../")
    from mysql_dbutils.db_config import db_config
    from mysql_dbutils import db_tools
    import logging

    # Construct the default db names from the coordinate system and file type
    if input_dbname is None:
        input_dbname = "ten_min_median_" + coords + "_" + ftype
    if output_dbname is None:
        output_dbname = "master_" + coords + "_" + ftype

    # Create a db (if it does not exist) that combines all the data.
    # NOTE: "except Exception as e" replaces the Python-2-only
    # "except Exception, e" form, which is a SyntaxError in Python 3.
    try:
        # create a db
        db_tools.create_db(output_dbname)
    except Exception as e:
        # Best-effort creation: log the failure with traceback and continue
        logging.error(e, exc_info=True)
Ejemplo n.º 2
0
def main():
    """ Call the functions above. Acts as an example code.
    Multiprocessing has been implemented to do parallel computing.
    The unit process is for reading a day worth of data for a given radar.

    NOTE: No need to worry about the sd data in tmp dir because they will
    be removed after using them.
    """

    import datetime as dt
    import multiprocessing as mp
    import os
    import sys
    sys.path.append("../")
    from mysql_dbutils import db_tools, db_config
    import logging
    from mysql.connector import MySQLConnection

    # create a log file to which any error occured between client and
    # MySQL server communication will be written
    logging.basicConfig(filename="./log_files/boxcar_filtered_data_to_db_six_rads.log",
                        level=logging.INFO)

    # input parameters
    sdate = dt.datetime(2017, 1, 1)     # includes sdate
#    sdate = dt.datetime(2016, 6, 21)     # includes sdate
#    edate = dt.datetime(2015, 1, 1)     # does not include edate
    edate = dt.datetime(2018, 7, 1)     # does not include edate
    channel = None
    params = ['velocity']
    ftype = "fitacf"
    ffname = None

    # run the code for the following radars in parallel
    #rad_list = ["hok", "hkw", "ade", "adw"]
    #rad_list = ["ade", "adw"]
    # rad_list = ["tig", "unw"]
    rad_list = ["bks", "wal", "fhe", "fhw", "cve", "cvw"]
    #rad_list = ["bpk"]

    # create tmpdirs to store dmap files temporarily
    for rad in rad_list:
        tmpdir = "../data/" + rad + "_tmp"
        os.system("mkdir -p " + tmpdir)

    # Create dbs (if they do not exist) for radars.
    # NOTE: "except Exception as e" replaces the Python-2-only
    # "except Exception, e" form, which is a SyntaxError in Python 3.
    for rad in rad_list:
        db_name = rad + "_boxcar_" + ftype
        try:
            # create a db
            db_tools.create_db(db_name)
        except Exception as e:
            # Best-effort creation: log with traceback and continue
            logging.error(e, exc_info=True)
Ejemplo n.º 3
0
def main():
    """ Call the functions above. Acts as an example code.
    Multiprocessing has been implemented to do parallel computing.
    The unit process is for reading a day worth of data for a given radar."""

    import datetime as dt
    import multiprocessing as mp
    import sys
    sys.path.append("../")
    from mysql_dbutils import db_tools
    import logging

    # create a log file to which any error occured between client and
    # MySQL server communication will be written.
    logging.basicConfig(filename="./log_files/iscat_to_db_six_rads.log",
                        level=logging.INFO)

    # input parameters
    sdate = dt.datetime(2017, 1, 1)  # includes sdate
    #    sdate = dt.datetime(2011, 8, 1)     # includes sdate
    edate = dt.datetime(2018, 7, 1)  # does not include edate
    channel = None
    params = ['velocity']
    ftype = "fitacf"
    low_vel_iscat_event_only = True
    search_allbeams = True
    no_gscat = True
    data_from_db = True
    ffname = None
    tmpdir = None

    # run the code for the following radars in parallel
    #rad_list = ["hok", "hkw"]
    #rad_list = ["ade", "adw"]
    #rad_list = ["tig", "unw"]
    #rad_list = ["bpk"]
    rad_list = ["bks", "wal", "fhe", "fhw", "cve", "cvw"]

    # Create dbs (if they do not exist) for radars.
    # NOTE: "except Exception as e" replaces the Python-2-only
    # "except Exception, e" form, which is a SyntaxError in Python 3.
    for rad in rad_list:
        db_name = rad + "_iscat_" + ftype
        try:
            # create a db
            db_tools.create_db(db_name)
        except Exception as e:
            # Best-effort creation: log with traceback and continue
            logging.error(e, exc_info=True)
Ejemplo n.º 4
0
def ten_min_median(rad,
                   stm,
                   etm,
                   ftype="fitacf",
                   coords="mlt",
                   config_filename="../mysql_dbconfig_files/config.ini",
                   section="midlat",
                   iscat_dbname=None,
                   output_dbname=None):
    """ Bins the gridded data from all beams of a single radar into ten-minute intervals.
    e.g., at each ten-minute interval, median vector in each azimuth bin within a grid cell is
    selected as the representative velocity for that bin.
    The results are stored in a different db such that data from all beams
    of a given radar are written into a single table named by the radar name.

    Parameters
    ----------
    rad : str
        Three-letter radar code
    stm : datetime.datetime
        The start time.
    etm : datetime.datetime
        The end time.
    ftype : str
        SuperDARN file type
    coords : str
        Coordinates in which the binning process takes place.
        Default to "mlt". Can be "geo" as well.
    config_filename : str
        Name and path of the configuration file
    section : str, default to "midlat"
        Section of database configuration
    iscat_dbname : str, default to None
        Name of the MySQL db to which iscat data has been written
    output_dbname : str, default to None
        Name of the MySQL db to which ten-min median filtered data will be written

    Returns
    -------
    Nothing

    """

    import numpy as np
    import datetime as dt
    from mysql.connector import MySQLConnection
    import sys
    sys.path.append("../")
    from mysql_dbutils.db_config import db_config
    from mysql_dbutils import db_tools
    import logging

    # Construct default db/table names from the radar, coords and file type
    if iscat_dbname is None:
        iscat_dbname = rad + "_iscat_" + ftype
    if output_dbname is None:
        output_dbname = "ten_min_median_" + coords + "_" + ftype
    output_table = rad + "_" + ftype

    # Create a db (if it does not exist) for ten-min median data.
    # NOTE: "except Exception as e" replaces the Python-2-only
    # "except Exception, e" form, which is a SyntaxError in Python 3.
    try:
        # create a db
        db_tools.create_db(output_dbname)
    except Exception as e:
        # Best-effort creation: log with traceback and continue
        logging.error(e, exc_info=True)
def imf_based_filter(imf_table,
                     output_table,
                     stm,
                     etm,
                     ftype="fitacf",
                     coords="mlt",
                     kp_lim=[0.0, 9.0],
                     config_filename="../mysql_dbconfig_files/config.ini",
                     section="midlat",
                     input_dbname=None,
                     input_table=None,
                     output_dbname=None,
                     imf_dbname=None,
                     imf_db_location="../../data/sqlite3/"):
    """ Selects the data for stable IMF intervals.

    Parameters
    ----------
    imf_table : list
        The binned imf table from which binned imf data will be taken.
    output_table : str
        Name of the table where filtered iscat data will be stored.
    stm : datetime.datetime
        The start time
    etm : datetime.datetime
        The end time
    ftype : str
        SuperDARN LOS data file type
    coords : str
        Coordinate systems. valid inputs are "geo" or "mlt".
    kp_lim : list
        The range of Kp. The range boundaries are inclusive.
    config_filename : str
        Name and path of the configuration file
    section : str, default to "midlat"
        Section of database configuration
    input_dbname : str
        Name of the input db.
        Default to None.
    input_table : str
        Name of the table where the iscat data will be extracted.
    output_dbname : str
        Name of the output db.
        Default to None.
    imf_dbname : str
        Name of the sqlite3 db where imf data are stored.
        Default to None.
    imf_db_location : str
        The path to imf_dbname sqlite db.
        Default to None.

    Returns
    -------
    Nothing

    """

    import datetime as dt
    import sqlite3
    import os
    import sys
    sys.path.append("../../")
    from mysql.connector import MySQLConnection
    from mysql_dbutils.db_config import db_config
    from mysql_dbutils import db_tools
    import logging

    # Construct default db names from the coordinate system and file type
    if input_dbname is None:
        input_dbname = "master_" + coords + "_" + ftype
    if output_dbname is None:
        output_dbname = "master_" + coords + "_" + ftype + "_binned_by_imf_clock_angle"
    if imf_dbname is None:
        imf_dbname = "binned_imf.sqlite"

    # Create the output db (if it does not exist).
    # NOTE: "except Exception as e" replaces the Python-2-only
    # "except Exception, e" form, which is a SyntaxError in Python 3.
    try:
        # create a db
        db_tools.create_db(output_dbname, config_filename=config_filename)
    except Exception as e:
        # Best-effort creation: log with traceback and continue
        logging.error(e, exc_info=True)