def fetch_dscovr_by_dates(sdate, edate, db_name="gme_data",
        table_name="dscovr", local_data_store="../data/sqlite3/", imf_coord="gsm"):
    """
    Fetch stored DSCOVR solar-wind data from the local sqlite database.

    Parameters
    ----------
    sdate : start datetime (inclusive)
    edate : end datetime (inclusive)
    db_name : name of the sqlite database
    table_name : table to query
    local_data_store : folder location of the database files
    imf_coord : coordinate system of the returned IMF columns, "gsm" or "gse"
                (any other value leaves both coordinate sets in the frame)

    Returns
    -------
    pandas.DataFrame with sentinel fill values replaced by numpy.inf and
    the IMF columns reduced/renamed to the requested coordinate system.
    """
    from db_utils import DbUtils
    dbo = DbUtils(db_name=db_name, local_data_store=local_data_store)
    sql = ("SELECT * from {tb} WHERE strftime('%s', date) BETWEEN "
           "strftime('%s', '{sdate}') AND "
           "strftime('%s', '{edate}')").format(tb=table_name,
                                               sdate=sdate, edate=edate)
    print("Running sql query >> ", sql)
    df = dbo.fetch_table_by_sql(sql)
    # Sentinel "missing data" fill values used by the upstream data files.
    nan_directory = {"bx": 10000.0, "by": 10000.0, "bz": 10000.0,
                     "by_gse": 10000.0, "bz_gse": 10000.0,
                     "b": 10000.0, "v": 99999.9, "vx": 99999.9,
                     "vy": 99999.9, "vz": 99999.9,
                     "n": 1000.0, "t": 9999999.}
    # One replace() pass over the whole frame instead of one full-frame
    # scan per sentinel value (the old loop iterated .keys() only to
    # look the value straight back up).
    df = df.replace(list(set(nan_directory.values())), numpy.inf)
    if imf_coord == "gsm":
        df = df.drop(columns=["by_gse", "bz_gse"])
    elif imf_coord == "gse":
        df = df.drop(columns=["by", "bz"])
        df = df.rename(columns={"by_gse": "by", "bz_gse": "bz"})
    print(df.head())
    return df
 def fetch_store_kp_data(self, db_name="gme_data",
                         table_name="kp",
                         local_data_store="../data/sqlite3/"):
     """
     Download Kp index data and store it in the local sqlite database.
     """
     from db_utils import DbUtils
     # Fetch the data; missing_dates is not used by this method.
     data_df, missing_dates = self.get_kp_data()
     # Set up the database connection.
     db_connection = DbUtils(db_name=db_name,
                             local_data_store=local_data_store)
     if data_df is not None:
         print("Working with data wdc provides!")
         db_connection.kp_to_db(data_df, table_name=table_name)
         print("Updated DB!")
# Example #3
# 0
 def fetch_store_aur_data(self, db_name="gme_data",
                          table_name="sym_inds",
                          local_data_store="../data/sqlite3/"):
     """
     Download SYM/ASY index data and store it in the local sqlite database.
     """
     from db_utils import DbUtils
     # Fetch the data; missing_dates is not used by this method.
     data_df, missing_dates = self.get_asym_data()
     # Set up the database connection.
     db_connection = DbUtils(db_name=db_name,
                             local_data_store=local_data_store)
     if data_df is not None:
         print("Downloaded data!")
         db_connection.sym_inds_to_db(data_df, table_name=table_name)
         print("Updated DB!")
# Example #4
# 0
 def store_map_data(self,
                    db_name="gme_data",
                    table_name="sd_map",
                    local_data_store="../data/sqlite3/"):
     """
     Fetch SuperDARN map data and store it in the local sqlite database.
     """
     from db_utils import DbUtils
     # Pull the map data first.
     map_df = self.fetch_map_data()
     # Set up the database connection.
     db_connection = DbUtils(db_name=db_name,
                             local_data_store=local_data_store)
     if map_df is not None:
         print("Working with map data from VT server!")
         db_connection.map_to_db(map_df, table_name=table_name)
         print("Updated DB!")
def fetch_omni_by_dates(sdate,
                        edate,
                        db_name="omni_data",
                        table_name="omni",
                        local_data_store="../data/sqlite3/"):
    """
    Fetch stored OMNI solar-wind data from the local sqlite database.

    Parameters
    ----------
    sdate : start datetime (inclusive)
    edate : end datetime (inclusive)
    db_name : name of the sqlite database
    table_name : table to query
    local_data_store : folder location of the database files

    Returns
    -------
    pandas.DataFrame with sentinel fill values replaced by numpy.inf.
    """
    from db_utils import DbUtils
    dbo = DbUtils(db_name=db_name, local_data_store=local_data_store)
    # The old backslash continuation baked 12 stray trailing spaces into
    # the SQL text; harmless to SQLite, but removed here.
    sql = ("SELECT * from {tb} WHERE strftime('%s', date) BETWEEN "
           "strftime('%s', '{sdate}') AND "
           "strftime('%s', '{edate}')").format(tb=table_name,
                                               sdate=sdate, edate=edate)
    print("Running sql query >> ", sql)
    df = dbo.fetch_table_by_sql(sql)
    # Sentinel "missing data" fill values used by the upstream OMNI files.
    nan_directory = {
        "bx": 10000.0,
        "by": 10000.0,
        "bz": 10000.0,
        "b": 10000.0,
        "v": 99999.9,
        "vx": 99999.9,
        "vy": 99999.9,
        "vz": 99999.9,
        "n": 1000.0,
        "t": 9999999.
    }
    # One replace() pass over the whole frame instead of one full-frame
    # scan per sentinel value.
    df = df.replace(list(set(nan_directory.values())), numpy.inf)
    return df
def fetch_gme_by_dates(sdate,
                       edate,
                       db_name="gme_data",
                       table_name="sd_map",
                       col_names='*',
                       local_data_store="../data/sqlite3/"):
    """
    Read rows from a gme_data table between two datetimes.

    Parameters
    ----------
    sdate : start datetime (inclusive)
    edate : end datetime (inclusive)
    db_name : name of the sqlite database
    table_name : table to query
    col_names : comma-separated column list (default all columns)
    local_data_store : folder location of the database files
    """
    from db_utils import DbUtils
    db_obj = DbUtils(db_name=db_name, local_data_store=local_data_store)
    # NOTE: trailing spaces kept so the query text matches the original
    # (the old backslash continuation folded them into the string).
    query = ("SELECT {cols} from {tb} WHERE strftime('%s', date) BETWEEN "
             "strftime('%s', '{sdate}') AND strftime('%s', '{edate}')"
             "            ").format(cols=col_names, tb=table_name,
                                    sdate=sdate, edate=edate)
    print("Running sql query >> ", query)
    return db_obj.fetch_table_by_sql(query)
 def fetch_dscovr_data(self, db_name="gme_data",
                       table_name="dscovr",
                       local_data_store="../data/sqlite3/"):
     """
     Download DSCOVR data and store it in the local sqlite database.
     """
     from db_utils import DbUtils
     db_connection = DbUtils(db_name=db_name,
                             local_data_store=local_data_store)
     dscovr_df = self._download_dscovr()
     if dscovr_df is not None:
         db_connection.dscovr_to_db(dscovr_df, table_name=table_name)
         print("Updated DB!")
     # Close the connection even when no data was downloaded.
     db_connection._close_dbconn()
     return
# Example #8
# 0
 def fetch_store_aur_data(self, db_name="gme_data",
                          table_name="aur_inds",
                          local_data_store="../data/sqlite3/"):
     """
     Download auroral electrojet index data and store it in the local db,
     back-filling dates the provider is missing from archived images.
     """
     from db_utils import DbUtils
     from aul_from_images import ExtractAL
     # Fetch the data along with the list of dates the provider lacks.
     data_df, missing_dates = self.get_al_data()
     # Set up the database connection.
     db_connection = DbUtils(db_name=db_name,
                             local_data_store=local_data_store)
     if data_df is not None:
         print("Working with data wdc provides!")
         db_connection.aur_inds_to_db(data_df, table_name=table_name)
         print("Updated DB!")
     # Fill the gaps by extracting AL values from archived images.
     if len(missing_dates) > 0:
         print("Working with missing dates! cheat mode on")
         image_extractor = ExtractAL(missing_dates)
         aur_img_df = image_extractor.get_al_data()
         db_connection.aur_inds_to_db(aur_img_df, table_name=table_name)
         print("Updated DB!")