Example no. 1
def fetch_timeseries(symbol, dir_name='data', use_cache=True, from_year=1900):
    """
    Read time series data for `symbol`.

    Use the cached CSV if it exists and use_cache is True; otherwise
    retrieve the data from Yahoo, cache it, then read it back.

    Parameters
    ----------
    symbol : str
        Ticker symbol to fetch.
    dir_name : str
        Leaf cache directory name, joined under the configured base dir.
    use_cache : bool
        If True, reuse an existing cached CSV instead of re-downloading.
    from_year : int
        First year of data to request when downloading.

    Returns
    -------
    pandas.DataFrame
        Time series indexed by 'Date', with column names adjusted by
        _adj_column_names().
    """
    base_dir = ''
    try:
        conf = pf.read_config()
        base_dir = conf['base_dir']
    except Exception:
        # Config missing or unreadable: fall back to the bare dir_name.
        pass
    finally:
        dir_name = os.path.join(base_dir, dir_name)

    if not os.path.exists(dir_name):
        os.makedirs(dir_name)

    timeseries_cache = os.path.join(dir_name, symbol + '.csv')

    # Download (and cache) only when there is no usable cached copy.
    if not (use_cache and os.path.isfile(timeseries_cache)):
        ts = pdr.DataReader(symbol, 'yahoo',
                            start=datetime.datetime(from_year, 1, 1))
        ts.to_csv(timeseries_cache, encoding='utf-8')

    ts = pd.read_csv(timeseries_cache, index_col='Date', parse_dates=True)
    ts = _adj_column_names(ts)
    return ts
Example no. 2
def _get_cache_dir(dir_name):
    """
    Get the data dir path.

    Parameters
    ----------
    dir_name : str
        The leaf data dir name.

    Returns
    -------
    str
        Path to the data dir.
    """
    base_dir = ''
    try:
        conf = pf.read_config()
        base_dir = conf['base_dir']
    except:
        pass
    finally:
        dir_name = os.path.join(base_dir, dir_name)

    if not os.path.exists(dir_name):
        os.makedirs(dir_name)
    return dir_name
Example no. 3
def fetch_timeseries(symbol, dir_name='data', use_cache=True, from_year=1900):
    """
    Read time series data for `symbol`.

    Use the cached CSV if it exists and use_cache is True; otherwise
    retrieve the data from Google, cache it, then read it back.

    Parameters
    ----------
    symbol : str
        Ticker symbol to fetch.
    dir_name : str
        Leaf cache directory name, joined under the configured base dir.
    use_cache : bool
        If True, reuse an existing cached CSV instead of re-downloading.
    from_year : int
        First year of data to request when downloading.

    Returns
    -------
    pandas.DataFrame
        Time series indexed by 'Date', with column names adjusted by
        _adj_column_names().
    """
    base_dir = ''
    try:
        conf = pf.read_config()
        base_dir = conf['base_dir']
    except Exception:
        # Config missing or unreadable: fall back to the bare dir_name.
        pass
    finally:
        dir_name = os.path.join(base_dir, dir_name)

    if not os.path.exists(dir_name):
        os.makedirs(dir_name)

    timeseries_cache = os.path.join(dir_name, symbol + '.csv')

    # Download (and cache) only when there is no usable cached copy.
    if not (use_cache and os.path.isfile(timeseries_cache)):
        ts = pdr.DataReader(symbol,
                            'google',
                            start=datetime.datetime(from_year, 1, 1))
        ts.to_csv(timeseries_cache, encoding='utf-8')

    ts = pd.read_csv(timeseries_cache, index_col='Date', parse_dates=True)
    ts = _adj_column_names(ts)
    return ts
Example no. 4
def _get_cache_dir(dir_name):
    """ returns the path to the cache_dir """
    base_dir = ''
    try:
        conf = pf.read_config()
        base_dir = conf['base_dir']
    except:
        pass
    finally:
        dir_name = os.path.join(base_dir, dir_name)

    if not os.path.exists(dir_name):
        os.makedirs(dir_name)
    return dir_name
Example no. 5
    def test_read_config(self, mocker):
        """Check that the config file is read correctly.

        Writes a known config to the test config path, then verifies
        that pf.read_config() returns the 'base_dir' value from the
        'global' section.
        """
        config_path = get_test_config_path()
        config = ConfigParser()

        # Surround base_dir with other keys to ensure it is picked out
        # of a multi-key section, not just a single-entry one.
        config["global"] = {}
        config["global"]["global1"] = "1"
        config["global"]["base_dir"] = "some_directory"
        config["global"]["global3"] = "3"

        with open(config_path, "w") as config_file:
            config.write(config_file)

        conf_dict = pf.read_config()
        # assertEqual gives a useful diff on failure, unlike
        # assertTrue(a == b).
        self.assertEqual(config["global"]["base_dir"], conf_dict["base_dir"])