Example #1
    if data_vendor == 'ncfx':
        from tcapy.data.databasepopulator import DatabasePopulatorNCFX as DatabasePopulator

        tickers = constants.ncfx_tickers
    elif data_vendor == 'dukascopy':
        from tcapy.data.databasepopulator import DatabasePopulatorDukascopy as DatabasePopulator

        tickers = constants.dukascopy_tickers
    else:
        raise ValueError('Unknown data_vendor: ' + str(data_vendor))

    # Example of manually specifying tickers
    # tickers = {'EURUSD' : 'EURUSD', 'GBPUSD': 'GBPUSD', 'USDCAD': 'USDCAD', 'NZDUSD': 'NZDUSD', 'USDCHF' : 'USDCHF',
    #            'USDJPY' : 'USDJPY'}

    db_populator = DatabasePopulator(
        temp_data_folder=temp_data_folder,
        temp_large_data_folder=temp_large_data_folder,
        tickers=tickers)

    # Writes CSV/Parquet files to disk from the data vendor (does not attempt to write anything to the database)
    # Also dumps temporary HDF5 files to disk (to avoid having to redownload the data)
    msg, df_dict = db_populator.download_to_csv(
        start_date_csv,
        finish_date_csv,
        tickers,
        split_size=split_size,
        csv_folder=csv_folder,
        return_df=False,
        remove_duplicates=False,
        write_large_csv=write_large_csv,
        write_large_hdf5_parquet=write_large_hdf5_parquet)
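A minimal driver for the snippet above might define the names it references; the folder paths, dates and flag values below are illustrative assumptions, not values from the original example.

    from tcapy.conf.constants import Constants  # import path assumed from the tcapy layout

    constants = Constants()

    data_vendor = 'dukascopy'

    # Scratch folders for temporary files (paths assumed)
    temp_data_folder = '/tmp/tcapy_temp'
    temp_large_data_folder = '/tmp/tcapy_temp_large'

    # Output folder for the generated CSV/Parquet files (path assumed)
    csv_folder = '/tmp/tcapy_csv'

    start_date_csv = '01 May 2017'
    finish_date_csv = '31 May 2017'

    # How to split the output files and which large dumps to write (values assumed)
    split_size = 'monthly'
    write_large_csv = False
    write_large_hdf5_parquet = True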

Example #2

from collections import OrderedDict

# The import paths below are assumed from the tcapy project layout
from tcapy.conf.constants import Constants
from tcapy.data.databasepopulator import DatabasePopulatorDukascopy, DatabasePopulatorNCFX
from tcapy.data.databasesource import DatabaseSourceDukascopy, DatabaseSourceNCFX

constants = Constants()

chunk_int_min_dict = {
    'dukascopy': None,
    'ncfx': 60
}  # Number of minutes to download from the data vendor in each chunk (e.g. 60 minutes)

########################################################################################################################
folder = Constants().test_data_harness_folder

#### Change for your data vendor
data_vendor_name_list = ['ncfx', 'dukascopy']

database_populator_dict = {
    'dukascopy': DatabasePopulatorDukascopy(),
    'ncfx': DatabasePopulatorNCFX()
}
database_source_dict = {
    'dukascopy': DatabaseSourceDukascopy(),
    'ncfx': DatabaseSourceNCFX()
}

# Skip NCFX if no URL/credentials have been configured
if constants.ncfx_url is None and 'ncfx' in data_vendor_name_list:
    data_vendor_name_list.remove('ncfx')

####
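# These lookup dicts are all keyed by vendor name, so downstream code can pick
# the matching populator, database source and chunk size in one pass; a minimal
# sketch, using only the names defined above:
for data_vendor_name in data_vendor_name_list:
    db_populator = database_populator_dict[data_vendor_name]
    db_source = database_source_dict[data_vendor_name]
    chunk_int_min = chunk_int_min_dict[data_vendor_name]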

trade_order_list = ['trade_df', 'order_df']

sql_trade_order_mapping = OrderedDict([
    ('trade_df', '[dbo].[trade]'),    # SQL table holding trade fills (table names assumed)
    ('order_df', '[dbo].[order]')])   # SQL table holding parent orders (table names assumed)

Example #3

# The import paths below are assumed from the tcapy project layout
from tcapy.conf.constants import Constants
from tcapy.data.databasepopulator import DatabasePopulatorDukascopy, DatabasePopulatorNCFX
from tcapy.data.databasesource import DatabaseSourceDukascopy, DatabaseSourceNCFX

constants = Constants()

web_proxies = {'https' : None}

# web_proxies = {
#     'http' : "http://127.0.0.1:8080",
#     'https' : "https://127.0.0.1:7000",
# }
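# Illustration only (not from the original example): a proxies dict of this
# shape matches what the Python `requests` library accepts for its `proxies`
# argument; requests drops entries whose value is None
import requests

session = requests.Session()
session.proxies.update(web_proxies)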

chunk_int_min_dict = {'dukascopy' : None, 'ncfx' : 60} # Number of minutes to download from the data vendor in each chunk (e.g. 60 minutes)

########################################################################################################################
folder = Constants().test_data_harness_folder

#### Change for your data vendor
data_vendor_name_list = ['dukascopy'] # ['ncfx', 'dukascopy']

database_populator_dict = {'dukascopy' : DatabasePopulatorDukascopy(), 'ncfx' : DatabasePopulatorNCFX()}
database_source_dict = {'dukascopy' : DatabaseSourceDukascopy(), 'ncfx' : DatabaseSourceNCFX()}

# Skip NCFX if no URL has been configured, or if the configured URL looks too
# short to be valid
if constants.ncfx_url is None and 'ncfx' in data_vendor_name_list:
    data_vendor_name_list.remove('ncfx')

if constants.ncfx_url is not None and 'ncfx' in data_vendor_name_list:
    if len(constants.ncfx_url) < 10:
        data_vendor_name_list.remove('ncfx')

# Dates deliberately outside the available data range (presumably used to test
# that no data is returned)
invalid_start_date = '01 Jan 1999'
invalid_finish_date = '01 Feb 1999'

use_multithreading = False

def test_fetch_market_data_from_data_vendor():
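    # The original test body is not shown here; below is a hedged sketch (an
    # assumption, not the original code). The method name fetch_market_data
    # and its keyword arguments are also assumed.
    for data_vendor_name in data_vendor_name_list:
        database_source = database_source_dict[data_vendor_name]

        # Fetch a small window of tick data and check something came back
        df = database_source.fetch_market_data(
            start_date='04 Dec 2017 10:00', finish_date='04 Dec 2017 10:05',
            ticker='EURUSD')

        assert df is not None and not df.empty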
Example #4
    if data_vendor == 'ncfx':
        from tcapy.data.databasepopulator import DatabasePopulatorNCFX as DatabasePopulator

        tickers = constants.ncfx_tickers
        data_store = 'arctic-ncfx'

    elif data_vendor == 'dukascopy':
        from tcapy.data.databasepopulator import DatabasePopulatorDukascopy as DatabasePopulator

        tickers = constants.dukascopy_tickers
        data_store = 'arctic-dukascopy'
    else:
        raise ValueError('Unknown data_vendor: ' + str(data_vendor))

    db_populator = DatabasePopulator(
        temp_data_folder=temp_data_folder,
        temp_large_data_folder=temp_large_data_folder,
        tickers=tickers,
        data_store=data_store)
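
    # The flags and dates below are referenced later in this example; the
    # values here are placeholder assumptions, not from the original source
    FIRST_DOWNLOAD = True

    start_date = '01 May 2017'
    finish_date = '31 May 2017'

    remove_duplicate_follow_on = True    # assumed: drop repeated consecutive ticks
    delete_cached_files = False          # assumed: keep temporary files on disk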

    # The first time we download data from the data vendor
    if FIRST_DOWNLOAD:
        # WARNING: for the very first download choose the start/finish dates carefully;
        # you may also need to adjust the delete_cached_files flag so temporary
        # files on disk are cleaned up
        msg = db_populator.download_from_external_source(
            append_data=True,
            start_date=start_date,
            finish_date=finish_date,
            remove_duplicates=remove_duplicate_follow_on,
            delete_cached_files=delete_cached_files,
            write_to_disk_db=True,
            if_exists_ticker='replace',