async def main():
    """Demo: list Binance base/reference assets, then build and print a small
    time-aggregated OHLCV container for NANO vs BTC/USDT."""
    init_logger(level=logging.DEBUG)

    # Factory providing every Binance-specific helper used below.
    exchange_factory = class_builders.get("market").get("binance")()

    async with exchange_factory.create_data_homogenizer() as binance_homogenizer:
        base_assets = await binance_homogenizer.get_all_base_assets()
        print(f"All the base assets available on the "
              f"Binance exchange are {base_assets}")

        # NOTE(review): the "refernce" spelling follows the upstream API name.
        reference_assets = await binance_homogenizer.get_all_refernce_assets()
        print(f"All the reference assets available on the"
              f" Binance exchange are {reference_assets}")

    # Single date window at daily-candle resolution.
    wanted_columns = ["open_ts", "open", "close_ts"]
    container = data_container_access.TimeAggregatedDataContainer(
        exchange_factory,
        base_assets=["NANO"],
        reference_assets=["BTC", "USDT"],
        ohlcv_fields=wanted_columns,
        time_range_dict={("25 Jan 2018", "27 Feb 2018"): "1d"},
    )
    coin_dataarray = await container.get_time_aggregated_data_container()
    pprint(coin_dataarray)

    # Also show the Dataset view keyed by OHLCV field.
    coin_dataset = coin_dataarray.to_dataset("ohlcv_fields")
    pprint(coin_dataset)
async def main():
    """Demo: pull 1m open/close history for every Binance base asset against
    BTC, type-convert it, and write it to a throwaway sqlite database."""
    init_logger(level=logging.DEBUG)

    exchange_factory = class_builders.get("market").get("binance")()

    # Collect the complete base-asset list from the data homogenizer.
    async with exchange_factory.create_data_homogenizer() as binance_homogenizer:
        base_assets = await binance_homogenizer.get_all_base_assets()

    container = data_container_access.TimeAggregatedDataContainer.create_instance(
        exchange_factory,
        base_assets=base_assets,
        reference_assets=["BTC"],
        ohlcv_fields=["open_ts", "open", "close"],
        time_range_dict={("25 Jan 2015", "9 Nov 2020"): "1m"})
    raw_dataarray = await container.get_time_aggregated_data_container()

    # Coerce dtypes as defined by the exchange's field metadata.
    type_converter = data_container_post.TypeConvertedData(exchange_factory)
    typed_dataarray = type_converter.set_type_on_dataarray(raw_dataarray)

    # Persist "open"/"close" columns into a temporary sqlite db
    # (the file is deleted when the context manager exits).
    sql_writer = class_builders.get("write_to_disk").get("sqlite")()
    with tempfile.NamedTemporaryFile(suffix=".db") as temp_db:
        save_to_disk.write_coin_history_to_file(
            typed_dataarray, sql_writer, temp_db.name, ["open", "close"])
async def main():
    """Demo: build a two-window, two-resolution OHLCV container and run the
    post-processing chain (type conversion + incomplete-data handling) on both
    the DataArray and Dataset representations."""
    init_logger(level=logging.DEBUG)

    exchange_factory = class_builders.get("market").get("binance")()

    # Two date windows at different candle resolutions.
    windows = {("25 Jan 2018", "27 Feb 2018"): "1d",
               ("26 Aug 2020", "now"): "1w"}
    container = data_container_access.TimeAggregatedDataContainer(
        exchange_factory,
        base_assets=["NANO", "AMB", "XRP"],
        reference_assets=["BTC", "USDT"],
        ohlcv_fields=["open_ts", "open", "close_ts"],
        time_range_dict=windows,
    )
    coin_dataarray = await container.get_time_aggregated_data_container()
    pprint(coin_dataarray)

    coin_dataset = coin_dataarray.to_dataset("ohlcv_fields")
    pprint(coin_dataset)

    # Apply dtype conversion to both container flavours.
    type_converter = data_container_post.TypeConvertedData(exchange_factory)
    typed_dataset = type_converter.set_type_on_dataset(coin_dataset)
    typed_dataarray = type_converter.set_type_on_dataarray(coin_dataarray)

    # First drop coins that are entirely NA ...
    na_handler = data_container_post.HandleIncompleteData()
    pruned_dataarray = na_handler.drop_xarray_coins_with_entire_na(typed_dataarray)
    pruned_dataset = na_handler.drop_xarray_coins_with_entire_na(typed_dataset)

    # ... then nullify the remaining incomplete entries and show the results.
    strict_dataarray = na_handler.nullify_incomplete_data_from_dataarray(pruned_dataarray)
    pprint(strict_dataarray)

    strict_dataset = na_handler.nullify_incomplete_data_from_dataset(pruned_dataset)
    pprint(strict_dataset)
# Esempio n. 4 (Example no. 4)
def main(db_file_path=None):
    """Compute per-coin indicator results over a narrowed window and pickle them.

    Loads the full BTC-referenced open-price history (1h and 1d tables) from a
    sqlite file, drives the indicator calculation over a backwards daily time
    iteration with the configured source/success parameter grid, and dumps the
    collective dataset under ``common_db`` relative to this file.

    Args:
        db_file_path: Optional path to the sqlite coin-history database.
            Defaults to the original hard-coded location so existing callers
            are unaffected.
    """
    init_logger(logging.DEBUG)
    overall_start = datetime(day=25, month=8, year=2018)
    overall_end = datetime(day=18, month=11, year=2020)
    reference_coin = "BTC"
    ohlcv_field = "open"
    candle = "1h"
    interval = "1d"
    data_source_general = "sqlite"

    if db_file_path is None:
        # NOTE(review): machine-specific default kept for backward
        # compatibility; pass db_file_path explicitly on other machines.
        db_file_path = ("/Users/vikram/Documents/Personal/s3_sync/"
                        "25_Jan_2017_TO_23_May_2021_BTC_1h_1d.db")

    # Walk the overall window backwards in fixed (non-increasing) daily steps.
    time_interval_iterator = TimeIntervalIterator(overall_start,
                                                  overall_end,
                                                  interval,
                                                  forward_in_time=False,
                                                  increasing_range=False)

    # Both candle resolutions stored for this field/reference coin.
    table_name_list = [
        f"COIN_HISTORY_{ohlcv_field}_{reference_coin}_1d",
        f"COIN_HISTORY_{ohlcv_field}_{reference_coin}_1h"
    ]

    sqlite_access_creator = class_builders.get("access_xarray").get(
        data_source_general)()

    full_dataarray = store_largest_xarray(
        sqlite_access_creator,
        overall_start=overall_start,
        overall_end=overall_end,
        candle=candle,
        reference_coin=reference_coin,
        ohlcv_field=ohlcv_field,
        file_path=db_file_path,
        mapped_class=OversoldCoins,
        table_name_list=table_name_list)

    source_iterators = ManualSourceIterators()
    success_iterators = ManualSuccessIterators()

    # Parameter grid driving the indicator calculation.
    iterators = {
        "time": time_interval_iterator,
        "source": [source_iterators.high_cutoff, source_iterators.low_cutoff],
        "success": [success_iterators.percentage_increase,
                    success_iterators.days_to_run],
        "target": [
            "percentage_of_bought_coins_hit_target",
            "end_of_run_value_of_bought_coins_if_not_sold",
            "end_of_run_value_of_bought_coins_if_sold_on_target"
        ],
        "strategy": [MarketBuyLimitSellIndicatorCreator]
    }

    # Previously-pickled potential coins, staged in the s3 sync folder.
    pickled_potential_path = str(
        pathlib.Path(__file__).parents[4] / "s3_sync" / "staging" /
        "1d_2018-07-01_2021-05-20_potential_coins_overall.pickle")

    gather_items = gather_overall.GatherIndicator(
        full_dataarray,
        reference_coin,
        ohlcv_field,
        iterators,
        potential_coin_path=pickled_potential_path,
    )

    # Slightly narrower window than the overall range for the actual run.
    narrowed_start = datetime(day=25, month=8, year=2018)
    narrowed_end = datetime(day=17, month=11, year=2020)

    collective_ds = gather_items.overall_individual_indicator_calculator(
        narrowed_start, narrowed_end)

    # Output name encodes interval and the narrowed date window.
    results_path = pathlib.Path(
        pathlib.Path(__file__).parents[2] / "common_db" /
        f"success_results_{interval}_"
        f"{narrowed_start.strftime('%d-%b-%Y')}_"
        f"{narrowed_end.strftime('%d-%b-%Y')}")
    with open(results_path, "wb") as fp:
        pickle.dump(collective_ds, fp)
def main(result_file_path=None):
    """Run trailing-sell market-buy simulations over the history and pickle results.

    Loads the full BTC-referenced open-price history (1h and 1d tables) from a
    sqlite file located relative to this script, drives the simulation over a
    single backwards 300-day interval with the configured source/success
    parameter grid, and pickles the collective dataset.

    Args:
        result_file_path: Optional destination for the pickled result.
            Defaults to the original hard-coded location so existing callers
            are unaffected.
    """
    init_logger(logging.DEBUG)
    overall_start = datetime(day=25, month=8, year=2018)
    overall_end = datetime(day=20, month=5, year=2021)
    reference_coin = "BTC"
    ohlcv_field = "open"
    candle = "1h"
    interval = "300d"
    data_source_general = "sqlite"

    if result_file_path is None:
        # NOTE(review): machine-specific default kept for backward
        # compatibility; pass result_file_path explicitly on other machines.
        result_file_path = "/Users/vikram/Documents/Personal/s3_sync/result_temp_1"

    # Walk the overall window backwards in fixed (non-increasing) 300-day steps.
    time_interval_iterator = TimeIntervalIterator(overall_start,
                                                  overall_end,
                                                  interval,
                                                  forward_in_time=False,
                                                  increasing_range=False)

    # Both candle resolutions stored for this field/reference coin.
    table_name_list = [f"COIN_HISTORY_{ohlcv_field}_{reference_coin}_1d",
                       f"COIN_HISTORY_{ohlcv_field}_{reference_coin}_1h"]

    sqlite_access_creator = class_builders.get("access_xarray").get(
        data_source_general)()

    # History db lives in the s3 sync folder four levels above this file.
    file_path = str(pathlib.Path(__file__).parents[4] /
                    "s3_sync" /
                    "25_Jan_2017_TO_23_May_2021_BTC_1h_1d.db")
    full_dataarray_dict = store_largest_xarray(sqlite_access_creator,
                                               overall_start=overall_start,
                                               overall_end=overall_end,
                                               candle=candle,
                                               reference_coin=reference_coin,
                                               ohlcv_field=ohlcv_field,
                                               file_path=file_path,
                                               mapped_class=OversoldCoins,
                                               table_name_list=table_name_list)

    source_iterators = ManualSourceIterators()
    success_iterators = ManualSuccessIterators()

    # Parameter grid driving the simulation runs.
    iterators = {
        "time": time_interval_iterator,
        "source": [source_iterators.cutoff_mean,
                   source_iterators.cutoff_deviation,
                   source_iterators.max_coins_to_buy],
        "success": [success_iterators.percentage_increase,
                    success_iterators.percentage_reduction,
                    success_iterators.days_to_run,
                    success_iterators.stop_price_sell,
                    success_iterators.limit_sell_adjust_trail],
        "target": ["calculate_end_of_run_value"],
        "strategy": [MarketBuyTrailingSellSimulationCreator]
    }

    # Previously-pickled potential coins, staged in the s3 sync folder.
    pickled_potential_path = str(pathlib.Path(__file__).parents[4] /
                                 "s3_sync" /
                                 "staging" /
                                 "1d_2018-07-01_2021-05-20_potential_coins_overall.pickle")

    gather_items = gather_overall.GatherSimulation(
        full_dataarray_dict,
        reference_coin,
        ohlcv_field,
        iterators,
        potential_coin_path=pickled_potential_path,
    )

    narrowed_start = datetime(day=25, month=8, year=2018)
    narrowed_end = datetime(day=20, month=5, year=2021)

    collective_ds = gather_items.simulation_calculator(narrowed_start,
                                                       narrowed_end)
    with open(result_file_path, "wb") as fp:
        pickle.dump(collective_ds, fp)
def main(db_file_path=None):
    """Discover potential coins over the full history and pickle the result.

    Loads the full BTC-referenced open-price history (1h and 1d tables) from a
    sqlite file, iterates only the source cutoffs over a backwards daily time
    iteration, and stores the discovered potential coins as a pickle in the
    s3 sync staging folder.

    Args:
        db_file_path: Optional path to the sqlite coin-history database.
            Defaults to the original hard-coded location so existing callers
            are unaffected.
    """
    init_logger(logging.DEBUG)
    overall_start = datetime(day=25, month=8, year=2018)
    overall_end = datetime(day=20, month=5, year=2021)
    reference_coin = "BTC"
    ohlcv_field = "open"
    candle = "1h"
    data_source_general = "sqlite"

    if db_file_path is None:
        # NOTE(review): machine-specific default kept for backward
        # compatibility; pass db_file_path explicitly on other machines.
        db_file_path = ("/Users/vikram/Documents/Personal/s3_sync/"
                        "25_Jan_2017_TO_23_May_2021_BTC_1h_1d.db")

    # Both candle resolutions stored for this field/reference coin.
    table_name_list = [
        f"COIN_HISTORY_{ohlcv_field}_{reference_coin}_1d",
        f"COIN_HISTORY_{ohlcv_field}_{reference_coin}_1h"
    ]

    sqlite_access_creator = class_builders.get("access_xarray").get(
        data_source_general)()

    full_history_da_dict = store_largest_xarray(
        sqlite_access_creator,
        overall_start=overall_start,
        overall_end=overall_end,
        candle=candle,
        reference_coin=reference_coin,
        ohlcv_field=ohlcv_field,
        file_path=db_file_path,
        mapped_class=OversoldCoins,
        table_name_list=table_name_list)

    source_iterators = ManualSourceIterators()

    # Walk the overall window backwards in fixed (non-increasing) daily steps.
    interval = "1d"
    time_interval_iterator = TimeIntervalIterator(overall_start,
                                                  overall_end,
                                                  interval,
                                                  forward_in_time=False,
                                                  increasing_range=False)

    # Only source cutoffs vary for potential-coin discovery; the
    # success/target/strategy axes are intentionally empty here.
    iterators = {
        "time": time_interval_iterator,
        "source": [source_iterators.high_cutoff, source_iterators.low_cutoff],
        "success": [],
        "target": [],
        "strategy": []
    }

    gather_items = gather_overall.GatherPotential(
        full_history_da_dict,
        reference_coin,
        ohlcv_field,
        iterators,
        potential_coin_path=None,
    )

    narrowed_start = datetime(day=1, month=7, year=2018)
    narrowed_end = datetime(day=20, month=5, year=2021)

    # Pickle destination encodes interval and the narrowed date window.
    pickled_file_path = str(pathlib.Path(
        pathlib.Path(__file__).parents[4] / "s3_sync" / "staging" /
        f"{interval}_"
        f"{narrowed_start.year}-{narrowed_start.month}-{narrowed_start.day}_"
        f"{narrowed_end.year}-{narrowed_end.month}-{narrowed_end.day}_"
        f"potential_coins_overall.pickle"))
    gather_items.store_potential_coins_pickled(
        pickled_file_path=pickled_file_path,
        narrowed_start_time=narrowed_start,
        narrowed_end_time=narrowed_end)