Example #1
0
            resample="15s",
            resample_how="last_dropna"
        )

        # Build the market object; MarketDataGenerator does the actual download
        market = Market(market_data_generator=MarketDataGenerator())

        # First fetch: downloads (and resamples) from the data source
        df = market.fetch_market(md_request=md_request)

        print(df)

        # the second time we call it, if we have Redis installed, we will fetch
        # from memory, so it will be quicker
        # also don't need to run the resample operation again

        # need to specify cache_algo_return so the cached (already resampled)
        # result is returned instead of re-downloading from the data source
        md_request.cache_algo = "cache_algo_return"

        df = market.fetch_market(md_request)

        print(df)

    if run_example == 3:
        # In this case we are saving predefined daily tickers to disk, and then
        # reading back
        from findatapy.util.dataconstants import DataConstants
        from findatapy.market.ioengine import IOEngine
        import os

        quandl_api_key = DataConstants().quandl_api_key  # change with your own
        # Quandl API key!
Example #2
0
    # In the config file, the keywords 'open', 'high', 'low', 'close' and
    # 'volume' can be used for alphavantage data

    # Download equities data from alphavantage; findatapy tickers map
    # one-to-one onto the alphavantage (vendor) symbols below
    equity_tickers = ['Apple', 'Citigroup', 'Microsoft', 'Oracle', 'IBM',
                      'Walmart', 'Amazon', 'UPS', 'Exxon']
    vendor_symbols = ['aapl', 'c', 'msft', 'orcl', 'ibm', 'wmt', 'amzn',
                      'ups', 'xom']

    md_request = MarketDataRequest(
        start_date='01 Jan 2002',                # start date
        finish_date='05 Feb 2017',               # finish date
        data_source='alphavantage',              # use alphavantage as data source
        tickers=equity_tickers,                  # tickers (findatapy)
        fields=['close'],                        # which fields to download
        vendor_tickers=vendor_symbols,           # tickers (alphavantage)
        vendor_fields=['Close'],                 # which alphavantage fields to download
        cache_algo='internet_load_return')       # force a download from the web

    logger.info("Load data from alphavantage directly")
    df = market.fetch_market(md_request)

    logger.info(
        "Loaded data from alphavantage directly, now try reading from Redis in-memory cache"
    )

    # Switch the cache algo so the second fetch is served from the Redis
    # in-memory cache rather than downloaded again via the web
    md_request.cache_algo = 'cache_algo_return'

    df = market.fetch_market(md_request)

    logger.info("Read from Redis cache.. that was a lot quicker!")
Example #3
0
            freq='intraday',
            resample='15s',
            resample_how='last_dropna'
        )

        # Build the market object; MarketDataGenerator does the actual download
        market = Market(market_data_generator=MarketDataGenerator())

        # First fetch: downloads (and resamples) from the data source
        df = market.fetch_market(md_request=md_request)

        print(df)

        # the second time we call it, if we have Redis installed, we will fetch from memory, so it will be quicker
        # also don't need to run the resample operation again

        # need to specify cache_algo_return so the cached (already resampled)
        # result is returned instead of re-downloading from the data source
        md_request.cache_algo = 'cache_algo_return'

        df = market.fetch_market(md_request)

        print(df)

    if run_example == 3:
        # In this case we are saving predefined daily tickers to disk, and then reading back
        from findatapy.util.dataconstants import DataConstants
        from findatapy.market.ioengine import IOEngine
        import os

        quandl_api_key = DataConstants().quandl_api_key # change with your own Quandl API key!

        md_request = MarketDataRequest(
            category='fx',
    # and "volume" for yahoo data

    # Download equities data from yahoo
    md_request = MarketDataRequest(
        start_date="01 Jan 2002",  # start date
        finish_date="05 Feb 2017",  # finish date
        data_source="yahoo",  # use yahoo as data source
        tickers=["Apple", "Citigroup", "Microsoft", "Oracle", "IBM", "Walmart",
                 "Amazon", "UPS", "Exxon"],  # ticker (findatapy)
        fields=["close"],  # which fields to download
        vendor_tickers=["aapl", "c", "msft", "orcl", "ibm", "wmt", "amzn",
                        "ups", "xom"],  # ticker (yahoo)
        vendor_fields=["Close"],  # which yahoo fields to download
        cache_algo="internet_load_return")  # force a download from the web

    logger.info("Load data from yahoo directly")
    df = market.fetch_market(md_request)

    print(df)

    logger.info(
        "Loaded data from yahoo directly, now try reading from Redis "
        "in-memory cache")
    md_request.cache_algo = "cache_algo_return"  # change flag to cache algo
    # so won't attempt to download via web

    df = market.fetch_market(md_request)
    print(df)

    logger.info("Read from Redis cache.. that was a lot quicker!")