def convert(start_date=None, end_date=None, end_time='15:01:00', bar_size='1 min', verbose=False):
    """Create CSV dumps of extracted data for every date in the given range.

    :param start_date: first date to convert (ex: '20201231'); defaults to end_date
    :param end_date: last date to convert (ex: '20210101'); required
    :param end_time: end time of the session window (ex: '15:01:00')
    :param bar_size: bar granularity forwarded to the CSV dump (ex: '1 min')
    :param verbose: currently unused; kept for interface compatibility
    :raises ValueError: if end_date is not provided
    """
    # The end date is mandatory; validate it before defaulting start_date.
    if end_date is None:
        raise ValueError('User must pass at least the end date for data conversion.')
    if start_date is None:
        # A missing start date means a single-day conversion.
        start_date = end_date
    for date in get_date_range(start_date, end_date):
        create_csv_dump(date, end_time=end_time, bar_size=bar_size)
def convert(start_date=None, end_date=None):
    """Create CSV dumps of extracted data for every date in the given range.

    :param start_date: first date to convert (ex: '20201231'); defaults to end_date
    :param end_date: last date to convert (ex: '20210101'); required
    :raises ValueError: if end_date is not provided
    """
    # The end date is mandatory; validate it before defaulting start_date.
    if end_date is None:
        raise ValueError('User must pass at least the end date for data conversion.')
    if start_date is None:
        # A missing start date means a single-day conversion.
        start_date = end_date
    for date in get_date_range(start_date, end_date):
        create_csv_dump(date)
def metrics(tickers, start_date=None, end_date=None, bar_size='1 min', verbose=False):
    """Generate extraction metrics for the given tickers over a date range.

    :param tickers: tickers to generate metrics for; None is reserved for
        reading cached input (not implemented yet — see fixme below)
    :param start_date: first date (ex: '20201231'); defaults to end_date
    :param end_date: last date (ex: '20210101'); required
    :param bar_size: bar granularity (ex: '1 min')
    :param verbose: currently unused; kept for interface compatibility
    :raises ValueError: if end_date is not provided
    """
    # todo: input sequence
    # The end date is mandatory; validate it before defaulting start_date.
    if end_date is None:
        raise ValueError('User must pass at least the end date for metrics generation.')
    if start_date is None:
        start_date = end_date
    if tickers is None:
        pass  # fixme: read cached input
    for date in get_date_range(start_date, end_date):
        metrics_generator(date, bar_size, tickers)
def metrics(start_date=None, end_date=None, tickers=None):
    """Generate extraction metrics for every date in the given range.

    :param start_date: first date (ex: '20201231'); defaults to end_date
    :param end_date: last date (ex: '20210101'); required
    :param tickers: reserved for ticker-scoped metrics (not implemented yet)
    :raises ValueError: if end_date is not provided
    """
    # The end date is mandatory; validate it before defaulting start_date.
    if end_date is None:
        raise ValueError('User must pass at least the end date for metrics generation.')
    if start_date is None:
        start_date = end_date
    if tickers is None:
        pass  # fixme: to be implemented...
    for date in get_date_range(start_date, end_date):
        generate_extraction_metrics(date)
def download(tickers=None, start_date=None, end_date=None, end_time=None, duration=None, bar_size=None, what_to_show=None, use_rth=None, verbose=False):
    """Download historical data from TWS for every date in the given range.

    :param tickers: an iterable of tickers, or a path to a file listing them
    :param start_date: first date (ex: '20201231'); defaults to end_date
    :param end_date: last date (ex: '20210101'); required
    :param end_time: end time of the session window (ex: '15:00:01')
    :param duration: the amount of time to go back from end date (ex: '1 D')
    :param bar_size: bar granularity (ex: '1 min')
    :param what_to_show: the type of data to retrieve (ex: 'TRADES')
    :param use_rth: 1 to restrict to regular trading hours, else 0
    :param verbose: forwarded to the extractor
    :raises ValueError: if end_date is not provided
    """
    # Only probe the filesystem for string inputs: calling isfile() on the
    # None default (or on a list of symbols) raises TypeError.
    if isinstance(tickers, str) and isfile(tickers):
        tickers = get_tickers_from_user_file(tickers)
    # The end date is mandatory; validate it before defaulting start_date.
    if end_date is None:
        raise ValueError('User must specify at least the end date for data extraction.')
    if start_date is None:
        start_date = end_date
    for date in get_date_range(start_date, end_date):
        extract_historical_data(tickers=tickers, end_date=date, end_time=end_time,
                                duration=duration, bar_size=bar_size,
                                what_to_show=what_to_show, use_rth=use_rth,
                                verbose=verbose)
def metrics(tickers=None, start_date=None, end_date=None, end_time='15:01:00', verbose=False):
    """Generate extraction metrics for every date in the given range.

    :param tickers: tickers to generate metrics for; None is reserved for
        reading cached input (not implemented yet — see fixme below)
    :param start_date: first date (ex: '20201231'); defaults to end_date
    :param end_date: last date (ex: '20210101'); required
    :param end_time: end time of the session window (ex: '15:01:00')
    :param verbose: currently unused; kept for interface compatibility
    :raises ValueError: if end_date is not provided
    """
    # The end date is mandatory; validate it before defaulting start_date.
    if end_date is None:
        raise ValueError('User must pass at least the end date for metrics generation.')
    if start_date is None:
        start_date = end_date
    if tickers is None:
        pass  # fixme: read cached input
    for date in get_date_range(start_date, end_date):
        generate_extraction_metrics(date, end_time=end_time, input_tickers=tickers)
def extract_historical_data(tickers=None, start_date=None, end_date=None, end_time=None,
                            duration='1 D', bar_size='1 min', what_to_show='TRADES',
                            use_rth=1, date_format=1, keep_upto_date=False,
                            chart_options=(), batch_size=_BATCH_SIZE):
    """
    A wrapper function around HistoricalDataExtractor, that pulls data from
    TWS for the given tickers.

    :param tickers: ticker ID (ex: 1301)
    :param start_date: date from which the extraction is to be started (ex: '20201231')
    :param end_date: end date (ex: '20210101')
    :param end_time: end time (ex: '15:00:01')
    :param duration: the amount of time to go back from end_date_time (ex: '1 D')
    :param bar_size: valid bar size or granularity of data (ex: '1 min')
    :param what_to_show: the type of data to retrieve (ex: 'TRADES')
    :param use_rth: 1 means retrieve data withing regular trading hours, else 0
    :param date_format: format for bar data, 1 means yyyyMMdd, 0 means epoch time
    :param keep_upto_date: setting to True will continue to return unfinished bar data
    :param chart_options: to be documented
    :param batch_size: size of each batch as integer, default=30
    :raises ValueError: if end_date is not provided
    """
    stdout.write(f'\n{"-"*40} Init Extraction {"-"*40}\n')
    # The end date is mandatory; validate it before defaulting start_date.
    if end_date is None:
        raise ValueError(
            'User must at least specify an end date for extraction.')
    if start_date is None:
        start_date = end_date
    date_range = get_date_range(start_date, end_date)
    # Use a distinct loop name so the end_date parameter is not shadowed.
    for target_date in date_range:
        # NOTE(review): tickers is deliberately rebound each iteration so that
        # cache state from the previous date feeds the next one — confirm.
        tickers, cache_success, cache_failure = _prep_for_extraction(
            tickers, target_date, end_time)
        batches = create_batches(tickers, batch_size)
        success_files, failure_files = _run_extractor(
            batches, target_date, end_time, duration, bar_size, what_to_show,
            use_rth, date_format, keep_upto_date, chart_options,
            cache_success, cache_failure)
        _cleanup(success_files, cache_success, failure_files, cache_failure)
    stdout.write(f'\n{"-"*100}\n\n')