def metrics_initializer(m_data: dict, period='2y'):
    """Metrics Initializer

    Arguments:
        m_data {dict} -- sector data object; its 'Components' list supplies the tickers

    Keyword Arguments:
        period {str} -- (default: {'2y'})

    Returns:
        list -- downloaded data, sector list
    """
    sectors = m_data['Components']
    tickers = " ".join(sectors)
    tickers = index_appender(tickers)
    all_tickers = tickers.split(' ')

    if isinstance(period, list):
        period = period[0]

    print(" ")
    print(f'Fetching Type Composite Index funds for {period}...')
    data, _ = download_data_indexes(
        indexes=sectors, tickers=all_tickers, period=period, interval='1d')
    print(" ")
    return data, sectors
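# Usage sketch (not from the repo's callers; the tickers and dict structure
# below are illustrative assumptions). m_data is expected to carry a
# 'Components' list of sector fund symbols; index_appender() and
# download_data_indexes() are assumed to be this project's own utilities,
# already imported by this module.
#
#   type_data = {"Components": ["VGT", "VHT", "VDE"]}
#   data, sectors = metrics_initializer(type_data, period="2y")
#   # data: per-ticker price history (plus the appended index ticker)
#   # sectors: the same 'Components' list, echoed back to the caller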
def metrics_initializer(duration: str = 'short') -> list:
    """Metrics Initializer

    Keyword Arguments:
        duration {str} -- duration of view (default: {'short'})

    Returns:
        list -- data downloaded and sector list
    """
    metrics_file = os.path.join("resources", "sectors.json")
    if not os.path.exists(metrics_file):
        return None, []

    with open(metrics_file) as m_file:
        m_data = json.load(m_file)

    m_data = m_data.get("Correlation")
    sectors = m_data['tickers']
    tickers = " ".join(m_data['tickers'])
    start = m_data['start']
    tickers = index_appender(tickers)
    all_tickers = tickers.split(' ')
    date = datetime.now().strftime('%Y-%m-%d')

    print(" ")
    print('Fetching Correlation Composite Index funds...')
    if duration == 'short':
        # Short view: only pull roughly the last 900 days of history.
        start = (datetime.today() - timedelta(days=900)).strftime('%Y-%m-%d')

    data, _ = download_data_indexes(
        indexes=all_tickers, tickers=tickers, start=start, end=date, interval='1d')
    print(" ")
    return data, sectors
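# A minimal sketch of the 'resources/sectors.json' shape this function expects
# (an assumption inferred from the keys read above, not the project's shipped
# file; the symbols and date are placeholders): a "Correlation" block holding a
# 'tickers' list and a 'start' date string.
#
#   {
#       "Correlation": {
#           "tickers": ["VGT", "VHT", "VDE", "VFH"],
#           "start": "2005-01-01"
#       }
#   }
#
# With duration='short' the 'start' value is overridden to roughly the last
# 900 days; any other duration uses the date from the file.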
def metrics_initializer(period='5y', name='Market Composite Index'):
    """Metrics Initializer

    Keyword Arguments:
        period {str/list} -- duration of view (default: {'5y'})
        name {str} -- (default: {'Market Composite Index'})

    Returns:
        list -- data downloaded, sector list
    """
    metrics_file = os.path.join("resources", "sectors.json")
    if not os.path.exists(metrics_file):
        print(f"{WARNING}WARNING: '{metrics_file}' not found for " +
              f"'metrics_initializer'. Failed.{NORMAL_COLOR}")
        return None, []

    with open(metrics_file) as m_file:
        m_data = json.load(m_file)

    m_data = m_data.get("Market_Composite")
    sectors = m_data['tickers']
    tickers = " ".join(m_data['tickers'])
    tickers = index_appender(tickers)
    all_tickers = tickers.split(' ')

    if isinstance(period, list):
        period = period[0]

    print(" ")
    print(f'Fetching {name} funds for {period}...')
    data, _ = download_data_indexes(
        indexes=all_tickers, tickers=tickers, period=period, interval='1d')
    print(" ")
    return data, sectors
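# Usage sketch (illustrative assumption): 'period' may arrive either as a plain
# string or as a single-item list (e.g. from a CLI argument parser), which is
# why the function unwraps list inputs before downloading.
#
#   data, sectors = metrics_initializer(period="5y")
#   data, sectors = metrics_initializer(period=["10y"], name="Market Composite Index")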
def init_script(config: dict, **kwargs) -> list:
    """Init Script

    Arguments:
        config {dict} -- startup config object to control application

    Optional Args:
        release {str} -- 'dev' or 'prod' (default: {'prod'})

    Returns:
        list -- script: dataset, funds, periods, config
    """
    release = kwargs.get('release', 'prod')
    if release == 'dev':
        config['process_steps'] = PROCESS_STEPS_DEV
    elif release == 'prod':
        config['process_steps'] = PROCESS_STEPS_PROD

    if config['release'] == True:
        # Use only after release!
        print(" ")
        if release == 'dev':
            message = f"{HEADER_COLOR}~~~~ DEVELOPMENT VERSION ~~~~ "
            message += f"[latest functionality, 'unclean' version]{NORMAL_COLOR}"
            print(message)
            config = start_header(update_release=_DATE_REVISION_DEV_,
                                  version=_DEV_VERSION_, options=True)
            config['process_steps'] = PROCESS_STEPS_DEV
    else:
        if release == 'prod':
            print(
                f"{PROD_COLOR}~~~~ PRODUCTION ENVIRONMENT ~~~~{NORMAL_COLOR}")
            print(" ")

    if config['state'] == 'halt':
        return

    if 'function' in config['state']:
        # If only simple functions are desired, they go into this handler
        only_functions_handler(config)
        return

    if 'no_index' not in config['state']:
        config['tickers'] = index_appender(config['tickers'])
        config['process_steps'] = config['process_steps'] + 2

    if 'debug' in config['state']:
        print(f"{HEADER_COLOR}~~~~ DEBUG MODE ENABLED ~~~~ {NORMAL_COLOR}")

    # Temporary directories to save graphs as images, etc.
    remove_temp_dir()
    configure_temp_dir()

    dataset, funds, periods, config = download_data_all(config=config)

    for period in dataset:
        e_check = {'tickers': config['tickers']}
        if has_critical_error(dataset[period], 'download_data', misc=e_check):
            return None

    return dataset, funds, periods, config
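# Usage sketch (assumption; in practice the config normally comes from
# start_header() and carries more keys than shown). Only the keys init_script
# itself reads -- 'release', 'state', and 'tickers' -- appear below, with
# placeholder values; download_data_all() will likely require others.
#
#   config = {
#       'release': False,
#       'state': 'run',
#       'tickers': 'AAPL MSFT'
#   }
#   result = init_script(config, release='prod')
#   if result is not None:
#       dataset, funds, periods, config = result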