def main():
    """Download a Quandl sample table and write it plus mock metadata locally.

    Reads the API key from the ``QUANDL_API_KEY`` environment variable,
    fetches one year of daily data for a fixed symbol set, stores the CSV
    payload in a zip archive, and writes a one-row mock metadata file.

    Side effects: network request, and archive files written via
    ``zipfile_path`` / ``write_compressed`` (project helpers).
    """
    api_key = os.environ.get('QUANDL_API_KEY')
    start_date = '2014-1-1'
    end_date = '2015-1-1'
    symbols = 'AAPL', 'BRK_A', 'MSFT', 'ZEN'
    url = format_table_query(
        api_key=api_key,
        start_date=start_date,
        end_date=end_date,
        symbols=symbols
    )
    print('Fetching equity data from %s' % url)
    response = requests.get(url)
    response.raise_for_status()

    archive_path = zipfile_path('QUANDL_ARCHIVE.zip')
    print('Writing compressed table to %s' % archive_path)
    with ZipFile(archive_path, 'w') as zip_file:
        # response.content is already bytes; the original wrapped it in
        # BytesIO(...).getvalue(), a no-op round-trip removed here.
        zip_file.writestr(
            'QUANDL_SAMPLE_TABLE.csv',
            response.content,
            ZIP_DEFLATED,
        )

    print('Writing mock metadata')
    # Header tuple's last element carries the newline separating it from
    # the data row once both joins are concatenated.
    cols = (
        'file.link',
        'file.status',
        'file.data_snapshot_time',
        'datatable.last_refreshed_time\n',
    )
    row = (
        'https://file_url.mock.quandl',
        'fresh',
        '2017-10-17 23:48:25 UTC',
        '2017-10-17 23:48:15 UTC\n',
    )
    metadata = ','.join(cols) + ','.join(row)
    path = zipfile_path('metadata.csv.gz')
    print('Writing compressed metadata to %s' % path)
    write_compressed(path, metadata)
def main():
    """Download a Quandl sample table and write it plus mock metadata locally.

    Reads the API key from the ``QUANDL_API_KEY`` environment variable,
    fetches one year of daily data for a fixed symbol set, stores the CSV
    payload in a zip archive, and writes a one-row mock metadata file.

    Side effects: network request, and archive files written via
    ``zipfile_path`` / ``write_compressed`` (project helpers).
    """
    api_key = os.environ.get("QUANDL_API_KEY")
    start_date = "2014-1-1"
    end_date = "2015-1-1"
    symbols = "AAPL", "BRK_A", "MSFT", "ZEN"
    url = format_table_query(
        api_key=api_key,
        start_date=start_date,
        end_date=end_date,
        symbols=symbols,
    )
    print("Fetching equity data from %s" % url)
    response = requests.get(url)
    response.raise_for_status()

    archive_path = zipfile_path("QUANDL_ARCHIVE.zip")
    print("Writing compressed table to %s" % archive_path)
    with ZipFile(archive_path, "w") as zip_file:
        # response.content is already bytes; the original wrapped it in
        # BytesIO(...).getvalue(), a no-op round-trip removed here.
        zip_file.writestr(
            "QUANDL_SAMPLE_TABLE.csv",
            response.content,
            ZIP_DEFLATED,
        )

    print("Writing mock metadata")
    # Header tuple's last element carries the newline separating it from
    # the data row once both joins are concatenated.
    cols = (
        "file.link",
        "file.status",
        "file.data_snapshot_time",
        "datatable.last_refreshed_time\n",
    )
    row = (
        "https://file_url.mock.quandl",
        "fresh",
        "2017-10-17 23:48:25 UTC",
        "2017-10-17 23:48:15 UTC\n",
    )
    metadata = ",".join(cols) + ",".join(row)
    path = zipfile_path("metadata.csv.gz")
    print("Writing compressed metadata to %s" % path)
    write_compressed(path, metadata)
def main():
    """Fetch one year of WIKI price data per symbol and write mock metadata.

    For each of four fixed symbols, downloads the data via
    ``format_wiki_url`` (no API key) and compresses it to disk, then
    writes two mock metadata archives: one with a row per symbol and one
    containing only the header line.
    """
    start_date = pd.Timestamp('2014')
    end_date = pd.Timestamp('2015')
    symbols = 'AAPL', 'MSFT', 'BRK_A', 'ZEN'
    names = (
        'Apple Inc.',
        'Microsoft Corporation',
        'Berkshire Hathaway Inc. Class A',
        'Zendesk Inc',
    )

    print('Downloading equity data')
    for sym in symbols:
        url = format_wiki_url(
            api_key=None,
            symbol=sym,
            start_date=start_date,
            end_date=end_date,
        )
        print('Fetching from %s' % url)
        response = requests.get(url)
        response.raise_for_status()
        path = zipfile_path(sym)
        print('Writing compressed data to %s' % path)
        write_compressed(path, response.content)

    print('Writing mock metadata')
    cols = b'dataset_code,name,oldest_available_date,newest_available_date\n'
    # Build one ASCII-encoded CSV row per (symbol, name) pair; the date
    # columns are constant across rows.
    rows = []
    for symbol, name in zip(symbols, names):
        fields = (symbol, name, str(start_date.date()), str(end_date.date()))
        rows.append(b','.join(field.encode('ascii') for field in fields))
    metadata = cols + b'\n'.join(rows)

    path = zipfile_path('metadata-1')
    print('Writing compressed data to %s' % path)
    write_compressed(path, metadata)

    # Second archive holds only the header line (an "empty" metadata file).
    path = zipfile_path('metadata-2')
    print('Writing compressed data to %s' % path)
    write_compressed(path, cols)
def main():
    """Fetch one year of WIKI price data per symbol and write mock metadata.

    Downloads each symbol's data via ``format_wiki_url`` (no API key),
    compresses it to disk, then emits two mock metadata archives: a full
    one with a row per symbol and a header-only one.
    """
    start_date = pd.Timestamp('2014')
    end_date = pd.Timestamp('2015')
    symbols = 'AAPL', 'MSFT', 'BRK_A', 'ZEN'
    names = (
        'Apple Inc.',
        'Microsoft Corporation',
        'Berkshire Hathaway Inc. Class A',
        'Zendesk Inc',
    )

    print('Downloading equity data')
    for ticker in symbols:
        target = format_wiki_url(
            api_key=None,
            symbol=ticker,
            start_date=start_date,
            end_date=end_date,
        )
        print('Fetching from %s' % target)
        reply = requests.get(target)
        reply.raise_for_status()
        out_path = zipfile_path(ticker)
        print('Writing compressed data to %s' % out_path)
        write_compressed(out_path, reply.content)

    print('Writing mock metadata')
    cols = b'dataset_code,name,oldest_available_date,newest_available_date\n'
    # Dates are the same for every row; only symbol/name vary.
    oldest = str(start_date.date())
    newest = str(end_date.date())
    metadata = cols + b'\n'.join(
        b','.join(
            part.encode('ascii')
            for part in (symbol, name, oldest, newest)
        )
        for symbol, name in zip(symbols, names)
    )

    out_path = zipfile_path('metadata-1')
    print('Writing compressed data to %s' % out_path)
    write_compressed(out_path, metadata)

    # Header-only variant of the metadata file.
    out_path = zipfile_path('metadata-2')
    print('Writing compressed data to %s' % out_path)
    write_compressed(out_path, cols)