def test_basic_but_ticks(self):
    """Daily vs 30-minute downloads for SBER over one year.

    Checks that both downloads return data, that the 30-minute series is
    proportionally larger than the daily one, that all timestamps fall
    inside the requested range, and that non-tick data carries <CLOSE>
    rather than the tick-only <LAST> column.
    """
    exporter = Exporter()
    start_date = datetime(2015, 1, 1)
    end_date = datetime(2016, 1, 1)
    got_daily = exporter.download(SBER.id, Market.SHARES,
                                  start_date=start_date,
                                  end_date=end_date,
                                  timeframe=Timeframe.DAILY)
    daily_count = len(got_daily)
    assert daily_count > 0
    got_minutes = exporter.download(SBER.id, Market.SHARES,
                                    start_date=start_date,
                                    end_date=end_date,
                                    timeframe=Timeframe.MINUTES30)
    minutes30_count = len(got_minutes)
    assert minutes30_count > daily_count * SHARES_SESSION_MINUTES / 30
    for got in (got_daily, got_minutes):
        # BUG FIX: Timestamp.to_datetime() was deprecated and removed from
        # pandas; to_pydatetime() is the supported conversion and matches
        # the other tests in this suite.
        assert got.index.min().to_pydatetime() >= start_date
        assert got.index.max().to_pydatetime() <= end_date
        assert '<LAST>' not in got.columns
        assert '<CLOSE>' in got.columns
def get_shares(code=None, start_date=None, end_date=None):
    """Download daily candles for a share identified by its ticker code.

    Looks the ticker up in finam's shares listing, downloads daily data
    for the requested period and normalizes the column names.

    :param code: ticker code, e.g. 'SBER' (case-insensitive)
    :param start_date: first date to download (passed through to finam)
    :param end_date: last date to download (passed through to finam)
    :return: DataFrame with lower-cased columns and a 'timestamp' column,
             or None when the ticker is unknown
    """
    if code:
        code = code.upper()
    exporter = Exporter()
    # all shares as a pandas table from finam
    items = exporter.lookup(market=Market.SHARES)
    share_id = items[items.code == code]
    if share_id.empty:
        return None
    share_id = share_id.index[0]
    temp = exporter.download(
        share_id,
        Market.SHARES,  # shares market
        start_date=start_date,
        end_date=end_date,
        timeframe=Timeframe.DAILY)
    temp.reset_index(inplace=True)
    # BUG FIX: the original chopped the first and last character of *every*
    # column (x[1:-1]) to remove finam's '<...>' markers, which mangled the
    # reset index column 'index' into the cryptic 'nde' and then renamed
    # that.  Strip only angle brackets and rename 'index' directly; the
    # resulting columns are identical.
    temp.columns = [x.strip('<>').lower() for x in temp.columns]
    temp.rename(columns={'index': 'timestamp'}, inplace=True)
    return temp
def main():
    """Print a short sample of instrument codes for every finam market."""
    logging.basicConfig(level=logging.DEBUG)
    exporter = Exporter()
    for market in Market:
        header = '{0.name:*^25}'.format(market)
        print(header)
        listing = exporter.lookup(market=market)
        print('Total items: {}'.format(len(listing)))
        sample = ', '.join(listing['code'][:SAMPLE_SIZE])
        print('Sample: {}'.format(sample))
def test_ticks(self):
    """One trading day of SBER ticks: size, timestamp bounds and columns."""
    exporter = Exporter()
    ticks_date = datetime(2016, 10, 27)
    got = exporter.download(SBER.id, Market.SHARES,
                            start_date=ticks_date,
                            end_date=ticks_date,
                            timeframe=Timeframe.TICKS)
    # On average more than one tick per second of the trading session.
    assert len(got) > SHARES_SESSION_MINUTES * 60
    earliest = got.index.min().to_pydatetime()
    assert earliest >= ticks_date
    assert earliest < ticks_date + timedelta(days=1)
    # Tick data carries <LAST> instead of <CLOSE>.
    assert '<LAST>' in got.columns
    assert '<CLOSE>' not in got.columns
def test_ticks(self):
    """One trading day of SBER ticks: size, <DATE> bounds and columns."""
    exporter = Exporter()
    ticks_date = datetime(2016, 10, 27)
    got = exporter.download(SBER.id, Market.SHARES,
                            start_date=ticks_date,
                            end_date=ticks_date,
                            timeframe=Timeframe.TICKS)
    assert len(got) > SHARES_SESSION_MINUTES * 60
    # <DATE> is an integer in YYYYMMDD form; every tick must fall on the
    # requested day.  (The original asserted max() < 20161027 + 1, but
    # adding 1 to a YYYYMMDD integer is not "the next day" in general —
    # e.g. 20161031 + 1 is not a valid date.  <= is what was meant.)
    assert got['<DATE>'].min() >= 20161027
    assert got['<DATE>'].max() <= 20161027
    assert '<LAST>' in got.columns
    assert '<CLOSE>' not in got.columns
def main():
    """Demonstrate lookups across markets with different name comparators."""
    exporter = Exporter()

    print('*** Looking up all RTS futures codes ***')
    rts = exporter.lookup(
        market=[Market.FUTURES_ARCHIVE, Market.FUTURES],
        name='RTS-',
        name_comparator=LookupComparator.STARTSWITH)
    print(','.join(rts['code']))

    print('*** Looking up Russian Ministry of Finance\'s bonds ***')
    bonds = exporter.lookup(market=Market.BONDS,
                            name=u'ОФЗ',
                            name_comparator=LookupComparator.STARTSWITH)
    print(bonds)

    print('*** Looking up Microsoft ***')
    msft = exporter.lookup(market=Market.USA,
                           name='Microsoft',
                           name_comparator=LookupComparator.CONTAINS)
    print(msft)
def main(contracts, market, timeframe, destdir, lineterm,
         delay, startdate, enddate, skiperr, ext):
    """Download historical data for the given contracts, or a whole market.

    For each contract: look it up (optionally restricted to the given
    market), download the requested timeframe and write a CSV into
    ``destdir``; unknown contracts abort the run, download errors are
    skipped or re-raised depending on ``skiperr``.
    """
    exporter = Exporter()
    if not any((contracts, market)):
        raise click.BadParameter('Neither contracts nor market is specified')

    market_filter = dict()
    if market:
        market_filter.update(market=Market[market])
    if not contracts:
        contracts = exporter.lookup(**market_filter)['code'].tolist()

    for contract_code in contracts:
        # Use the module logger consistently (the original mixed the root
        # logger via logging.info with the module-level `logger`).
        logger.info('Handling {}'.format(contract_code))
        try:
            # FIX: don't rebind `contracts` — the sequence being iterated —
            # inside its own loop; use a separate name for the lookup result.
            found = exporter.lookup(code=contract_code, **market_filter)
        except FinamObjectNotFoundError:
            logger.error('unknown contract "{}"'.format(contract_code))
            sys.exit(1)
        else:
            contract = found.reset_index().iloc[0]
        logger.info(u'Downloading contract {}'.format(contract))
        try:
            data = exporter.download(contract.id,
                                     start_date=startdate,
                                     end_date=enddate,
                                     timeframe=Timeframe[timeframe],
                                     market=Market(contract.market))
        except FinamExportError as e:
            if skiperr:
                logger.error(repr(e))
                continue
            else:
                raise
        destpath = os.path.join(destdir, '{}-{}.{}'
                                .format(contract.code, timeframe, ext))
        data.to_csv(destpath, index=False, line_terminator=lineterm)
        if delay > 0:
            logger.info('Sleeping for {} second(s)'.format(delay))
            time.sleep(delay)
def main(contract, market):
    """Look up a single contract by code, or list an entire market."""
    exporter = Exporter()
    has_contract = bool(contract)
    has_market = bool(market)
    if has_contract and has_market:
        raise click.BadParameter('Either contract or market must be specified')
    if not (has_contract or has_market):
        raise click.BadParameter('Neither contract nor market is specified')
    # Show full listings instead of pandas' truncated default.
    pd.options.display.max_rows = 1000
    if contract:
        try:
            meta = exporter.lookup(code=contract)
        except FinamObjectNotFoundError:
            logger.info('No such contract')
        else:
            print(meta)
    else:
        print(exporter.lookup(market=Market[market]))
def main(contracts, market, timeframe, destdir, lineterm,
         delay, startdate, enddate, skiperr):
    """Download CSVs for the given contracts, or for a whole market.

    Each contract is looked up, downloaded at the requested timeframe and
    written to ``destdir``; download errors are skipped or re-raised
    depending on ``skiperr``, with an optional delay between downloads.
    """
    exporter = Exporter()
    if all((contracts, market)):
        raise click.BadParameter('Either contracts or '
                                 'market must be specified')
    elif not any((contracts, market)):
        raise click.BadParameter('Neither contracts nor market is specified')
    elif market:
        contracts = exporter.lookup(market=Market[market])['code'].tolist()

    for contract_code in contracts:
        logging.info('Handling {}'.format(contract_code))
        try:
            # FIX: don't rebind `contracts` — the sequence being iterated —
            # inside its own loop; use a separate name for the lookup result.
            found = exporter.lookup(code=contract_code)
        except FinamObjectNotFoundError:
            raise RuntimeError('unknown contract "{}"'.format(contract_code))
        else:
            contract = found.reset_index().iloc[0]
        logger.info(u'Downloading contract {}'.format(contract))
        try:
            data = exporter.download(contract.id,
                                     start_date=startdate,
                                     end_date=enddate,
                                     timeframe=Timeframe[timeframe],
                                     market=Market(contract.market))
        except FinamExportError as e:
            if skiperr:
                # BUG FIX: Python 3 exceptions have no `.message` attribute,
                # so the original `logger.error(e.message)` raised
                # AttributeError exactly when an error needed logging.
                logger.error(repr(e))
                continue
            else:
                raise
        destpath = os.path.join(destdir,
                                '{}-{}.csv'.format(contract.code, timeframe))
        data.to_csv(destpath, line_terminator=lineterm)
        if delay > 0:
            logger.info('Sleeping for {} second(s)'.format(delay))
            time.sleep(delay)
def download_contract_data(contract: ContractSpec,
                           timeframe: Timeframe) -> pd.DataFrame:
    """Download candles for one futures contract and normalize the result.

    The contract is looked up by its "FULL(SHORT)" display name; data is
    requested from three months before expiration until one month after
    (capped at today).  Finam's '<...>' price columns are renamed and a
    'contract' column is prepended.

    :raises ValueError: when the lookup does not match exactly one row
    """
    exporter = Exporter()
    display_name = f"{contract.full_code}({contract.short_code})"
    lookup_df = exporter.lookup(
        name=display_name,
        name_comparator=LookupComparator.EQUALS,
    )
    if len(lookup_df.index) != 1:
        raise ValueError(
            f"Contract lookup failed. Returned {len(lookup_df.index)} rows. "
            f"Names: {', '.join(lookup_df['name'])}")

    today = datetime.date.today()
    start_date = contract.expiration - relativedelta(months=3, day=1)
    # Cap the end of the window at today for not-yet-expired contracts.
    end_date = min(contract.expiration + relativedelta(months=1, day=1),
                   today)

    df = exporter.download(
        lookup_df.index[0],
        Market(lookup_df["market"].iloc[0]),
        timeframe=timeframe,
        start_date=start_date,
        end_date=end_date,
    )
    column_map = {
        "<OPEN>": "open",
        "<HIGH>": "high",
        "<LOW>": "low",
        "<CLOSE>": "close",
        "<VOL>": "volume",
    }
    df = df.rename(columns=column_map)
    df.insert(0, "contract", contract.full_code)
    return df
def main():
    """Print the latest USDRUB_TOD exchange rate and Brent oil price."""
    exporter = Exporter()

    print('*** Current Russian ruble exchange rates ***')
    rub = exporter.lookup(name='USDRUB_TOD', market=Market.CURRENCIES)
    assert len(rub) == 1
    rub_data = exporter.download(rub.index[0], market=Market.CURRENCIES)
    print(rub_data.tail(1))

    print('*** Current Brent Oil price ***')
    oil = exporter.lookup(name='Brent', market=Market.COMMODITIES,
                          name_comparator=LookupComparator.EQUALS)
    assert len(oil) == 1
    oil_data = exporter.download(oil.index[0], market=Market.COMMODITIES)
    print(oil_data.tail(1))
def setup(self):
    # Run the mixins' setup first so the mock objects exist before we
    # configure them.
    super(TestExporter, self).setup()
    # Serve a known-good meta fixture so Exporter() can be constructed
    # without any network access.
    self.mock_meta.return_value = fixtures.meta_valid__split
    self.exporter = Exporter()
class TestExporter(MockedExporterMixin, MockedMetaMixin):
    """Exporter behavior against canned finam responses (no network)."""

    def setup(self):
        # Run the mixins' setup first so the mock objects exist, then
        # serve a known-good meta fixture so Exporter() can initialize.
        super(TestExporter, self).setup()
        self.mock_meta.return_value = fixtures.meta_valid__split
        self.exporter = Exporter()

    def test_results_except_ticks(self):
        """Non-tick timeframes: increasing datetime index, OHLCV columns."""
        for timeframe in (Timeframe.DAILY,
                          Timeframe.MINUTES30,
                          Timeframe.MONTHLY):
            fixture = 'data_sber_{}'.format(timeframe.name.lower())
            self.mock_exporter.return_value = getattr(fixtures, fixture)
            got = self.exporter.download(SBER.id, Market.SHARES,
                                         timeframe=timeframe)
            # Strictly increasing timestamps and the standard candle columns.
            assert got.index[1] - got.index[0] > datetime.timedelta(0)
            assert got.columns.equals(
                pd.Index(['<OPEN>', '<HIGH>', '<LOW>', '<CLOSE>', '<VOL>']))
            assert got.sort_index().equals(got)

    def test_results_ticks(self):
        """Ticks: duplicate timestamps allowed, tick-specific columns."""
        self.mock_exporter.return_value = fixtures.data_sber_ticks
        got = self.exporter.download(SBER.id, Market.SHARES,
                                     timeframe=Timeframe.TICKS)
        assert got.index[1] - got.index[0] == datetime.timedelta(0)
        assert got.columns.equals(
            pd.Index(['<TICKER>', '<PER>', '<LAST>', '<VOL>']))
        # we need a stable sorting algorithm here
        assert got.sort_index(kind='mergesort').equals(got)

    def test_timeframe_too_long(self):
        """Finam's "too much data wanted" marker raises the dedicated error."""
        self.mock_exporter.return_value = ('some noise\n\n'
                                           + Exporter.ERROR_TOO_MUCH_WANTED
                                           + ' noise\n').encode(FINAM_CHARSET)
        with assert_raises(FinamTooLongTimeframeError):
            self.exporter.download(SBER.id, Market.SHARES)

    def test_sanity_checks(self):
        """Unparseable payloads and throttling pages raise specific errors."""
        self.mock_exporter.return_value = 'any\nstring'.encode(FINAM_CHARSET)
        with assert_raises(FinamParsingError):
            self.exporter.download(SBER.id, Market.SHARES)
        self.mock_exporter.return_value = (
            '<html><h1>Forbidden: Access is denied</h1></html>')
        with assert_raises(FinamThrottlingError):
            self.exporter.download(SBER.id, Market.SHARES)

    @mock.patch('finam.export.pd.read_csv', return_value=pd.DataFrame())
    def test_remote_calls(self, read_csv_mock):
        """Each timeframe produces exactly the expected download URL."""
        # any valid data would do in this mock
        self.mock_exporter.return_value = fixtures.data_sber_daily
        url_pattern = 'http://export.finam.ru/table.csv?sep=3&at=1&e=.csv&d=d&f=table&dtf=1&MSOR=0&tmf=3&mstimever=1&mstime=on&sep2=1&em=3&code=SBER&cn=SBER&df=27&yf=2016&dt=27&datf={datf}&yt=2016&market=1&mf=9&mt=9&p={timeframe}'  # noqa
        start_date = datetime.date(2016, 10, 27)
        end_date = datetime.date(2016, 10, 27)
        for timeframe in Timeframe:
            # IDIOM FIX: the original used `cond and 6 or 5`, a pre-ternary
            # hack that silently breaks whenever the "true" value is falsy;
            # a conditional expression gives the same result safely.
            datf = 6 if timeframe == Timeframe.TICKS else 5
            expected = url_pattern.format(timeframe=timeframe.value,
                                          datf=datf)
            self.exporter.download(SBER.id, Market.SHARES,
                                   start_date, end_date, timeframe)
            self.mock_exporter.assert_called_once()
            assert urls_equal(expected, self.mock_exporter.call_args[0][0])
            self.mock_exporter.reset_mock()
def get_asset_data(assetCode):
    """Resolve a ticker code into its finam id, full name and market.

    :param assetCode: ticker code, e.g. 'SBER'
    :return: tuple of (assetId, assetName, market)
    """
    exporter = Exporter()
    ind = exporter.lookup(code=assetCode)
    assetId = ind.index.values[0]
    # FIX: the original rebound the `assetCode` parameter while unpacking,
    # shadowing the input for no reason; discard that field instead.
    # Assumes the lookup row is ordered (name, code, market) — matches the
    # original unpacking.
    assetName, _code, market = ind.values[0]
    return assetId, assetName, market
def get_data_by_code(assetCode, start_date=datetime.date(2008, 1, 1),
                     end_date=None, timeframe=Timeframe.DAILY):
    '''Download finam historical data for an asset identified only by its
    ticker code.

    DAILY and coarser timeframes are fetched in a single request; intraday
    timeframes are fetched one calendar year at a time and TICKS one
    trading day at a time, sleeping between every third request to avoid
    finam throttling.  Columns are lower-cased with the '<...>' markers
    stripped (except in the TICKS branch, where that step is commented
    out).

    NOTE(review): the `>=`/`>` comparisons assume Timeframe is an ordered
    enum where coarser timeframes compare greater — confirm against the
    finam-export Timeframe definition.
    '''
    ts = 2  # seconds to sleep between request batches
    assetId, assetName, market = get_asset_data(assetCode)
    print("assetId:{}, assetName:{}, market:{}".format(assetId, assetName, market))
    exporter = Exporter()
    if timeframe >= Timeframe.DAILY:
        # Coarse data is small enough for one request.
        print('download all')
        data = exporter.download(assetId,
                                 market=Market(market),
                                 start_date=start_date,
                                 end_date=end_date,
                                 timeframe=timeframe)
        data.columns = [
            col.replace("<", "").replace(">", "").lower()
            for col in data.columns
        ]
        return data
    elif timeframe > Timeframe.TICKS:
        # Intraday bars: first fetch the daily index to learn which trading
        # days exist, then download year-sized chunks.
        print("timeframe is {}, download by days".format(timeframe))
        dates = exporter.download(assetId,
                                  market=Market(market),
                                  start_date=start_date,
                                  end_date=end_date,
                                  timeframe=Timeframe.DAILY).index
        years = dates.year.unique()
        downloaded_list = []
        counter = 0
        for year in years:
            # First and last trading day of this calendar year.
            y_dates = dates[dates.year == year]
            date_start = datetime.date(y_dates[0].year, y_dates[0].month,
                                       y_dates[0].day)
            date_end = datetime.date(y_dates[-1].year, y_dates[-1].month,
                                     y_dates[-1].day)
            print(date_start, date_end)
            downloaded_list.append(
                exporter.download(assetId,
                                  market=Market(market),
                                  start_date=date_start,
                                  end_date=date_end,
                                  timeframe=timeframe))
            counter += 1
            # Pause after every third request to stay under rate limits.
            if counter == 3:
                print('pause {} sec'.format(ts))
                time.sleep(ts)
                counter = 0
        data = pd.concat(downloaded_list)
        data.columns = [
            col.replace("<", "").replace(">", "").lower()
            for col in data.columns
        ]
        return data
    elif timeframe == Timeframe.TICKS:
        # Ticks are heavy: download one trading day per request.
        print("timeframe is {}, download by days".format(timeframe))
        dates = exporter.download(assetId,
                                  market=Market(market),
                                  start_date=start_date,
                                  end_date=end_date,
                                  timeframe=Timeframe.DAILY).index
        time.sleep(ts)
        downloaded_list = []
        counter = 0
        for d in dates:
            date = (datetime.date(d.year, d.month, d.day))
            print(date)
            downloaded_list.append(
                exporter.download(assetId,
                                  market=Market(market),
                                  start_date=date,
                                  end_date=date,
                                  timeframe=timeframe))
            counter += 1
            # Pause after every third request to stay under rate limits.
            if counter == 3:
                print('pause {} sec'.format(ts))
                time.sleep(ts)
                counter = 0
        data = pd.concat(downloaded_list)
        # Unlike the other branches, column normalization is left disabled
        # here in the original source.
        #data.columns = [col.replace("<","").replace(">","").lower() for col in data.columns]
        return data