def do_busca_carteira():
    """Fetch the CEI portfolio for the reference date and dump it to a CSV file."""
    from src.crawler_cei import CrawlerCei

    crawler = CrawlerCei(headless=True)
    portfolio = crawler.busca_carteira(data=data_referencia)
    destino = WORK_DIR + nome_com_referencia('carteira_cei.txt')
    df_to_csv(portfolio, destino, colunas=portfolio.columns, header=True)
def test_busca_carteira(self):
    """Crawl the CEI portfolio for a fixed date and validate the result files."""
    # Keep test artifacts separated per CPF (one sub-directory per user).
    directory = os.getenv('DIR_TESTES_CEI', './temp/tests_cei_carteira/') + os.environ['CPF'] + '/'
    # exist_ok avoids the check-then-create race of os.path.exists + makedirs.
    os.makedirs(directory, exist_ok=True)
    crawler_cei = CrawlerCei(headless=True, directory=directory, debug=True)
    carteira = crawler_cei.busca_carteira(datetime.date(2020, 12, 31))
    # NOTE(review): the sibling HTML test calls self.verifica_carteira; confirm
    # this module-level verifica_carteira call is intentional.
    verifica_carteira(carteira, directory)
def do_salva_carteira(filepath):
    """Parse a previously saved CEI portfolio HTML file and write it out as CSV."""
    from src.crawler_cei import CrawlerCei

    crawler = CrawlerCei(headless=True)
    with open(filepath, 'r') as html_file:
        conteudo = html_file.read()
    carteira = crawler.converte_html_carteira(conteudo)
    df_to_csv(carteira, WORK_DIR + nome_com_referencia('carteira_cei.txt'),
              colunas=carteira.columns, header=True)
def test_busca_carteira_html(self):
    """Convert a stored portfolio HTML fixture and validate the resulting DataFrame."""
    # Keep test artifacts separated per CPF (one sub-directory per user).
    directory = os.getenv('DIR_TESTES_CEI', './temp/tests_cei_carteira/') + os.environ['CPF'] + '/'
    # exist_ok avoids the check-then-create race of os.path.exists + makedirs.
    os.makedirs(directory, exist_ok=True)
    crawler_cei = CrawlerCei(headless=True, directory=directory, debug=True)
    with open(HTML_CARTEIRA) as file:
        carteira = crawler_cei.converte_html_carteira(file.read())
    self.verifica_carteira(carteira, directory)
def test_busca_trades(self):
    """Crawl trades into ../public/ and check that a non-empty DataFrame comes back."""
    # Local import kept (this snippet does not rely on a module-level `import os`),
    # but hoisted to the top of the function per convention.
    import os

    directory = '../public/'
    # exist_ok avoids the check-then-create race of os.path.exists + makedirs.
    os.makedirs(directory, exist_ok=True)
    crawler_cei = CrawlerCei(headless=True, directory=directory, debug=True)
    trades = crawler_cei.busca_trades()
    # isinstance is the idiomatic type check; len > 0 proves data was scraped.
    assert isinstance(trades, pd.DataFrame)
    assert len(trades)
def do_busca_trades_e_faz_merge_operacoes():
    """Crawl trades from CEI, merge them with the stored operations and sync Dropbox."""
    from src.crawler_cei import CrawlerCei
    from src.dropbox_files import download_dropbox_file

    trades_cei = CrawlerCei(headless=True).busca_trades()
    download_dropbox_file()
    operacoes = merge_operacoes(get_operations_dataframe(), trades_cei)
    df_to_csv(operacoes, OPERATIONS_FILEPATH)
    upload_dropbox_file(OPERATIONS_FILEPATH, os.environ['DROPBOX_FILE_LOCATION'])
def do_busca_trades_e_faz_merge_operacoes():
    """Crawl trades from CEI, merge them into the operations file and re-upload it."""
    from src.crawler_cei import CrawlerCei
    from src.dropbox_files import download_dropbox_file

    trades_cei = CrawlerCei(headless=True).busca_trades()
    download_dropbox_file(OPERATIONS_FILEPATH)
    operacoes = get_operations(filepath=OPERATIONS_FILEPATH)
    operacoes = merge_operacoes(operacoes, trades_cei)
    df_to_csv(operacoes, OPERATIONS_FILEPATH)
    # NOTE(review): the second argument is None here but a Dropbox location in the
    # sibling variant — confirm upload_dropbox_file handles a missing destination.
    upload_dropbox_file(OPERATIONS_FILEPATH, None)
def test_busca_trades(self):
    """Crawl trades keeping extra columns and verify the expected schema."""
    # Keep test artifacts separated per CPF (one sub-directory per user).
    directory = os.getenv('DIR_TESTES_CEI', './temp/tests_cei_trades/') + os.environ['CPF'] + '/'
    # exist_ok avoids the check-then-create race of os.path.exists + makedirs.
    os.makedirs(directory, exist_ok=True)
    crawler_cei = CrawlerCei(headless=True, directory=directory, debug=True)
    trades = crawler_cei.busca_trades(dropExtras=False)
    # isinstance is the idiomatic type check; len > 0 proves data was scraped.
    assert isinstance(trades, pd.DataFrame)
    assert len(trades)
    # One loop instead of eight copy-pasted asserts; on failure the message names
    # the missing column. (Leftover debug print of the columns removed.)
    expected = ('valor', 'data', 'ticker', 'taxas', 'operacao',
                'aquisicao_via', 'qtd_ajustada', 'qtd')
    for coluna in expected:
        assert coluna in trades.columns, coluna
    # Write a file with the full data (no columns dropped) to ease manual analysis.
    with open(directory + "todos_trades.txt", "w") as file:
        file.write(tabulate(trades, headers=trades.columns, showindex=True, tablefmt='psql'))