def ScreenerView(self, limit=-1, verbose=1):
    """Scrape every screener result page and collect the tickers.

    Args:
        limit(int): upper bound on the number of tickers to fetch;
            -1 (the default) means no limit.
        verbose(int): choice of visual the progress. 1 for visualize progress.

    Returns:
        tickers(list): get all the tickers as list, or None when the
        screener matched nothing.
    """
    first_soup = webScrap(self.url)
    page_count = self._get_page(first_soup)
    if page_count == 0:
        if verbose == 1:
            print('No ticker found.')
        return None
    # Each result page holds 1000 tickers; cap the page count when a limit is set.
    if limit != -1:
        page_count = min(page_count, (limit - 1) // 1000 + 1)
    if verbose == 1:
        progressBar(1, page_count)
    tickers = self._screener_helper(0, page_count, first_soup, [], limit)
    for page_idx in range(1, page_count):
        if verbose == 1:
            progressBar(page_idx + 1, page_count)
        # '&r=' is the 1-based row offset of the requested page.
        page_soup = webScrap(self.url + '&r={}'.format(page_idx * 1000 + 1))
        tickers = self._screener_helper(page_idx, page_count, page_soup, tickers, limit)
    return tickers
def ScreenerView(self, order='ticker', limit=-1, verbose=1, ascend=True):
    """Get screener table.

    Args:
        order(str): sort the table by the choice of order; must be
            'ticker' or a key of ``self.order_dict``.
        limit(int): set the top k rows of the screener (-1 for no limit).
        verbose(int): choice of visual the progress. 1 for visualize progress.
        ascend(bool): if True, the order is ascending.

    Returns:
        df(pandas.DataFrame): screener information table, or None when
        the screener matched nothing.

    Raises:
        ValueError: if `order` is not a supported sort key.
    """
    url = self.url
    if order != 'ticker':
        if order not in self.order_dict:
            # Fix: the original raised a bare ValueError() with no message.
            raise ValueError(
                "Invalid order '{}'. Available choices are: {}".format(
                    order, list(self.order_dict.keys())))
        url = self.url + '&' + self.order_dict[order]
    if not ascend:
        url = url.replace('o=', 'o=-')
    soup = webScrap(url)
    page = self._get_page(soup)
    if page == 0:
        print('No ticker found.')
        return None
    # Each result page holds 20 rows; cap the page count when a limit is set.
    if limit != -1:
        page = min(page, (limit - 1) // 20 + 1)
    if verbose == 1:
        progressBar(1, page)
    # Table index 18 is where finviz places the screener grid on this layout.
    table = soup.findAll('table')[18]
    rows = table.findAll('tr')
    table_header = [i.text for i in rows[0].findAll('td')][1:]
    num_col_index = [table_header.index(i) for i in table_header if i in NUMBER_COL]
    df = pd.DataFrame([], columns=table_header)
    df = self._screener_helper(0, page, rows, df, num_col_index, table_header, limit)
    for i in range(1, page):
        if verbose == 1:
            progressBar(i + 1, page)
        # Both sorted and unsorted requests paginate with the same '&r='
        # row offset; only the sort fragment differs (the original if/else
        # duplicated the '&r={}' part in both branches).
        url = self.url + '&r={}'.format(i * 20 + 1)
        if order != 'ticker':
            url += '&' + self.order_dict[order]
        if not ascend:
            url = url.replace('o=', 'o=-')
        soup = webScrap(url)
        rows = soup.findAll('table')[18].findAll('tr')
        df = self._screener_helper(i, page, rows, df, num_col_index, table_header, limit)
    return df