Example #1
    def ScreenerView(self, limit=-1, verbose=1):
        """Get screener table.

        Args:
            verbose(int): choice of visual the progress. 1 for visualize progress.
        Returns:
            tickers(list): get all the tickers as list.
        """
        soup = webScrap(self.url)
        page = self._get_page(soup)
        if page == 0:
            print('No ticker found.')
            return None

        if limit != -1:
            if page > (limit - 1) // 1000 + 1:
                page = (limit - 1) // 1000 + 1

        if verbose == 1:
            print('[Info] loading page 1/{} ...'.format(page))

        tickers = []
        tickers = self._screener_helper(0, page, soup, tickers, limit)

        for i in range(1, page):
            if verbose == 1:
                print('[Info] loading page {}/{} ...'.format((i + 1), page))
            soup = webScrap(self.url + '&r={}'.format(i * 1000 + 1))
            tickers = self._screener_helper(i, page, soup, tickers, limit)
        return tickers
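
The page clamp above relies on the ticker view listing 1,000 rows per page, so `(limit - 1) // 1000 + 1` is the smallest page count that covers `limit` rows. A minimal standalone sketch of that arithmetic (not part of the class above):

    # pages needed to cover `limit` rows when each page holds `per_page` rows
    def pages_needed(limit, per_page=1000):
        return (limit - 1) // per_page + 1

    assert pages_needed(1000) == 1   # exactly one full page
    assert pages_needed(1001) == 2   # one extra row spills onto a second page
    assert pages_needed(2500) == 3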
Example #2
    def __init__(self, option='latest'):
        """Initialize module.

        Args:
            option(str): insider transaction view. One of 'latest',
                'latest buys', 'latest sales', 'top week', 'top week buys',
                'top week sales', 'top owner trade', 'top owner buys',
                'top owner sales', or a digit string passed as the `oc`
                URL parameter.
        """
        if option == 'latest':
            self.soup = webScrap(INSIDER_URL)
        elif option == 'latest buys':
            self.soup = webScrap(INSIDER_URL + '?tc=1')
        elif option == 'latest sales':
            self.soup = webScrap(INSIDER_URL + '?tc=2')
        elif option == 'top week':
            self.soup = webScrap(INSIDER_URL +
                                 '?or=-10&tv=100000&tc=7&o=-transactionValue')
        elif option == 'top week buys':
            self.soup = webScrap(INSIDER_URL +
                                 '?or=-10&tv=100000&tc=1&o=-transactionValue')
        elif option == 'top week sales':
            self.soup = webScrap(INSIDER_URL +
                                 '?or=-10&tv=100000&tc=2&o=-transactionValue')
        elif option == 'top owner trade':
            self.soup = webScrap(INSIDER_URL +
                                 '?or=10&tv=1000000&tc=7&o=-transactionValue')
        elif option == 'top owner buys':
            self.soup = webScrap(INSIDER_URL +
                                 '?or=10&tv=1000000&tc=1&o=-transactionValue')
        elif option == 'top owner sales':
            self.soup = webScrap(INSIDER_URL +
                                 '?or=10&tv=1000000&tc=2&o=-transactionValue')
        elif option.isdigit():
            self.soup = webScrap(INSIDER_URL + '?oc=' + option + '&tc=7')
        else:
            raise ValueError()
        self.df = None
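
The if/elif chain simply maps an option name to a query string appended to INSIDER_URL. A sketch of an equivalent table-driven mapping (the dict mirrors the branches above; `_insider_url` is a hypothetical helper, not the library's code):

    # same option -> query-string mapping as the if/elif chain, kept in one dict
    _OPTION_PARAMS = {
        'latest': '',
        'latest buys': '?tc=1',
        'latest sales': '?tc=2',
        'top week': '?or=-10&tv=100000&tc=7&o=-transactionValue',
        'top week buys': '?or=-10&tv=100000&tc=1&o=-transactionValue',
        'top week sales': '?or=-10&tv=100000&tc=2&o=-transactionValue',
        'top owner trade': '?or=10&tv=1000000&tc=7&o=-transactionValue',
        'top owner buys': '?or=10&tv=1000000&tc=1&o=-transactionValue',
        'top owner sales': '?or=10&tv=1000000&tc=2&o=-transactionValue',
    }

    def _insider_url(option):
        if option.isdigit():
            return INSIDER_URL + '?oc=' + option + '&tc=7'
        return INSIDER_URL + _OPTION_PARAMS[option]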
Example #3
    def ScreenerView(self, order='ticker', limit=-1, verbose=1, ascend=True):
        """Get screener table.

        Args:
            order(str): sort the table by the choice of order.
            limit(int): set the top k rows of the screener.
            verbose(int): whether to show progress. 1 to show the progress bar.
            ascend(bool): if True, the order is ascending.
        Returns:
            df(pandas.DataFrame): screener information table
        """
        url = self.url
        if order != 'ticker':
            if order not in self.order_dict:
                raise ValueError()
            url = self.url+'&'+self.order_dict[order]
        if not ascend:
            url = url.replace('o=', 'o=-')
        soup = webScrap(url)

        page = self._get_page(soup)
        if page == 0:
            print('No ticker found.')
            return None

        if limit != -1:
            if page > (limit-1)//20+1:
                page = (limit-1)//20+1

        if verbose == 1:
            progressBar(1, page)
        table = soup.findAll('table')[18]
        rows = table.findAll('tr')
        table_header = [i.text for i in rows[0].findAll('td')][1:]
        num_col_index = [table_header.index(i) for i in table_header if i in NUMBER_COL]
        df = pd.DataFrame([], columns=table_header)
        df = self._screener_helper(0, page, rows, df, num_col_index, table_header, limit)

        for i in range(1, page):
            if verbose == 1:
                progressBar(i+1, page)

            url = self.url
            if order == 'ticker':
                url += '&r={}'.format(i * 20 + 1)
            else:
                url += '&r={}'.format(i * 20 + 1) + '&' + self.order_dict[order]
            if not ascend:
                url = url.replace('o=', 'o=-')
            soup = webScrap(url)
            table = soup.findAll('table')[18]
            rows = table.findAll('tr')
            df = self._screener_helper(i, page, rows, df, num_col_index, table_header, limit)
        return df
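
The `ascend` flag works at the URL level: finviz encodes a descending sort by prefixing the sort key with `-` in the `o=` parameter, which is exactly what `url.replace('o=', 'o=-')` produces. A small illustration (the base URL below is just an example value):

    base = 'https://finviz.com/screener.ashx?v=111&o=marketcap'
    descending = base.replace('o=', 'o=-')
    # 'https://finviz.com/screener.ashx?v=111&o=-marketcap'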
Example #4
    def ScreenerView(self, order='ticker', limit=-1, verbose=1):
        """Get screener table.

        Args:
            order(str): sort the table by the choice of order
            limit(int): set the top k rows of the screener
            verbose(int): whether to show progress. 1 to print progress messages.
        Returns:
            df(pandas.DataFrame): screener information table
        """
        url = self.url
        if order != 'ticker':
            if order not in self.order_dict:
                raise ValueError()
            url = self.url + '&' + self.order_dict[order]
        soup = webScrap(url)

        page = self._get_page(soup)
        if page == 0:
            print('No ticker found.')
            return None

        if limit != -1:
            if page > (limit - 1) // 20 + 1:
                page = (limit - 1) // 20 + 1

        if verbose == 1:
            print('[Info] loading page 1/{} ...'.format(page))
        table = soup.findAll('table')[18]
        rows = table.findAll('tr')
        table_header = [i.text for i in rows[0].findAll('td')][1:]
        num_col_index = [
            table_header.index(i) for i in table_header if i in self.NUMBER_COL
        ]
        df = pd.DataFrame([], columns=table_header)
        df = self._screener_helper(0, page, rows, df, num_col_index,
                                   table_header, limit)

        for i in range(1, page):
            if verbose == 1:
                print('[Info] loading page {}/{} ...'.format((i + 1), page))
            if order == 'ticker':
                soup = webScrap(self.url + '&r={}'.format(i * 20 + 1))
            else:
                soup = webScrap(self.url + '&r={}'.format(i * 20 + 1) + '&' +
                                self.order_dict[order])
            table = soup.findAll('table')[18]
            rows = table.findAll('tr')
            df = self._screener_helper(i, page, rows, df, num_col_index,
                                       table_header, limit)
        return df
Example #5
    def performance(self, timeframe='D'):
        """Get forex performance table.

        Args:
            timeframe (str): choice of timeframe(D, W, M, Q, HY, Y)

        Returns:
            df(pandas.DataFrame): forex performance table
        """
        params = None
        if timeframe == 'D':
            params = ''
        elif timeframe == 'W':
            params = '?v=12'
        elif timeframe == 'M':
            params = '?v=13'
        elif timeframe == 'Q':
            params = '?v=14'
        elif timeframe == 'HY':
            params = '?v=15'
        elif timeframe == 'Y':
            params = '?v=16'
        else:
            raise ValueError()

        soup = webScrap('https://finviz.com/futures_performance.ashx' + params)
        text = soup.text
        start = text.find('var rows = ') + len('var rows = ')
        end = text.find('FinvizInitFuturesPerformance(rows);')
        data = json.loads(text[start:end].strip()[:-1])
        df = pd.DataFrame(data)
        return df
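
The performance table is not served as HTML here: the page embeds it as a JavaScript literal, so the method slices the text between `var rows = ` and the `FinvizInitFuturesPerformance(rows);` call and drops the trailing `;` before parsing. A toy illustration of that extraction (the page body below is made up):

    import json

    page_text = 'var rows = [{"label": "EURUSD", "perf": 0.5}];\nFinvizInitFuturesPerformance(rows);'
    start = page_text.find('var rows = ') + len('var rows = ')
    end = page_text.find('FinvizInitFuturesPerformance(rows);')
    rows = json.loads(page_text[start:end].strip()[:-1])  # [:-1] removes the trailing ';'
    # rows == [{'label': 'EURUSD', 'perf': 0.5}]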
Example #6
    def ScreenerView(self, group='Sector', order='Name'):
        """Get screener table.

        Args:
            group(str): choice of group option.
            order(str): sort the table by the choice of order.
        Returns:
            df(pandas.DataFrame): group information table.
        """
        if group not in self.group_dict:
            raise ValueError()
        if order not in self.order_dict:
            raise ValueError()
        self.url = self.BASE_URL.format(
            group=self.group_dict[group]) + '&' + self.order_dict[order]

        soup = webScrap(self.url)
        table = soup.findAll('table')[5]
        rows = table.findAll('tr')
        table_header = [i.text for i in rows[0].findAll('td')][1:]
        df = pd.DataFrame([], columns=table_header)
        rows = rows[1:]
        num_col_index = [i for i in range(2, len(table_header))]
        for row in rows:
            cols = row.findAll('td')[1:]
            info_dict = {}
            for i, col in enumerate(cols):
                # check whether the column holds a numeric value
                if i not in num_col_index:
                    info_dict[table_header[i]] = col.text
                else:
                    info_dict[table_header[i]] = numberCovert(col.text)

            df = df.append(info_dict, ignore_index=True)
        return df
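
`DataFrame.append` rebuilds the frame on every iteration and was removed in pandas 2.0. If this snippet is run against a recent pandas, one common rewrite is to collect the row dicts in a list and build the frame once; a sketch under that assumption, reusing `rows`, `num_col_index`, `table_header`, and `numberCovert` from the method above:

    records = []
    for row in rows:
        cols = row.findAll('td')[1:]
        info_dict = {}
        for i, col in enumerate(cols):
            if i not in num_col_index:
                info_dict[table_header[i]] = col.text
            else:
                info_dict[table_header[i]] = numberCovert(col.text)
        records.append(info_dict)
    df = pd.DataFrame(records, columns=table_header)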
Example #7
    def getCurrent(self, ticker):
        """Getting current price of the ticker.

        Returns:
            price(float): price of the ticker
        """
        soup = webScrap(
            "https://finviz.com/request_quote.ashx?t={}".format(ticker))
        return soup.text
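
Because the quote endpoint answers with plain text, the method hands back a string and leaves conversion to the caller. A minimal sketch (webScrap and the URL come from the snippet above; treating the response as a bare number is an assumption):

    raw = webScrap('https://finviz.com/request_quote.ashx?t=AAPL').text
    try:
        price = float(raw)   # e.g. '189.43' -> 189.43
    except ValueError:
        price = None         # response was not a bare number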
Example #8
    def __init__(self, ticker, verbose=0):
        """Initialize module.

        Args:
            ticker(str): ticker symbol of the stock.
            verbose(int): verbosity of the ticker existence check.
        """
        self.ticker = ticker
        self.flag = False
        self.quote_url = QUOTE_URL.format(ticker=ticker)
        self.soup = webScrap(self.quote_url)
        if self._checkexist(verbose):
            self.flag = True
        self.info = {}
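
For reference, QUOTE_URL in this module is a format string for the finviz quote page; with the template assumed below, the constructor resolves it like so:

    QUOTE_URL = 'https://finviz.com/quote.ashx?t={ticker}'  # assumed template
    quote_url = QUOTE_URL.format(ticker='TSLA')
    # 'https://finviz.com/quote.ashx?t=TSLA'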
Example #9
    def ScreenerView(self, group='Sector', order='Name', out_dir=''):
        """Get screener table.

        Args:
            group(str): choice of group option.
            order(str): sort the table by the choice of order.
        """
        if group not in self.group_dict:
            raise ValueError()
        if order not in self.order_dict:
            raise ValueError()
        self.url = self.BASE_URL.format(group=self.group_dict[group])+'&'+self.order_dict[order]

        soup = webScrap(self.url)
        url = 'https://finviz.com/' + soup.findAll('img')[5]['src']
        imageScrap(url, group, out_dir)
Example #10
    def _loadSetting(self):
        """load all the signals and filters."""
        soup = webScrap(self.url)

        # signal
        select = soup.find(id='signalSelect')
        options = select.findAll('option')[1:]
        key = [i.text for i in options]
        value = [i['value'].split('&')[1].split('=')[1] for i in options]
        self.signal_dict = dict(zip(key, value))

        # filter
        table = soup.find('td', class_='filters-border')
        rows = table.find('table').children
        filter_dict = {}
        for row in rows:
            if len(row) > 1:
                cols = row.findAll('td')
                for i, col in enumerate(cols):
                    span = col.findAll('span')
                    if len(span) > 0:
                        header = span[0].text
                        continue
                    if header != 'After-Hours Close' and header != 'After-Hours Change':
                        select = col.find('select')
                        if select is not None:
                            option_dict = {}
                            prefix = select['data-filter']
                            option_dict['prefix'] = prefix
                            option_dict['option'] = {}
                            options = col.findAll('option')
                            for option in options:
                                if '(Elite only)' not in option.text:
                                    option_dict['option'][
                                        option.text] = option['value']
                            filter_dict[header] = option_dict
        self.filter_dict = filter_dict

        # order
        select = soup.find(id='orderSelect')
        options = select.findAll('option')
        key = [i.text for i in options]
        value = [i['value'].split('&')[2] for i in options]
        self.order_dict = dict(zip(key, value))
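
The parsed `filter_dict` is nested: each filter header maps to its `data-filter` prefix and an option table of display text to URL value. A hypothetical entry, to show the shape (the values below are illustrative, not scraped):

    filter_dict = {
        'Exchange': {
            'prefix': 'exch',          # from the <select data-filter="..."> attribute
            'option': {
                'Any': '',
                'NASDAQ': 'nasd',
                'NYSE': 'nyse',
            },
        },
    }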
Example #11
    def _loadSetting(self):
        """load all the groups."""
        soup = webScrap(self.url)
        selects = soup.findAll('select')

        # group
        options = selects[0].findAll('option')
        key = [i.text for i in options]
        value = []
        for option in options:
            temp = option['value'].split('?')[1].split('&')
            if len(temp) == 4:
                temp = '&'.join(temp[:2])
            else:
                temp = temp[0]
            value.append(temp)
        self.group_dict = dict(zip(key, value))

        # order
        options = selects[1].findAll('option')
        key = [i.text for i in options]
        value = [i['value'].split('&')[-1] for i in options]
        self.order_dict = dict(zip(key, value))
Example #12
    def __init__(self):
        """Initialize module."""
        self.all_news = {}
        self.soup = webScrap(NEWS_URL)
        self.news = {}