Code Example #1
    def __init__(self, option="latest"):
        """Initialize the insider scraper.

        Args:
            option(str): filter choice ("latest", "latest buys", "latest sales",
                "top week", "top week buys", "top week sales", "top owner trade",
                "top owner buys", "top owner sales", or a numeric owner code).
        """
        if option == "latest":
            self.soup = web_scrap(INSIDER_URL)
        elif option == "latest buys":
            self.soup = web_scrap(INSIDER_URL + "?tc=1")
        elif option == "latest sales":
            self.soup = web_scrap(INSIDER_URL + "?tc=2")
        elif option == "top week":
            self.soup = web_scrap(
                INSIDER_URL + "?or=-10&tv=100000&tc=7&o=-transactionValue"
            )
        elif option == "top week buys":
            self.soup = web_scrap(
                INSIDER_URL + "?or=-10&tv=100000&tc=1&o=-transactionValue"
            )
        elif option == "top week sales":
            self.soup = web_scrap(
                INSIDER_URL + "?or=-10&tv=100000&tc=2&o=-transactionValue"
            )
        elif option == "top owner trade":
            self.soup = web_scrap(
                INSIDER_URL + "?or=10&tv=1000000&tc=7&o=-transactionValue"
            )
        elif option == "top owner buys":
            self.soup = web_scrap(
                INSIDER_URL + "?or=10&tv=1000000&tc=1&o=-transactionValue"
            )
        elif option == "top owner sales":
            self.soup = web_scrap(
                INSIDER_URL + "?or=10&tv=1000000&tc=2&o=-transactionValue"
            )
        elif option.isdigit():
            # A numeric option is passed through as the "oc" query parameter.
            self.soup = web_scrap(INSIDER_URL + "?oc=" + option + "&tc=7")
        else:
            raise ValueError("Invalid option '{}'".format(option))
        self.df = None
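A minimal usage sketch for this constructor. The Insider class path and its get_insider() accessor are assumptions about the finvizfinance public API, not shown in the snippet above:

# Hypothetical usage; Insider and get_insider() are assumed, not defined above.
from finvizfinance.insider import Insider

finsider = Insider(option="top owner trade")
insider_df = finsider.get_insider()  # scrape and parse the insider table
print(insider_df.head())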
Code Example #2
    def screener_view(self, limit=-1, verbose=1):
        """Get screener table.

        Args:
            limit(int): maximum number of tickers to return. -1 for all.
            verbose(int): 1 to display a progress bar, 0 to run silently.
        Returns:
            tickers(list): list of all tickers found.
        """
        soup = web_scrap(self.url)
        page = self._get_page(soup)
        if page == 0:
            if verbose == 1:
                print("No ticker found.")
            return None

        # Each ticker-view page holds 1,000 rows; cap the page count by the limit.
        if limit != -1:
            if page > (limit - 1) // 1000 + 1:
                page = (limit - 1) // 1000 + 1

        if verbose == 1:
            progress_bar(1, page)

        tickers = []
        tickers = self._screener_helper(0, page, soup, tickers, limit)

        for i in range(1, page):
            if verbose == 1:
                progress_bar(i + 1, page)
            # "&r=" is the 1-based row offset of the next results page.
            soup = web_scrap(self.url + "&r={}".format(i * 1000 + 1))
            tickers = self._screener_helper(i, page, soup, tickers, limit)
        return tickers
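A short call sketch, assuming this method belongs to the Ticker screener class (the finvizfinance.screener.ticker path is an assumption):

# Hypothetical usage; the Ticker class path is assumed.
from finvizfinance.screener.ticker import Ticker

ticker_screener = Ticker()
tickers = ticker_screener.screener_view(limit=100, verbose=0)  # first 100 tickers
print(len(tickers), tickers[:5])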
Code Example #3
    def performance(self, timeframe="D"):
        """Get forex performance table.

        Args:
            timeframe (str): choice of timeframe(D, W, M, Q, HY, Y)

        Returns:
            df(pandas.DataFrame): forex performance table
        """
        params = None
        if timeframe == "D":
            params = ""
        elif timeframe == "W":
            params = "?v=12"
        elif timeframe == "M":
            params = "?v=13"
        elif timeframe == "Q":
            params = "?v=14"
        elif timeframe == "HY":
            params = "?v=15"
        elif timeframe == "Y":
            params = "?v=16"
        else:
            raise ValueError("Invalid timeframe '{}'".format(timeframe))

        soup = web_scrap("https://finviz.com/futures_performance.ashx" + params)
        data = soup.text[
            soup.text.find("var rows = ")
            + 11 : soup.text.find("FinvizInitFuturesPerformance(rows);")
        ]
        data = json.loads(data.strip()[:-1])
        df = pd.DataFrame(data)
        return df
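A call sketch for the performance method, assuming it lives on a Future class in finvizfinance.future (an assumption; only the method body is shown above):

# Hypothetical usage; the Future class path is assumed.
from finvizfinance.future import Future

future = Future()
df = future.performance(timeframe="W")  # weekly performance table
print(df.head())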
Code Example #4
File: spectrum.py  Project: ajmal017/finvizfinance
    def screener_view(self, group="Sector", order="Name", out_dir=""):
        """Get screener table.

        Args:
            group(str): choice of group option.
            order(str): sort the table by the choice of order.
        """
        if group not in self.group_dict:
            raise ValueError()
        if order not in self.order_dict:
            raise ValueError()
        self.url = (self.BASE_URL.format(group=self.group_dict[group]) + "&" +
                    self.order_dict[order])

        soup = web_scrap(self.url)
        url = "https://finviz.com/" + soup.findAll("img")[5]["src"]
        image_scrap(url, group, "")
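A usage sketch, assuming the snippet's class is Spectrum in finvizfinance.group.spectrum:

# Hypothetical usage; the Spectrum class path is assumed.
from finvizfinance.group.spectrum import Spectrum

spectrum = Spectrum()
# Saves the sector spectrum chart image to the current directory.
spectrum.screener_view(group="Sector", order="Name", out_dir="")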
Code Example #5
    def calendar(self):
        """Get economic calendar table.

        Returns:
            df(pandas.DataFrame): economic calendar table
        """
        soup = web_scrap("https://finviz.com/calendar.ashx")
        tables = soup.findAll("table", class_="calendar")
        columns = [
            "Datetime",
            "Release",
            "Impact",
            "For",
            "Actual",
            "Expected",
            "Prior",
        ]
        records = []

        for table in tables:
            rows = table.findAll("tr")
            # Skip days with no economic releases.
            if rows[1].findAll("td")[2].text == "No economic releases":
                continue
            # The first row of each table carries the date.
            date = rows[0].find("td").text
            for row in rows[1:]:
                cols = row.findAll("td")
                records.append({
                    "Datetime": "{}, {}".format(date, cols[0].text),
                    "Release": cols[2].text,
                    "Impact": re.findall(
                        "gfx/calendar/impact_(.*).gif", cols[3].find("img")["src"]
                    )[0],
                    "For": cols[4].text,
                    "Actual": cols[5].text,
                    "Expected": cols[6].text,
                    "Prior": cols[7].text,
                })
        # DataFrame.append was removed in pandas 2.0; build from records instead.
        df = pd.DataFrame(records, columns=columns)
        return df
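A usage sketch, assuming the method belongs to a Calendar class in finvizfinance.calendar:

# Hypothetical usage; the Calendar class path is assumed.
from finvizfinance.calendar import Calendar

fcalendar = Calendar()
df = fcalendar.calendar()
print(df[["Datetime", "Release", "Actual", "Expected"]].head())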
Code Example #6
    def screener_view(
        self, group="Sector", order="Name", columns=[0, 1, 2, 3, 10, 22, 24, 25, 26]
    ):
        """Get screener table.

        Args:
            group(str): choice of group option.
            order(str): sort the table by the choice of order.
            columns(list): columns of your choice. Default index: 0, 1, 2, 3, 10, 22, 24, 25, 26.
        Returns:
            df(pandas.DataFrame): group information table.
        """
        if group not in self.group_dict:
            raise ValueError("Invalid group '{}'".format(group))
        if order not in self.order_dict:
            raise ValueError("Invalid order '{}'".format(order))
        self.url = (
            self.BASE_URL.format(group=self.group_dict[group])
            + "&"
            + self.order_dict[order]
        )
        columns = [str(i) for i in columns]
        self.url += "&c=" + ",".join(columns)

        soup = web_scrap(self.url)
        table = soup.findAll("table")[6]
        rows = table.findAll("tr")
        table_header = [i.text for i in rows[0].findAll("td")][1:]
        rows = rows[1:]
        # Columns after the first two are treated as numeric.
        num_col_index = list(range(2, len(table_header)))
        records = []
        for row in rows:
            cols = row.findAll("td")[1:]
            info_dict = {}
            for i, col in enumerate(cols):
                # Convert numeric columns; keep text columns as-is.
                if i not in num_col_index:
                    info_dict[table_header[i]] = col.text
                else:
                    info_dict[table_header[i]] = number_covert(col.text)
            records.append(info_dict)
        # DataFrame.append was removed in pandas 2.0; build from records instead.
        df = pd.DataFrame(records, columns=table_header)
        return df
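A usage sketch, assuming this screener_view belongs to the group Overview class (the finvizfinance.group.overview path is an assumption):

# Hypothetical usage; the group Overview class path is assumed.
from finvizfinance.group.overview import Overview

group_overview = Overview()
df = group_overview.screener_view(group="Industry", order="Name")
print(df.head())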
Code Example #7
File: overview.py  Project: ajmal017/finvizfinance
    def _load_setting(self):
        """Load the available group and order options from the page."""
        soup = web_scrap(self.url)
        selects = soup.findAll("select")

        # First <select>: group options (e.g. Sector, Industry, Country).
        options = selects[0].findAll("option")
        key = [i.text for i in options]
        value = []
        for option in options:
            # Keep only the group-defining query parameters of each option URL.
            temp = option["value"].split("?")[1].split("&")
            if len(temp) == 4:
                temp = "&".join(temp[:2])
            else:
                temp = temp[0]
            value.append(temp)
        self.group_dict = dict(zip(key, value))

        # Second <select>: order (sort) options; keep the last query parameter.
        options = selects[1].findAll("option")
        key = [i.text for i in options]
        value = [i["value"].split("&")[-1] for i in options]
        self.order_dict = dict(zip(key, value))
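Because _load_setting populates group_dict and order_dict, the valid group and order arguments for the screener_view methods above can be listed directly. A sketch, assuming the class constructor calls _load_setting():

# Hypothetical usage; assumes __init__ invokes _load_setting().
from finvizfinance.group.overview import Overview

group_overview = Overview()
print(list(group_overview.group_dict))  # valid `group` arguments
print(list(group_overview.order_dict))  # valid `order` arguments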
Code Example #8
    def screener_view(self,
                      order="ticker",
                      limit=-1,
                      select_page=None,
                      verbose=1,
                      ascend=True):
        """Get screener table.

        Args:
            order(str): sort the table by the choice of order.
            limit(int): set the top k rows of the screener.
            select_page(int): set the page of the screener.
            verbose(int): choice of visual the progress. 1 for visualize progress.
            ascend(bool): if True, the order is ascending.
        Returns:
            df(pandas.DataFrame): screener information table
        """
        url = self.url
        if order != "ticker":
            if order not in self.order_dict:
                order_keys = list(self.order_dict.keys())
                raise ValueError(
                    "Invalid order '{}'. Possible order: {}".format(
                        order, order_keys))
            url = self.url + "&" + self.order_dict[order]
        if not ascend:
            url = url.replace("o=", "o=-")
        soup = web_scrap(url)

        page = self._get_page(soup)
        if page == 0:
            print("No ticker found.")
            return None

        start_page = 1
        end_page = page

        if select_page:
            if select_page > page:
                raise ValueError("Invalid page {}".format(select_page))
            if limit != -1:
                limit = -1
                warnings.warn(
                    "Limit parameter is ignored when page is selected.")
            start_page = select_page - 1
            end_page = select_page

        # Each overview page holds 20 rows; cap the page count by the limit.
        if limit != -1:
            if page > (limit - 1) // 20 + 1:
                page = (limit - 1) // 20 + 1

        if verbose == 1:
            if not select_page:
                progress_bar(start_page, end_page)
            else:
                progress_bar(1, 1)

        table = soup.findAll("table")[18]
        rows = table.findAll("tr")
        table_header = [i.text for i in rows[0].findAll("td")][1:]
        num_col_index = [
            table_header.index(i) for i in table_header if i in NUMBER_COL
        ]
        df = pd.DataFrame([], columns=table_header)
        if not select_page or select_page == 1:
            df = self._screener_helper(0, page, rows, df, num_col_index,
                                       table_header, limit)

        if select_page != 1:
            for i in range(start_page, end_page):
                if verbose == 1:
                    if not select_page:
                        progress_bar(i + 1, page)
                    else:
                        progress_bar(1, 1)

                url = self.url + "&r={}".format(i * 20 + 1)
                if order != "ticker":
                    url += "&" + self.order_dict[order]
                if not ascend:
                    url = url.replace("o=", "o=-")
                soup = web_scrap(url)
                table = soup.findAll("table")[18]
                rows = table.findAll("tr")
                df = self._screener_helper(i, page, rows, df, num_col_index,
                                           table_header, limit)
        return df
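A usage sketch for the full screener flow, assuming the screener Overview class and its set_filter() helper from finvizfinance (both are assumptions; neither is defined in the snippet):

# Hypothetical usage; the Overview class path, set_filter(), and the
# filter keys/values are assumed.
from finvizfinance.screener.overview import Overview

foverview = Overview()
foverview.set_filter(filters_dict={"Sector": "Technology"})
df = foverview.screener_view(limit=40, verbose=0)
print(df.head())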