Example 1
    def get_possible_attributes(cls):
        """Returns dict of all possible attributes parsed from Groundspeak's website."""
        try:
            page = BeautifulSoup(requests.get(cls._attributes_url).text, "html.parser")
        except requests.exceptions.ConnectionError as e:
            raise errors.Error("Cannot load attributes page.") from e

        # get all <img>s containing attributes from all <dl>s with specific class
        images = chain(*map(lambda i: i.find_all("img"), page.find_all("dl", "AttributesList")))
        # create dict as {"machine name": "human description"}
        attributes = {i.get("src").split("/")[-1].rsplit("-", 1)[0]: i.get("alt") for i in images}

        return attributes
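The dict comprehension above derives the machine name from each attribute icon's file name. A minimal sketch of that key extraction, using a hypothetical icon URL of the kind the code assumes (the real values are scraped from the <img> tags on Groundspeak's page):

# Hypothetical values for illustration only; real ones come from the parsed page.
src = "/images/attributes/dogs-yes.gif"   # assumed shape of an attribute icon URL
alt = "Dogs allowed"                      # assumed alt text (the human description)

machine_name = src.split("/")[-1].rsplit("-", 1)[0]   # "dogs-yes.gif" -> "dogs"
attributes = {machine_name: alt}
print(attributes)   # {'dogs': 'Dogs allowed'}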
Example 2
def get_possible_attributes(*, session=None):
    """Return a dict of all possible attributes parsed from Groundspeak's website."""
    # imports are local so they do not slow down other parts of the program that normally don't use this method
    import requests
    from bs4 import BeautifulSoup

    session = session or requests.Session()

    try:
        page = BeautifulSoup(session.get(_attributes_url).text, "html.parser")
    except requests.exceptions.ConnectionError as e:
        raise errors.Error("Cannot load attributes page.") from e

    symbols = page.select("symbol")
    # {"machine name": "human description"}
    return {s.get("id"): s.select_one("title").text for s in symbols}
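This variant parses SVG <symbol> elements whose <title> children carry the human-readable description. A self-contained sketch of just that parsing step, run against a hard-coded snippet (the markup below is illustrative; the real page may be structured differently):

from bs4 import BeautifulSoup

# Illustrative markup only, assumed to resemble the attribute symbols on the page.
html = """
<svg>
  <symbol id="attribute-dogs"><title>Dogs allowed</title></symbol>
  <symbol id="attribute-fee"><title>Access or parking fee</title></symbol>
</svg>
"""
page = BeautifulSoup(html, "html.parser")
attributes = {s.get("id"): s.select_one("title").text for s in page.select("symbol")}
print(attributes)
# {'attribute-dogs': 'Dogs allowed', 'attribute-fee': 'Access or parking fee'}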
Example 3
def get_possible_attributes():
    """Return a dict of all possible attributes parsed from Groundspeak's website."""
    # imports are local so they do not slow down other parts of the program that normally don't use this method
    from itertools import chain
    import requests
    from bs4 import BeautifulSoup

    try:
        page = BeautifulSoup(requests.get(_attributes_url).text, "html.parser")
    except requests.exceptions.ConnectionError as e:
        raise errors.Error("Cannot load attributes page.") from e

    # get all <img>s containing attributes from all <dl>s with specific class
    images = chain(*map(lambda i: i.find_all("img"),
                        page.find_all("dl", "AttributesList")))
    # create dict as {"machine name": "human description"}
    attributes = {
        i.get("src").split("/")[-1].rsplit("-", 1)[0]: i.get("alt")
        for i in images
    }

    return attributes
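The chain(*map(...)) expression flattens the per-<dl> lists of <img> tags into a single iterable before the dict comprehension consumes it. A tiny, self-contained illustration of that flattening pattern with stand-in data:

from itertools import chain

# Stand-in for the per-<dl> lists of <img> tags; in the real code the lambda
# calls dl.find_all("img") on each <dl class="AttributesList"> element.
groups = [["dogs-yes.gif", "fee-yes.gif"], ["bicycles-yes.gif"], []]
images = chain(*map(lambda dl: dl, groups))
print(list(images))   # ['dogs-yes.gif', 'fee-yes.gif', 'bicycles-yes.gif']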