Example No. 1
def _clearFilters(self):
    self._platforms.clearSelection()
    self._regions.clearSelection()
    self._genres.clearSelection()
    self._years.clearSelection()
    self._item.setCheckState(Qt.PartiallyChecked)
    self._box.setCheckState(Qt.PartiallyChecked)
    self._manual.setCheckState(Qt.PartiallyChecked)
    logger.info("Cleared all filters.")
Example No. 2
def _tryAlternatives(title: str, platform: str):
    # Occasionally the title has a trailing "-", "_", "__", or very rarely "___" in the url

    pTitle = _parseTitle(title)
    testurl = [_platforms[platform], pTitle, "release-info"]
    for c in ["-", "_", "__", "___"]:
        testurl[1] = pTitle + c  # Append the current suffix to the parsed title
        logger.info(f"Trying with url: {_baseURL + '/'.join(testurl)}")
        res = requests.get(_baseURL + "/".join(testurl))  # Try alternative URL

        try:
            res.raise_for_status()
        except requests.exceptions.HTTPError:  # Not a valid page
            logger.info("Not a valid page.")
            continue

        # Parse the html and get title and platform strings
        soup = bs4.BeautifulSoup(res.text, 'html.parser')
        te = soup.select(_titleCSS)
        pf = soup.select(_platformCSS)
        if len(te) == 0 or len(pf) == 0:
            continue

        # Check if title and platform match
        if te[0].text.strip().lower() == title.lower() and pf[0].text.strip().lower() == platform.lower():
            logger.info(f"Found matching title at: {_baseURL + '/'.join(testurl)}")
            return res, _baseURL + "/".join(testurl)
        else:
            continue

    return None, ""
Example No. 3
def _trySuggestions(platform: str, region: int, soup: bs4.BeautifulSoup) -> bs4.BeautifulSoup:
    """
    Goes through the list of games and tries to find one that matches the platform
    :param platform: The platform we're looking for
    :param region: Which region. 0: NTSC (JP), 1: NTSC (NA), 2: PAL
    :param soup: BeautifulSoup object
    :return: BeautifulSoup object for the new page if found, otherwise None (the soup is cleared first)
    """

    logger.info("Couldn't find game at url. Trying alternatives...")

    titleUrlRegex = re.compile(r'href=\".*?\"')
    titles = soup.find_all("td", {"class": "title"})
    consoles = soup.find_all("td", {"class": "console"})
    url = ""

    for title, console in zip(titles, consoles):
        if console.text.lower().replace(" ", "-") == _platforms[platform][region]:
            url = titleUrlRegex.findall(title.decode()).pop()[5:].strip('"')
            break

    if len(url) > 0:
        logger.info(f"New url found: {url}")
        res = requests.get(url)
        soup = bs4.BeautifulSoup(res.text, "html.parser")
        return soup

    logger.info("Couldn't find title in alternate urls.")
    return soup.clear()  # clear() empties the soup and returns None, so callers get None when nothing is found
Example No. 4
def main():
    gcmDir = getScriptDir()
    dbPath = gcmDir + "/data/db/collection.db"
    logger.info(f"Program directory set to {gcmDir}.")

    # Make sure we have everything
    if not os.path.exists(gcmDir + "/data"):
        logger.critical(
            "Data directory doesn't exist. Creating '/data/db' and '/data/vgdb'..."
        )
        os.makedirs(gcmDir + "/data/db")
        os.mkdir(gcmDir + "/data/vgdb")
        createDB(dbPath)
    if not os.path.exists(gcmDir + "/data/db"):
        logger.critical("'/data/db' doest't exist. Creating...")
        os.mkdir(gcmDir + "/data/db")
        createDB(dbPath)
    if not os.path.exists(gcmDir + "/data/vgdb"):
        os.mkdir(gcmDir + "/data/vgdb")
    if not os.path.exists(dbPath):
        logger.critical("No database was found.")
        createDB(dbPath)

    createWindow(dbPath)
Example No. 5
def getMobyRelease(name: str, platform: str, region: str, country: str = ""):
    """
    Finds a specific release for a game on MobyGames.com
    :param name: The name of the game
    :param platform: The game's platform
    :param region: The game's region (NTSC (JP), NTSC (NA), PAL accepted)
    :param country: Optionally specify a specific country
    :return: Dictionary of the release info
    """

    logger.info(f"Find release info for '{name}' on '{region} {platform}' for country '{country}'"
                if country != "" else f"Find release info for '{name}' on '{region} {platform}'.")

    releaseInfo = {"publisher": "", "developer": "", "platforms": "",
                   "genre": "", "code": "", "year": ""}

    regionDict = {"NTSC (JP)": ("Japan", "Worldwide"),
                  "NTSC (NA)": ("United States", "Canada", "Worldwide"),
                  "PAL": ("United Kingdom", "Ireland", "Germany", "France", "Italy",
                          "Austria", "Belgium", "The Netherlands", "Portugal",
                          "Spain", "Switzerland", "Russia",
                          "Sweden", "Denmark", "Norway", "Finland",
                          "Australia, New Zealand", "Worldwide")}

    if region in ("PAL A", "PAL B"):
        region = "PAL"
    elif region not in ("PAL", "NTSC (NA)", "NTSC (JP)"):  # Catch all for other non-valid regions
        region = "NTSC (NA)"

    regionValue = regionDict[region]
    info = getMobyInfo(name, platform)

    if info["title"] == "":
        # No data found, return empty values
        logger.error("Release info not found.")
        return releaseInfo

    publisher = info["publisher"]
    developer = info["developer"]
    platforms = info["platforms"]
    genre = info["genre"]
    covers = info["covers"] if "covers" in info.keys() else ""
    yearFormat = re.compile(r"\d{4}")
    skipCode = False
    code = ""
    year = ""

    # Try to get product code, and also year since it might be different between releases
    correctRelease = ""
    for release in info["releases"].keys():
        # Optionally check the specific country's release, but only if it makes sense
        # (e.g. don't check for Norway if region == NTSC (JP))
        if country != "" and country in regionValue and country in release:
            correctRelease = release

        else:
            if region == "PAL" and "United Kingdom" in release:
                # Make UK release default for PAL
                correctRelease = release

            else:
                if country == "" and correctRelease == "":
                    # UK not found, or region isn't PAL, try to find another release
                    for r in release:
                        if r in regionValue or r == regionValue:
                            correctRelease = release
                            break

                elif country != "" and correctRelease == "":
                    continue

        if correctRelease != "":
            break

    if correctRelease == "":
        correctRelease = list(info['releases'].keys())[0]
        skipCode = True
        logger.warning(f"Couldn't find correct release for given region. Defaulting to the first one {correctRelease}. "
                       "This also means we skip checking the product code. Please enter it manually.")

    details = info['releases'][correctRelease]

    for d in details:
        if d[0] in ("Company Code", "Nintendo Media PN", "Sony PN") and not skipCode:
            code = d[1]
        elif d[0] == "Release Date":
            year = yearFormat.findall(d[1])[0]

    # Try with EAN-13 or UPC-A for the code as a fallback, once all the details have been checked
    if code == "" and not skipCode:
        logger.warning("Couldn't find product code. Trying with barcode instead.")
        for d in info["releases"][correctRelease]:
            if d[0] in ("EAN-13", "UPC-A"):
                code = d[0] + ": " + d[1]
                break
        if code == "":
            logger.warning("Can't find barcode either.")

    # Find the release's cover image
    if len(covers) > 0 and str(list(covers.values())[0]).find("/shots/") == -1:
        # We have a cover image, determine region
        res = None
        for cover in covers:
            countries = cover.split(" , ")
            for country in countries:
                if region == "PAL" and country.strip() == "United Kingdom":
                    # Default to UK for PAL region
                    res = requests.get(covers[cover])
                    break

                elif country.strip() in regionValue:
                    res = requests.get(covers[cover])
                    break

            if res is not None:
                break

        if res is None:  # Correct region not found, select the first one.
            logger.warning("Couldn't find correct cover for the region. Defaulting to the first image.")
            res = requests.get(list(covers.values())[0])

        imgCSS = ".img-responsive"
        imgURLReg = re.compile(r'src=\".*?\"')
        soup = bs4.BeautifulSoup(res.text, "html.parser")
        imgURL = "https://www.mobygames.com" + imgURLReg.findall(str(soup.select(imgCSS))).pop().split('=')[1].strip('"')
        logger.info(f"Found cover image at: {imgURL}")

    elif len(covers) > 0 and str(list(covers.values())[0]).find("/shots/") != -1:
        # Cover image wasn't found but we have a screen shot
        imgURL = list(covers.values())[0]
        logger.warning("A proper cover image wasn't found. But found a screen shot.")
        logger.info(f"Screenshot url: {imgURL}")
    else:  # No image found
        logger.warning("Couldn't find a cover image.")
        imgURL = ""

    releaseInfo = {"publisher": publisher, "developer": developer, "platforms": platforms,
                   "genre": genre, "image": imgURL, "code": code, "year": year}

    logger.info("Release info found.")
    return releaseInfo
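
A minimal usage sketch for getMobyRelease, under the assumption that it can be imported from a module named mobygames (the import path is not shown in these examples) and with purely illustrative title, platform, region, and country values:

# Hypothetical usage of getMobyRelease; the module name is an assumption.
from mobygames import getMobyRelease

release = getMobyRelease("Super Metroid", "SNES", "PAL", country="United Kingdom")
if release["publisher"] == "" and release["year"] == "":
    print("No release info found.")
else:
    print(f"{release['year']} - {release['publisher']} / {release['developer']}")
    print(f"Genre: {release['genre']}, product code: {release['code'] or 'unknown'}")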
Example No. 6
def getMobyInfo(title: str, platform: str) -> dict:
    """Takes a game name and its platform, and returns a dictionary with the game's
       information from MobyGames.com.
       :param title: Title of the game
       :param platform: The game's platform
       :return: Dictionary of the game's info
    """

    logger.info(f"Getting info for '{title}' for '{platform}'...")

    mobyCSSData = {
        "title": "html body div#wrapper div.container div#main.row div.col-md-12.col-lg-12 div.rightPanelHeader "
                 "h1.niceHeaderTitle a",
        "publisher": "#coreGameRelease > div:nth-child(2) > a:nth-child(1)",
        "developer": "#coreGameRelease > div:nth-child(4) > a:nth-child(1)",
        "release": "#coreGameRelease > div:nth-child(6) > a:nth-child(1)",
        "platforms": "#coreGameRelease > div:nth-child(8)",
        "genre": "#coreGameGenre > div:nth-child(1) > div:nth-child(2)",
    }

    # Gameplay type. If found, used instead of Genre since it's usually more
    # representative (e.g. "Platform" instead of "Action")
    gameplayCSS = ["#coreGameGenre > div:nth-child(1) > div:nth-child(3)",
                   "#coreGameGenre > div:nth-child(1) > div:nth-child(4)",
                   "#coreGameGenre > div:nth-child(1) > div:nth-child(5)",
                   "#coreGameGenre > div:nth-child(1) > div:nth-child(6)",
                   "#coreGameGenre > div:nth-child(1) > div:nth-child(7)",
                   "#coreGameGenre > div:nth-child(1) > div:nth-child(8)",
                   "#coreGameGenre > div:nth-child(1) > div:nth-child(9)",
                   "#coreGameGenre > div:nth-child(1) > div:nth-child(10)",
                   "#coreGameGenre > div:nth-child(1) > div:nth-child(11)",
                   "#coreGameGenre > div:nth-child(1) > div:nth-child(12)",
                   "#coreGameGenre > div:nth-child(2) > div:nth-child(3)",
                   "#coreGameGenre > div:nth-child(2) > div:nth-child(4)",
                   "#coreGameGenre > div:nth-child(2) > div:nth-child(5)",
                   "#coreGameGenre > div:nth-child(2) > div:nth-child(6)",
                   "#coreGameGenre > div:nth-child(2) > div:nth-child(7)",
                   "#coreGameGenre > div:nth-child(2) > div:nth-child(8)",
                   "#coreGameGenre > div:nth-child(2) > div:nth-child(9)",
                   "#coreGameGenre > div:nth-child(2) > div:nth-child(10)",
                   "#coreGameGenre > div:nth-child(2) > div:nth-child(11)",
                   "#coreGameGenre > div:nth-child(2) > div:nth-child(12)"]

    if platform.lower() == "game & watch":
        title = "Game & Watch Wide Screen: " + title  # TODO: Need to figure out something better for each variety

    pTitle = _parseTitle(title)
    pPlatform = _parsePlatform(platform)
    logger.info(f"Parsed title to '{pTitle}'.")
    logger.info(f"Parsed platform to '{pPlatform}'")

    # Get data
    if pPlatform not in _platforms.keys():  # Platform not supported
        logger.error(f"Platform '{platform}' not supported.")
        return {x: "" for x in mobyCSSData.keys()}

    fullURL = _baseURL + "/".join((_platforms[pPlatform], pTitle, "release-info"))
    logger.info(f"Full url to mobygames: {fullURL}")

    try:
        res = requests.get(fullURL)
    except socket.gaierror:
        # Most likely no internet connection
        logger.error("Can't establish connection.")
        return {x: "" for x in mobyCSSData.keys()}

    try:
        res.raise_for_status()
        soup = bs4.BeautifulSoup(res.text, 'html.parser')
    except requests.exceptions.HTTPError:
        # Try the suggested results on the 404 page
        logger.info("Title not immediately found. Trying the suggestions.")
        res, title, fullURL = _trySuggestions(title, pPlatform)
        if res is None:
            # Couldn't find anything. Return empty values
            logger.error("Title not found.")
            return {x: "" for x in mobyCSSData.keys()}

        soup = bs4.BeautifulSoup(res.text, 'html.parser')

    # Extract data
    for key in mobyCSSData.keys():
        pf = soup.select(_platformCSS)
        pf = pf[0].text.strip() if len(pf) > 0 else ""
        if pf.lower() != pPlatform.lower():
            # Try some alternative URLs
            logger.info("Platform mismatch. Trying some alternative urls.")
            res, fullURL = _tryAlternatives(title, pPlatform)
            if res is None:  # Nothing was found.
                logger.error("Title not found.")
                return {x: "" for x in mobyCSSData.keys()}

            soup = bs4.BeautifulSoup(res.text, 'html.parser')

        try:
            value = soup.select(mobyCSSData[key])
            if key == "platforms":
                # Make sure we don't include the '| Combined View' text
                mobyCSSData[key] = ucd.normalize("NFKD", value[0].text.split("|", 1)[0].strip())
                # Also make sure to insert the platform we're looking for
                platforms = mobyCSSData[key].split(", ")
                if pPlatform not in platforms:
                    platforms.append(pPlatform)
                    mobyCSSData[key] = ", ".join(sorted(platforms, key=str.lower))

            elif key == "genre":
                # Try finding the "Gameplay" category since it's more accurate but not always available
                gameplay = ""
                for i in range(len(gameplayCSS)):
                    temp = soup.select(gameplayCSS[i])
                    if len(temp) > 0:
                        if temp[0].text.strip() == "Gameplay":  # Gameplay type is under this header
                            gameplay = soup.select(gameplayCSS[i + 1])
                            break

                # "Arcade" and "Puzzle elements" by themselves are about as useful as "Action"
                if len(gameplay) > 0 and gameplay[0].text.strip() != "Arcade" and\
                        ucd.normalize("NFKD", gameplay[0].text.strip()) != "Puzzle elements" and\
                        ucd.normalize("NFKD", gameplay[0].text.strip()) != "Arcade, Puzzle elements":
                    # Save gameplay types without "Arcade", since it's pretty useless.
                    mobyCSSData[key] = str(ucd.normalize("NFKD", gameplay[0].text.strip())).replace("Arcade, ", "")
                else:  # Default back to normal Genre
                    mobyCSSData[key] = ucd.normalize("NFKD", value[0].text.strip())
            else:
                mobyCSSData[key] = ucd.normalize("NFKD", value[0].text.strip())

        except IndexError:  # Not all games have all data. Just add an empty string instead.
            if key == "genre":
                # If there's an ESRB rating it takes the place of the normal genre position
                altGenreCSS = "#coreGameGenre > div:nth-child(2) > div:nth-child(4)"
                try:
                    value = soup.select(altGenreCSS)
                    mobyCSSData[key] = ucd.normalize("NFKD", value[0].text.strip())
                except IndexError:  # Still nothing
                    logger.info(f"No data for value: '{key}'")
                    mobyCSSData[key] = ""
            else:
                logger.info(f"No data for value: '{key}'")
                mobyCSSData[key] = ""

    # Get release info
    releases = {}
    release = ""
    info = soup.find_all("div", {"class": "floatholder relInfo"})

    for i in info:
        titles = i.find_all("div", {"class": "relInfoTitle"})
        details = i.find_all("div", {"class": "relInfoDetails"})

        for title, detail in zip(titles, details):
            if title.text.strip() in ("Country", "Countries"):  # Make the country name the dict key
                temprelease = detail.text.split(",")
                temprelease = [x.strip() for x in temprelease]
                release = tuple(temprelease)
                releases[release] = []
            else:  # Add the rest of the info to the country name key
                releases[release].append([ucd.normalize("NFKD", title.text.strip()),
                                          ucd.normalize("NFKD", detail.text.strip())])

    mobyCSSData["releases"] = releases

    # Get cover image
    imgurlReg = re.compile(r'href=\".*?\"')
    coverURL = fullURL.replace("release-info", "cover-art")
    coverRes = requests.get(coverURL)
    coverSoup = bs4.BeautifulSoup(coverRes.text, "html.parser")
    coverReleases = coverSoup.find_all("table", {"summary": "Description of Covers"})
    coverMedia = coverSoup.find_all("div", {"class": "thumbnail"})

    if len(coverMedia) == 0:  # No covers found, default to title screen shot
        imgurlReg = re.compile(r'src=\".*?\"')
        image = soup.find_all("div", {"id": "coreGameCover"})
        tmpsrc = imgurlReg.findall(image.pop().decode())

        if len(tmpsrc) > 0:
            imgsrc = tmpsrc.pop().split('=')[1].strip('"')  # Find 'src=' part, then split at '='
            mobyCSSData["covers"] = {"United States": "https://www.mobygames.com" + imgsrc}
        else:
            logger.warning("No cover image found.")

    else:
        # Find the "Front Cover" URLs
        coverURLs = []
        for media in coverMedia:
            covers = media.find_all("a", {"class": "thumbnail-cover"})
            for cover in covers:
                if str(cover).find("Front Cover") != -1:
                    # Find 'href=' part, split at '=', and strip away '"' on the right side
                    coverURLs.append(imgurlReg.findall(str(cover)).pop().split('=')[1].strip('"'))

        covers = {}
        for release, url in zip(coverReleases, coverURLs):
            rel = release.find_all("td")
            for j, r in enumerate(rel):  # Find index of countries list
                # if len(rel) > 9:
                #    continue  # Skips player's choice releases etc (anything that has a "Package Comments" section)
                if r.text in ("Country", "Countries"):
                    # Country as key, url as value. When several countries the last ones are separated with " and ".
                    covers[rel[j + 2].text.replace(" and ", " , ")] = url
                    break

        mobyCSSData["covers"] = covers

    logger.info("Title info found.")
    return mobyCSSData
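
The shape of the returned dictionary is worth spelling out: info["releases"] maps tuples of country names to lists of [label, detail] pairs, and the error paths above return a dict of empty strings without a "releases" key. A short, hedged sketch of traversing the result (the title and platform are only illustrative):

# Hypothetical inspection of getMobyInfo's return value.
info = getMobyInfo("Final Fantasy VII", "PlayStation")
if info["title"]:  # an empty title means the lookup failed and "releases" is absent
    for countries, details in info["releases"].items():
        print(", ".join(countries))       # each key is a tuple of country names
        for label, detail in details:     # pairs such as ["Release Date", "..."]
            print(f"  {label}: {detail}")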
Example No. 7
def _trySuggestions(title: str, platform: str):
    # Checks if the suggested URLs match

    pTitle = _parseTitle(title)
    res = requests.get(_baseURL + "/".join((_platforms[platform], pTitle, "release-info")))
    suggestionsCSS = ".col-md-12 > div:nth-child(3) > ul:nth-child(2)"  # List of URLs
    alternativeTitlesCSS = [".col-md-8 > ul:nth-child(17)",
                            ".col-md-8 > ul:nth-child(18)",
                            ".col-md-8 > ul:nth-child(19)",  # List of alternative titles
                            ".col-md-8 > ul:nth-child(20)",  # Not all of them might be valid,
                            ".col-md-8 > ul:nth-child(21)",  # but I've seen 17, 19, 20, 25, 28, 31, 39, and 42
                            ".col-md-8 > ul:nth-child(22)",  # being used
                            ".col-md-8 > ul:nth-child(23)",
                            ".col-md-8 > ul:nth-child(24)",
                            ".col-md-8 > ul:nth-child(25)",
                            ".col-md-8 > ul:nth-child(26)",
                            ".col-md-8 > ul:nth-child(27)",
                            ".col-md-8 > ul:nth-child(28)",
                            ".col-md-8 > ul:nth-child(29)",
                            ".col-md-8 > ul:nth-child(30)",
                            ".col-md-8 > ul:nth-child(31)",
                            ".col-md-8 > ul:nth-child(32)",
                            ".col-md-8 > ul:nth-child(33)",
                            ".col-md-8 > ul:nth-child(34)",
                            ".col-md-8 > ul:nth-child(35)",
                            ".col-md-8 > ul:nth-child(36)",
                            ".col-md-8 > ul:nth-child(37)",
                            ".col-md-8 > ul:nth-child(38)",
                            ".col-md-8 > ul:nth-child(39)",
                            ".col-md-8 > ul:nth-child(40)",
                            ".col-md-8 > ul:nth-child(41)",
                            ".col-md-8 > ul:nth-child(42)"]

    # Find new url
    url = re.compile(r'".*"')  # URL is located within quotation marks
    soup = bs4.BeautifulSoup(res.text, 'html.parser')
    res = soup.select(suggestionsCSS)
    if len(res) > 0:
        suggestionURLs = [u.strip('"') for u in url.findall(res.pop().decode())]
    else:  # No suggestions found
        logger.warning("Couldn't find any suggestions.")
        return None, title, ""

    # Try each suggestion
    for suggestion in suggestionURLs:
        # The suggestions all use the Combined View. Insert the platform into url
        temp = suggestion.split("/")
        temp.insert(4, _platforms[platform])
        temp.append("release-info")
        newurl = "/".join(temp)
        logger.info(f"Trying with url: {newurl}")

        # Get the platform and title strings
        res = requests.get(newurl)
        soup = bs4.BeautifulSoup(res.text, "html.parser")
        te = soup.select(_titleCSS)
        pf = soup.select(_platformCSS)

        if len(te) == 0 or len(pf) == 0:  # This shouldn't happen but who knows
            continue
        if pf[0].text.strip() != platform:
            logger.info("Not the correct platform.")
            continue  # Not the right platform, abort

        newtitle = te[0].text.strip()
        # Sometimes ō is transliterated as ou or oo, and ū as uu
        if newtitle.lower() == title.lower() or \
                newtitle.lower() == title.replace("ō", "ou").lower() or \
                newtitle.lower() == title.replace("ou", "ō").lower() or \
                newtitle.lower() == title.replace("ō", "oo").lower() or \
                newtitle.lower() == title.replace("oo", "ō").lower() or \
                newtitle.lower() == title.replace("ū", "uu").lower() or \
                newtitle.lower() == title.replace("uu", "ū").lower():
            logger.info("Found match at url.")
            return res, title, newurl

        else:
            # Try removing any weird characters from the sides of the game name:
            t = title.rstrip("\\-%$£@")
            t = t.lstrip("\\-%$£@")
            if newtitle.lower() == t.lower() or \
                    newtitle.lower() == t.replace("ou", "ō").lower() or \
                    newtitle.lower() == t.replace("ō", "oo").lower() or \
                    newtitle.lower() == t.replace("oo", "ō").lower() or \
                    newtitle.lower() == t.replace("ū", "uu").lower() or \
                    newtitle.lower() == t.replace("uu", "ū").lower():
                logger.info(f"Found match at url with title '{t}'.")
                return res, t, newurl

            else:
                # Check the alternative titles (Japanese games often have different titles for example)
                logger.info(f"Platform matches, but not title ({newtitle}). Trying to find it in 'Alternate Titles'.")
                alturl = newurl.split("/")
                alturl = "/".join(alturl[:-1])  # Remove the 'release-info' part. Alt titles are on the main page.
                altres = requests.get(alturl)
                soup = bs4.BeautifulSoup(altres.text, "html.parser")

                temp = []
                for alt in alternativeTitlesCSS:
                    # Try to find the alt titles
                    temp = soup.select(alt)
                    if len(temp) > 0:
                        break

                if len(temp) == 0:  # Still nothing, give up
                    logger.info("No alternative titles found on page.")
                    continue

                altTitles = [t.strip('"') for t in url.findall(temp[0].text)]  # Not URLs but regex rule is the same
                for alt in altTitles:
                    logger.info(f"Found alternative title: '{alt}'.")
                    if alt.lower() == title.lower():
                        logger.info("Found match at url.")
                        return res, title, newurl
                    elif alt.lower() == t.lower():
                        logger.info(f"Found match at url with title '{t}'.")
                        return res, t, newurl

                    # Sometimes ō is transliterated as ou or oo, and ū as uu
                    elif alt.lower() == title.replace("ō", "ou").lower() or \
                            alt.lower() == title.replace("ou", "ō").lower() or \
                            alt.lower() == title.replace("ō", "oo").lower() or \
                            alt.lower() == title.replace("oo", "ō").lower() or \
                            alt.lower() == title.replace("ū", "uu").lower() or \
                            alt.lower() == title.replace("uu", "ū").lower():
                        logger.info(f"Found matching alternative title'.")
                        return res, title, newurl
                    elif alt.lower() == t.replace("ō", "ou").lower() or \
                            alt.lower() == t.replace("ou", "ō").lower() or \
                            alt.lower() == t.replace("ō", "oo").lower() or \
                            alt.lower() == t.replace("oo", "ō").lower() or \
                            alt.lower() == t.replace("ū", "uu").lower() or \
                            alt.lower() == t.replace("uu", "ū").lower():
                        logger.info(f"Found matching alternative title, with title '{t}'")
                        return res, t, newurl

                    else:
                        continue

    logger.info("Suggestions doesn't match the title.")
    return None, title, ""
Example No. 8
def getPriceData(title: str, platform: str, region: str, currency="USD") -> dict:
    """
    Tries to look up pricing info for a game on Pricecharting.com
    :param title: The title of the game we're looking for
    :param platform: Which platform the game is on
    :param region: Which regional release we're looking for
    :param currency: Which currency we want back. Possible options are: USD, AUD, BRL, CAD, EUR, GBP, and MXN
    :return: A dictionary with the game's current average prices (loose, cib, and new)
    """

    logger.info(f"Looking up price for '{title}' {region} on {platform}...")

    priceInfo = {"loose": "#used_price > span:nth-child(1)",
                 "cib": "#complete_price > span:nth-child(1)",
                 "new": "#new_price > span:nth-child(1)"}

    regions = {"NTSC (JP)": 0, "NTSC (NA)": 1, "PAL": 2}
    rates = {"USD": 1.0, "AUD": 0.0, "BRL": 0.0, "CAD": 0.0, "EUR": 0.0, "GBP": 0.0, "MXN": 0.0}
    sign = {"USD": "$", "AUD": "AUD ", "BRL": "R$", "CAD": "CAD ", "EUR": "€", "GBP": "£", "MXN": "Mex$"}
    ratesRegex = re.compile(r'("\w{3}":\d\.\d.*.)')

    pTitle = _parseTitle(title)
    pPlatform = _parsePlatform(platform)
    logger.info(f"Title parsed to '{pTitle}'.")
    logger.info(f"Platform parsed to '{pPlatform}'.")

    # Sanity check
    if region in ("PAL A", "PAL B"):
        logger.info(f"Changing {region} to 'PAL'...")
        region = "PAL"
    elif region not in ("NTSC (JP)", "NTSC (NA)", "PAL"):
        logger.info(f"Changing {region} to 'NTSC (NA)'")
        region = "NTSC (NA)"
    if pPlatform not in _platforms.keys():  # Platform not supported
        logger.error("Platform is not supported currently.")
        return {x: "N/A" for x in priceInfo.keys()}

    fullURL = _baseURL + "/".join((_platforms[pPlatform][regions[region]], pTitle))
    logger.info(f"Full url to price info is: {fullURL}")

    # Error handling
    try:
        res = requests.get(fullURL)
    except socket.gaierror:  # Most likely no internet connection
        logger.error("Couldn't establish connection.")
        return {x: "N/A" for x in priceInfo.keys()}
    try:
        res.raise_for_status()
        soup = bs4.BeautifulSoup(res.text, "html.parser")
    except requests.exceptions.HTTPError:  # Not found
        logger.error("Title not found.")
        return {x: "N/A" for x in priceInfo.keys()}
    if len(soup.select("#product_name > a:nth-child(1)")) == 0:  # Didn't find the right page, try suggestions
        soup = _trySuggestions(pPlatform, regions[region], soup)
        if soup is None:  # Still couldn't find anything
            logger.error("Title not found.")
            return {x: "N/A" for x in priceInfo.keys()}

    # Get currency rates
    currentRates = ratesRegex.findall(res.text)[0].split(",")
    for c in currentRates:
        cur, rate = c.split(":")
        rates[cur.strip('"')] = float(rate.rstrip("};"))

    for key, val in priceInfo.items():
        if len(soup.select(val)) == 0:  # Maybe not right page?
            priceInfo[key] = "N/A"
            continue

        price = ucd.normalize("NFKD", soup.select(val)[0].text.strip()).lstrip("$")
        if price == "N/A":  # No price found
            priceInfo[key] = "N/A"
            continue
        getcontext().prec = len(price) - 1  # Match precision to the price's digit count so the converted value keeps two decimals
        priceInfo[key] = sign[currency] + str(Decimal(price) * Decimal(rates[currency]))

    logger.info("Price data found.")
    return priceInfo
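
A brief usage sketch for getPriceData; the import path is an assumption and the arguments are only illustrative. The returned values are strings already prefixed with the currency sign, or "N/A" when no price is listed:

# Hypothetical usage of getPriceData; the module name is an assumption.
from pricecharting import getPriceData

prices = getPriceData("Chrono Trigger", "SNES", "NTSC (NA)", currency="EUR")
for condition, price in prices.items():
    print(f"{condition}: {price}")  # e.g. "loose: €12.34" or "new: N/A"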
Example No. 9
    def __init__(self, dbpath):
        super(MainWindow, self).__init__()

        # 'Add to collection' window
        self.addWindow = None

        # 'Import games' window
        self.importWindow = None

        # Side panel
        self.sidePanel = SidePanel()

        # Tables and their databases
        db = QSqlDatabase.addDatabase("QSQLITE")
        db.setDatabaseName(dbpath)
        if not db.open():
            logger.critical(f"Couldn't open database: {db.lastError().text()}")
            QMessageBox.critical(None, "Database Error", db.lastError().text())
        self.gamesTableView = Table("games", db)
        self.gamesTableView.doubleClick.connect(self.sidePanel.showDetails)
        self.consolesTableView = Table("consoles", db)
        self.consolesTableView.doubleClick.connect(self.sidePanel.showDetails)
        self.accessoriesTableView = Table("accessories", db)
        self.accessoriesTableView.doubleClick.connect(self.sidePanel.showDetails)
        self.tableViewList = [self.gamesTableView,
                              self.consolesTableView,
                              self.accessoriesTableView]

        self.allPlatforms = set()
        self.allRegions = set()
        self.allGenres = set()
        self.allYears = set()
        for table in self.tableViewList:
            for row in table.ownedItems():
                self.allPlatforms.add(row["platform"])
                self.allRegions.add(row["region"])
                self.allYears.add(row["year"])
                # Split multi-genre entries
                for genre in row["genre"].split(", "):
                    self.allGenres.add(genre)

        self.filterDock = FilterDock(sorted(self.allPlatforms, key=str.lower),
                                     sorted(self.allRegions, key=str.lower),
                                     sorted(self.allGenres, key=str.lower),
                                     sorted(self.allYears, key=str.lower))

        # Overview tab
        self.overview = Overview(self.tableViewList)

        # Randomizer tab
        self.randomizer = Randomizer(self.gamesTableView.ownedItems(),
                                     sorted(self.allPlatforms, key=str.lower),
                                     sorted(self.allGenres, key=str.lower))
        self.randomizer.consoleList.itemClicked.connect(self.updateStatusbar)
        self.randomizer.genreList.itemClicked.connect(self.updateStatusbar)
        self.randomizer.genreMatchExclusiveCB.stateChanged.connect(self.updateStatusbar)
        self.randomizer.btnAll.clicked.connect(self.updateStatusbar)
        self.randomizer.btnNone.clicked.connect(self.updateStatusbar)

        ## MainWindow layout
        # Widgets
        self.centralWidget = QWidget()
        self.setCentralWidget(self.centralWidget)
        self.tab = QTabWidget()

        self.toolbar = self.addToolBar("Exit")
        self.toolbar.addAction(self.buttonActions("exit"))
        self.toolbar.addAction(self.buttonActions("add"))
        self.toolbar.addAction(self.buttonActions("import"))

        self.fileMenu = self.menuBar().addMenu(self.tr("&File"))
        self.fileMenu.addAction(self.buttonActions("add"))
        self.fileMenu.addAction(self.buttonActions("export"))
        self.fileMenu.addAction(self.buttonActions("import"))
        self.fileMenu.addAction(self.buttonActions("steam"))
        self.fileMenu.addAction(self.buttonActions("fetch"))
        self.fileMenu.insertSeparator(self.buttonActions("exit"))
        self.fileMenu.addAction(self.buttonActions("exit"))
        self.viewMenu = self.menuBar().addMenu(self.tr("&View"))
        self.viewMenu.addAction(self.buttonActions("owned"))
        self.viewMenu.addAction(self.buttonActions("delnotowned"))
        self.viewMenu.addAction(self.buttonActions("value"))
        self.helpMenu = self.menuBar().addMenu(self.tr("&Help"))
        self.helpMenu.addAction(self.buttonActions("about"))

        self.statusProgressBar = QProgressBar()
        self.statusProgressBar.setMaximumSize(100, 15)
        self.statusProgressBar.setRange(0, 0)
        self.statusProgressBar.setVisible(False)
        self.statusBar().addPermanentWidget(self.statusProgressBar)

        # Search stuff
        self.searchLabel = QLabel("Search")
        self.searchLabel.setVisible(False)
        self.searchBox = QLineEdit()
        self.searchBox.setVisible(False)
        self.searchBox.setClearButtonEnabled(True)
        # self.searchBox.textChanged.connect(self.search)
        self.searchBox.returnPressed.connect(self.search)
        self.searchBtn = QPushButton("Search")
        self.searchBtn.clicked.connect(self.search)
        self.searchBtn.setVisible(False)
        self.filterBtn = QPushButton("Filter")
        self.filterBtn.clicked.connect(self.filterDock.toggleVisibility)
        self.filterBtn.setVisible(False)

        # Tab layout.
        self.tab.addTab(self.overview.widget, "Overview")
        self.tab.addTab(self.gamesTableView, "Games")
        self.tab.addTab(self.consolesTableView, "Consoles")
        self.tab.addTab(self.accessoriesTableView, "Accessories")
        self.tab.addTab(self.randomizer.widget, "Randomizer")
        self.tab.currentChanged.connect(self.search)
        self.tab.currentChanged.connect(self.sidePanel.hideDetails)
        # Connect sidePanel's saved signal to corresponding table's updateData()
        # TODO: Update the sets of platforms and genres properly
        self.sidePanel.saved.connect(self.tableViewList[self.tab.currentIndex()].updateData)
        self.sidePanel.saved.connect(lambda: self.randomizer.updateLists(self.gamesTableView.ownedItems(),
                                                                         sorted(self.allPlatforms, key=str.lower),
                                                                         sorted(self.allGenres, key=str.lower)))

        # Main layout
        self.tabHbox = QHBoxLayout()
        self.tabHbox.addWidget(self.tab, 1)
        self.tabHbox.addWidget(self.sidePanel, 1)
        self.advSearchHbox = QHBoxLayout()
        self.advSearchHbox.addWidget(self.filterDock, 0)
        self.searchHbox = QHBoxLayout()
        self.searchHbox.addWidget(self.searchLabel, 0)
        self.searchHbox.addWidget(self.searchBox, 1)
        self.searchHbox.addWidget(self.filterBtn, 0)
        self.searchHbox.addWidget(self.searchBtn, 0)
        self.mainLayout = QVBoxLayout()
        self.mainLayout.addLayout(self.tabHbox, 1)
        self.mainLayout.addLayout(self.advSearchHbox, 0)
        self.mainLayout.addLayout(self.searchHbox, 0)
        self.centralWidget.setLayout(self.mainLayout)

        # Make sure screen geometry is big enough. Otherwise set window to maximized.
        gSize = QApplication.desktop().availableGeometry()
        if gSize.width() <= 1280 or gSize.height() <= 768:
            logger.info("Screen geometry smaller than 1280x768. Setting window to maximized mode.")
            self.showMaximized()
        else:
            self.resize(1280, 768)
            self.center()

        self.setWindowTitle(f"Game Collection Manager v{_VERSION}")
        self.statusBar().showMessage("")
Example No. 10
def updateYears(self, years):
    self._years.clear()
    self._years.addItems(years)
    logger.info("Updated years list.")
Example No. 11
def updateGenres(self, genres):
    self._genres.clear()
    self._genres.addItems(genres)
    logger.info("Updated genres list.")
Example No. 12
def updateRegions(self, regions):
    self._regions.clear()
    self._regions.addItems(regions)
    logger.info("Updated regions list.")
Example No. 13
def updatePlatforms(self, platforms):
    self._platforms.clear()
    self._platforms.addItems(platforms)
    logger.info("Updated platforms list.")
Example No. 14
def main():
    gcmDir = getScriptDir()
    dbPath = gcmDir + "/data/db/collection.db"
    logger.info(f"Program directory set to {gcmDir}.")

    # Make sure we have everything
    if not os.path.exists(gcmDir + "/data"):
        logger.critical(
            "Data directory doesn't exist. Creating '/data/db' and '/data/vgdb'..."
        )
        os.makedirs(gcmDir + "/data/db")
        os.mkdir(gcmDir + "/data/vgdb")
        createDB(dbPath)
    if not os.path.exists(gcmDir + "/data/db"):
        logger.critical("'/data/db' doest't exist. Creating...")
        os.mkdir(gcmDir + "/data/db")
        createDB(dbPath)
    if not os.path.exists(gcmDir + "/data/vgdb"):
        os.mkdir(gcmDir + "/data/vgdb")
    if not os.path.exists(dbPath):
        logger.critical("No database was found.")
        createDB(dbPath)

    createWindow(dbPath)


if __name__ == "__main__":
    logger.info("Starting up...")
    main()