Example #1
def validate_dat(file: Path, use_hashes: bool) -> None:
    """Sanity-check the DAT before any processing is done.

    Aborts when hash matching is requested but the DAT lacks SHA1 digests,
    and asks for confirmation when the DAT has no parent/clone metadata.
    """
    root = datafile.parse(file, silence=True)
    has_cloneof = False
    lacks_sha1 = False
    offending_entry = ''
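    # A parent/clone DAT is detected by the presence of any cloneof attribute.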
    for game in root.game:
        if game.cloneof:
            has_cloneof = True
            break
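    # Flag any ROM entry that is missing its SHA1 digest and note the game it belongs to.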
    for game in root.game:
        for game_rom in game.rom:
            if not game_rom.sha1:
                lacks_sha1 = True
                offending_entry = game.name
                break
    if use_hashes and lacks_sha1:
        sys.exit(
            'ERROR: Cannot use hash information because DAT lacks SHA1 digests '
            'for [%s].' % offending_entry)
    if not has_cloneof:
        print('This DAT *seems* to be a Standard DAT', file=sys.stderr)
        print(
            'A Parent/Clone XML DAT is required to generate a 1G1R ROM set',
            file=sys.stderr)
        if use_hashes:
            print(
                'If you are using this to rename files based on their hashes, '
                'a Standard DAT is enough',
                file=sys.stderr)
        print('Do you want to continue anyway? (y/n)', file=sys.stderr)
        answer = input()
        if answer.strip() not in ('y', 'Y'):
            sys.exit()
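
All three examples read the DAT through a datafile module that is not shown here. The stand-in below is only a sketch: the class and attribute names are inferred from how the snippets use the parsed tree, and are assumptions rather than the module's actual definitions.

# Hypothetical stand-in for the parsed DAT tree used by the examples.
# A real datafile.parse(dat_path, silence=True) would return such an object.
from dataclasses import dataclass, field
from typing import List, Optional


@dataclass
class Rom:
    name: str = ''
    sha1: Optional[str] = None


@dataclass
class Release:
    region: Optional[str] = None


@dataclass
class Game:
    name: str = ''
    cloneof: Optional[str] = None  # set when the game is a clone of another entry
    rom: List[Rom] = field(default_factory=list)
    release: List[Release] = field(default_factory=list)


@dataclass
class Datafile:
    game: List[Game] = field(default_factory=list)
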
Example #2
def index_files(
        input_dir: Path,
        dat_file: Path) -> Dict[str, Optional[Path]]:
    """Build a SHA1 -> file index for input_dir: every digest listed in the
    DAT maps to the matching file on disk, or to None when nothing matches."""
    result: Dict[str, Optional[Path]] = {}
    also_check_archive: bool = False
    root = datafile.parse(dat_file, silence=True)
    global RULES
    if not RULES:
        RULES = get_header_rules(root)
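    # Seed the index with every SHA1 in the DAT and note whether any entry is itself an archive.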
    for game in root.game:
        for rom_entry in game.rom:
            result[rom_entry.sha1.lower()] = None
            also_check_archive |= bool(ZIP_REGEX.search(rom_entry.name))
    print('Scanning directory: %s\033[K' % input_dir, file=sys.stderr)
    files_data = []
    for full_path in input_dir.rglob('*'):
        if not full_path.is_file():
            continue
        try:
            print(
                '%s%s\033[K' % (
                    FOUND_PREFIX,
                    trim_to(
                        full_path.relative_to(input_dir),
                        available_columns(FOUND_PREFIX) - 2)),
                end='\r',
                file=sys.stderr)
            file_size = full_path.stat().st_size
            files_data.append(FileData(file_size, full_path))
        except OSError as e:
            print(
                'Error while reading file: %s\033[K' % e,
                file=sys.stderr)
    files_data.sort(key=FileData.get_size, reverse=True)
    print('%s%i files\033[K' % (FOUND_PREFIX, len(files_data)), file=sys.stderr)

    # Hash the collected files in parallel to fill in the index values.
    if files_data:
        global PROGRESSBAR
        PROGRESSBAR = MultiThreadedProgressBar(
            len(files_data),
            THREADS,
            prefix='Calculating hashes')
        PROGRESSBAR.init()

        # Worker body: repeatedly pop a file, hash it, and record the result.
        def process_thread_with_progress(
                shared_files_data: List[FileData],
                shared_result_data: List[Dict[str, Path]]) -> None:
            curr_thread = current_thread()
            if not isinstance(curr_thread, IndexedThread):
                sys.exit('Bad thread type. Expected %s' % IndexedThread)
            while True:
                try:
                    next_file = shared_files_data.pop(0)
                    PROGRESSBAR.print_thread(
                        curr_thread.index,
                        next_file.path.relative_to(input_dir))
                    shared_result_data.append(process_file(
                        next_file,
                        also_check_archive))
                    PROGRESSBAR.print_bar()
                except IndexError:
                    PROGRESSBAR.print_thread(curr_thread.index, "DONE")
                    break

        # One worker per configured thread; each pops files until the list is empty.
        threads = []
        intermediate_results = []
        for i in range(THREADS):
            t = IndexedThread(
                index=i,
                target=process_thread_with_progress,
                args=[files_data, intermediate_results],
                daemon=True)
            t.start()
            threads.append(t)

        for t in threads:
            t.join()

        print('\n', file=sys.stderr)

        # Merge per-thread matches; an existing zip-archive match is never overwritten.
        for intermediate_result in intermediate_results:
            for key, value in intermediate_result.items():
                if key in result and not (
                        result[key] and is_zipfile(result[key])):
                    result[key] = value
    return result
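
Example #2 also leans on two helpers defined elsewhere: IndexedThread, a worker thread that knows its slot in the progress bar, and FileData, a small size/path record. The definitions below are one possible shape consistent with how the code above uses them; the NamedTuple choice and field names are assumptions.

# Sketch of the helper types Example #2 expects; the real definitions may differ.
from pathlib import Path
from threading import Thread
from typing import NamedTuple


class IndexedThread(Thread):
    """Thread that remembers its index so the progress bar can print per-thread lines."""

    def __init__(self, index: int, **kwargs):
        super().__init__(**kwargs)
        self.index = index


class FileData(NamedTuple):
    size: int
    path: Path

    def get_size(self) -> int:
        # Used as the sort key in files_data.sort(key=FileData.get_size, reverse=True).
        return self.size
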
Example #3
def parse_games(
        file: Path,
        filter_bios: bool,
        filter_program: bool,
        filter_enhancement_chip: bool,
        filter_pirate: bool,
        filter_promo: bool,
        filter_unlicensed: bool,
        filter_proto: bool,
        filter_beta: bool,
        filter_demo: bool,
        filter_sample: bool,
        exclude: List[Pattern]) -> Dict[str, List[GameEntry]]:
    """Read the DAT and group GameEntry objects by parent name, skipping
    entries that match any enabled category filter or exclude pattern."""
    games: Dict[str, List[GameEntry]] = {}
    root = datafile.parse(file, silence=True)
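    # Walk the DAT, skipping entries that match any enabled filter or exclude pattern.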
    for input_index, game in enumerate(root.game):
        beta_match = BETA_REGEX.search(game.name)
        demo_match = DEMO_REGEX.search(game.name)
        sample_match = SAMPLE_REGEX.search(game.name)
        proto_match = PROTO_REGEX.search(game.name)
        if filter_bios and BIOS_REGEX.search(game.name):
            continue
        if filter_unlicensed and UNL_REGEX.search(game.name):
            continue
        if filter_pirate and PIRATE_REGEX.search(game.name):
            continue
        if filter_promo and PROMO_REGEX.search(game.name):
            continue
        if filter_program and PROGRAM_REGEX.search(game.name):
            continue
        if filter_enhancement_chip and ENHANCEMENT_CHIP_REGEX.search(game.name):
            continue
        if filter_beta and beta_match:
            continue
        if filter_demo and demo_match:
            continue
        if filter_sample and sample_match:
            continue
        if filter_proto and proto_match:
            continue
        if check_in_pattern_list(game.name, exclude):
            continue
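        # Parse the naming metadata (revision, version, regions, languages) for this entry.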
        is_parent = not game.cloneof
        is_bad = bool(BAD_REGEX.search(game.name))
        beta = parse_prerelease(beta_match)
        demo = parse_prerelease(demo_match)
        sample = parse_prerelease(sample_match)
        proto = parse_prerelease(proto_match)
        is_prerelease = bool(
            beta_match
            or demo_match
            or sample_match
            or proto_match)
        revision = parse_revision(game.name)
        version = parse_version(game.name)
        region_data = parse_region_data(game.name)
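        # Merge regions declared in <release> elements with those parsed from the name.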
        for release in game.release:
            if release.region and not is_present(release.region, region_data):
                region_data.append(get_region_data(release.region))
        languages = parse_languages(game.name)
        if not languages:
            languages = get_languages(region_data)
        parent_name = game.cloneof if game.cloneof else game.name
        region_codes = [rd.code for rd in region_data]
        game_entries: List[GameEntry] = []
        for region in region_codes:
            game_entries.append(
                GameEntry(
                    is_bad,
                    is_prerelease,
                    region,
                    languages,
                    input_index,
                    revision,
                    version,
                    sample,
                    demo,
                    beta,
                    proto,
                    is_parent,
                    game.name,
                    game.rom if game.rom else []))
        if game_entries:
            if parent_name not in games:
                games[parent_name] = game_entries
            else:
                games[parent_name].extend(game_entries)
        else:
            log('WARNING [%s]: no recognizable regions found' % game.name)
        if not game.rom:
            log('WARNING [%s]: no ROMs found in the DAT file' % game.name)
    return games
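
A hypothetical driver tying the three functions together. The main() wrapper, the hard-coded paths, and the flag values are illustrative assumptions; they are not part of the original script, whose real entry point is not shown here.

from pathlib import Path


def main() -> None:
    # Illustrative inputs; a real invocation would take these from the command line.
    dat_file = Path('console.dat')
    rom_dir = Path('roms')
    use_hashes = True

    # Refuse to continue on DATs that cannot support the requested mode (Example #1).
    validate_dat(dat_file, use_hashes)

    # SHA1 -> file index, only needed when matching by hash (Example #2).
    file_index = index_files(rom_dir, dat_file) if use_hashes else {}

    # Group candidate entries by parent title with every filter disabled (Example #3).
    games = parse_games(
        dat_file,
        filter_bios=False,
        filter_program=False,
        filter_enhancement_chip=False,
        filter_pirate=False,
        filter_promo=False,
        filter_unlicensed=False,
        filter_proto=False,
        filter_beta=False,
        filter_demo=False,
        filter_sample=False,
        exclude=[])
    print('%d parent titles, %d hashes indexed' % (len(games), len(file_index)))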