Example #1
    def test_parse_filename(self):
        self.assertEqual(
            util.parse_filename(
                'MP Replay v101.101.34793.0 @2020.02.13 213505 (1).aoe2record'
            ), (datetime.datetime(2020, 2, 13, 21, 35, 5), '101.101.34793.0'))

        self.assertEqual(
            util.parse_filename('rec.20190615-112706-anything.mgz'),
            (datetime.datetime(2019, 6, 15, 11, 27, 6), None))

        self.assertEqual(
            util.parse_filename(
                'recorded game -  29-Mar-2001 00`35`51 3v3 iketh vs woogy.mgx'
            ), (datetime.datetime(2001, 3, 29, 0, 35, 51), None))
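
These assertions pin down what util.parse_filename is expected to return: the time the game was played plus, for Definitive Edition replays, the build version embedded in the name. As a rough sketch only (the real mgzdb implementation may be organised quite differently), a small regex table along these lines would satisfy the three cases above:

import datetime
import re

# Hypothetical sketch of parse_filename(); patterns mirror the filenames in the test.
PATTERNS = [
    # 'MP Replay v101.101.34793.0 @2020.02.13 213505 (1).aoe2record'
    (re.compile(r'MP Replay v([0-9.]+) @(\d{4})\.(\d{2})\.(\d{2}) (\d{2})(\d{2})(\d{2})'),
     lambda m: (datetime.datetime(*(int(g) for g in m.groups()[1:])), m.group(1))),
    # 'rec.20190615-112706-anything.mgz'
    (re.compile(r'rec\.(\d{4})(\d{2})(\d{2})-(\d{2})(\d{2})(\d{2})'),
     lambda m: (datetime.datetime(*(int(g) for g in m.groups())), None)),
    # 'recorded game -  29-Mar-2001 00`35`51 ... .mgx'
    (re.compile(r'recorded game - +(\d{1,2}-[A-Za-z]{3}-\d{4} \d{2}`\d{2}`\d{2})'),
     lambda m: (datetime.datetime.strptime(m.group(1), '%d-%b-%Y %H`%M`%S'), None)),
]


def parse_filename(filename):
    """Return (played datetime, version string) guessed from a recording filename."""
    for pattern, extract in PATTERNS:
        match = pattern.search(filename)
        if match:
            return extract(match)
    return None, None
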
Example #2
 def add_zip(self, platform_id, zip_path):
     """Add matches via zip file."""
     with zipfile.ZipFile(zip_path) as series_zip:
         LOGGER.info("[%s] opened archive", os.path.basename(zip_path))
         for zi in series_zip.infolist():
             series_zip.extract(zi, path=self.temp_dir.name)
             date_time = time.mktime(zi.date_time + (0, 0, -1))
             os.utime(os.path.join(self.temp_dir.name, zi.filename),
                      (date_time, date_time))
         for filename in sorted(series_zip.namelist()):
             if filename.endswith('/'):
                 continue
             if not filename.endswith('.mgz'):
                 continue
             LOGGER.info("[%s] processing member %s",
                         os.path.basename(zip_path), filename)
             played, _ = parse_filename(os.path.basename(filename))
             if not played:
                 played = datetime.fromtimestamp(
                     os.path.getmtime(
                         os.path.join(self.temp_dir.name, filename)))
             self.add_file(os.path.join(self.temp_dir.name, filename),
                           os.path.basename(zip_path),
                           platform_id=platform_id,
                           played=played)
         LOGGER.info("[%s] finished", os.path.basename(zip_path))
Example #3
    def test_parse_filename(self):
        self.assertEqual(
            util.parse_filename(
                'MP Replay v101.101.34793.0 @2020.02.13 213505 (1).aoe2record'
            ), (datetime.datetime(2020, 2, 13, 21, 35, 5), '101.101.34793.0'))

        self.assertEqual(
            util.parse_filename('rec.20190615-112706-anything.mgz'),
            (datetime.datetime(2019, 6, 15, 11, 27, 6), None))

        self.assertEqual(
            util.parse_filename(
                'recorded game -  29-Mar-2001 00`35`51 3v3 iketh vs woogy.mgx'
            ), (datetime.datetime(2001, 3, 29, 0, 35, 51), None))

        self.assertEqual(
            util.parse_filename('partida-grabada-21-sep-2010-22-19-44.mgx'),
            (datetime.datetime(2010, 9, 21, 22, 19, 44), None))

        self.assertEqual(
            util.parse_filename(
                'MP Replay v5.8 @2019.06.16 221513 (4).aoe2record'),
            (datetime.datetime(2019, 6, 16, 22, 15, 13), '5.8'))
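
The expanded test adds two more formats: the Spanish HD-era name 'partida-grabada-…' and a short 'v5.8' build string, which confirms that the version capture cannot assume a full DE build number (the greedy '[0-9.]+' group in the sketch after Example #1 already accepts both). As an illustration only, not the actual mgzdb parser, a handler for the Spanish filename could look like this:

import datetime
import re

# Hypothetical handler for the 'partida-grabada' filenames tested above.
SPANISH = re.compile(r'partida-grabada-(\d{1,2}-[a-z]{3}-\d{4})-(\d{2})-(\d{2})-(\d{2})')


def parse_spanish_name(filename):
    match = SPANISH.search(filename)
    if not match:
        return None, None
    # %b matches the abbreviated month name case-insensitively ('sep' -> September).
    played = datetime.datetime.strptime(match.group(1), '%d-%b-%Y')
    return played.replace(hour=int(match.group(2)),
                          minute=int(match.group(3)),
                          second=int(match.group(4))), None
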
Example #4
 def add_series(self, archive_path, series=None, series_id=None):
     """Add a series via zip file."""
     try:
         if archive_path.endswith('zip'):
             compressed = zipfile.ZipFile(archive_path)
         elif archive_path.endswith('rar'):
             compressed = rarfile.RarFile(archive_path)
         else:
             LOGGER.error("[%s] not a valid archive",
                          os.path.basename(archive_path))
             return
     except (zipfile.BadZipFile, rarfile.Error):
         LOGGER.error("[%s] bad archive", os.path.basename(archive_path))
         return
     with compressed as series_zip:
         LOGGER.info("[%s] opened archive", os.path.basename(archive_path))
         for zip_member in series_zip.infolist():
             series_zip.extract(zip_member, path=self.temp_dir.name)
             date_time = time.mktime(zip_member.date_time + (0, 0, -1))
             os.utime(os.path.join(self.temp_dir.name, zip_member.filename),
                      (date_time, date_time))
         for filename in sorted(series_zip.namelist()):
             if filename.endswith('/'):
                 continue
             LOGGER.info("[%s] processing member %s",
                         os.path.basename(archive_path), filename)
             played, _ = parse_filename(os.path.basename(filename))
             if not played:
                 played = datetime.fromtimestamp(
                     os.path.getmtime(
                         os.path.join(self.temp_dir.name, filename)))
             self.add_file(os.path.join(self.temp_dir.name, filename),
                           os.path.basename(archive_path),
                           series,
                           series_id,
                           played=played)
         LOGGER.info("[%s] finished", os.path.basename(archive_path))
Example #5
 def add_zip(self, platform_id, archive_path):
     """Add matches via zip file."""
     guess = platform_id == 'auto'
     try:
         if archive_path.endswith('zip'):
             compressed = zipfile.ZipFile(archive_path)
         elif archive_path.endswith('rar'):
             compressed = rarfile.RarFile(archive_path)
         else:
             LOGGER.error("[%s] not a valid archive",
                          os.path.basename(archive_path))
             return
     except (zipfile.BadZipFile, rarfile.Error):
         LOGGER.error("[%s] bad archive", os.path.basename(archive_path))
         return
     with compressed as series_zip:
         LOGGER.info("[%s] opened archive", os.path.basename(archive_path))
         for compressed_file in compressed.infolist():
             try:
                 series_zip.extract(compressed_file,
                                    path=self.temp_dir.name)
                 date_time = time.mktime(compressed_file.date_time +
                                         (0, 0, -1))
                 os.utime(
                     os.path.join(self.temp_dir.name,
                                  compressed_file.filename),
                     (date_time, date_time))
             except (zipfile.BadZipFile, rarfile.Error):
                 LOGGER.error("Failed to extract file")
                 return
         for filename in sorted(compressed.namelist()):
             if filename.endswith('/'):
                 continue
             if not (filename.endswith('.mgz') or filename.endswith('.mgx')
                     or filename.endswith('.mgl')
                     or filename.endswith('.aoe2record')):
                 continue
             LOGGER.info("[%s] processing member %s",
                         os.path.basename(archive_path), filename)
             played, _ = parse_filename(os.path.basename(filename))
             if not played:
                 played = datetime.fromtimestamp(
                     os.path.getmtime(
                         os.path.join(self.temp_dir.name, filename)))
             if guess and played:
                 if played >= datetime(2009, 9, 17):
                     platform_id = 'voobly'
                 elif played < datetime(2009, 9, 17) and played >= datetime(
                         2007, 9, 20):
                     platform_id = 'igz'
                 elif played < datetime(2007, 9, 20) and played >= datetime(
                         2006, 8, 1):
                     platform_id = 'gamepark'
                 elif played < datetime(2006, 8, 1):
                     platform_id = 'zone'
                 else:
                     platform_id = None
             elif guess and not played:
                 platform_id = None
             self.add_file(os.path.join(self.temp_dir.name, filename),
                           os.path.basename(archive_path),
                           platform_id=platform_id,
                           played=played)
         LOGGER.info("[%s] finished", os.path.basename(archive_path))
Example #6
    def add_file(  # pylint: disable=too-many-return-statements, too-many-branches
            self,
            rec_path,
            reference,
            series_name=None,
            series_id=None,
            platform_id=None,
            platform_match_id=None,
            platform_metadata=None,
            played=None,
            ladder=None,
            user_data=None):
        """Add a single mgz file."""
        start = time.time()
        if not os.path.isfile(rec_path):
            LOGGER.error("%s is not a file", rec_path)
            return False, 'Not a file'

        original_filename = os.path.basename(rec_path)
        modified = datetime.fromtimestamp(os.path.getmtime(rec_path))

        with open(rec_path, 'rb') as handle:
            data = handle.read()

        try:
            handle = io.BytesIO(data)
            playback = self.playback
            if rec_path.endswith('aoe2record') and os.path.exists(
                    rec_path.replace('.aoe2record', '.json')):
                playback = open(rec_path.replace('.aoe2record', '.json'))
            summary = mgz.summary.Summary(handle, playback)
            # Hash against body only because header can vary based on compression
            file_hash = summary.get_file_hash()
            log_id = file_hash[:LOG_ID_LENGTH]
            LOGGER.info("[f:%s] add started", log_id)
        except RuntimeError as error:
            LOGGER.error("[f] invalid mgz file: %s", str(error))
            return False, 'Invalid mgz file'
        except LookupError:
            LOGGER.error("[f] unknown encoding")
            return False, 'Unknown text encoding'
        except ValueError as error:
            LOGGER.error("[f] error: %s", error)
            return False, str(error)

        existing_match_id = file_exists(self.session, file_hash, series_name,
                                        series_id, modified)
        if existing_match_id:
            LOGGER.warning("[f:%s] file already exists (%d)", log_id,
                           existing_match_id)
            #self._handle_file(file_hash, data, Version(summary.get_version()[0]))
            return None, existing_match_id

        try:
            encoding = summary.get_encoding()
        except ValueError as error:
            LOGGER.error("[f] error: %s", error)
            return False, str(error)
        match_hash_obj = summary.get_hash()
        if not match_hash_obj:
            LOGGER.error("f:%s] not enough data to calculate safe match hash",
                         log_id)
            return False, 'Not enough data to calculate safe match hash'
        match_hash = match_hash_obj.hexdigest()
        build = None

        try:
            if not platform_match_id and summary.get_platform(
            )['platform_match_id']:
                platform_match_id = summary.get_platform()['platform_match_id']
            where = (Match.hash == match_hash)
            if platform_match_id:
                where |= (Match.platform_match_id == platform_match_id)
            match = self.session.query(Match).filter(where).one()
            LOGGER.info("[f:%s] match already exists (%d); appending", log_id,
                        match.id)
            series_match_exists(self.session, match, series_id, series_name)
        except MultipleResultsFound:
            LOGGER.error("[f:%s] mismatched hash and match id: %s, %s", log_id,
                         match_hash, platform_match_id)
            return False, 'Mismatched hash and match id'
        except NoResultFound:
            LOGGER.info("[f:%s] adding match", log_id)
            parsed_played, build = parse_filename(original_filename)
            if not played:
                played = parsed_played
            try:
                match, message = self._add_match(summary, played, match_hash,
                                                 user_data, series_name,
                                                 series_id, platform_id,
                                                 platform_match_id,
                                                 platform_metadata, ladder,
                                                 build)
                if not match:
                    return False, message
                self._update_match_users(platform_id, match.id, user_data)
                self._update_match_hd(match)
                self._update_match_dashboard(match, user_data)
            except IntegrityError:
                LOGGER.error(
                    "[f:%s] constraint violation: could not add match", log_id)
                return False, 'Failed to add match'
            except MemoryError:
                LOGGER.error("[f:%s] compression failed", log_id)
                return False, 'Compression failed'

        try:
            compressed_filename, compressed_size = self._handle_file(
                file_hash, data, Version(match.version_id))
        except MemoryError:
            LOGGER.error("[f:%s] compression failed", log_id)
            return False, 'Compression failed'

        try:
            new_file = get_unique(
                self.session,
                File, ['hash'],
                filename=compressed_filename,
                original_filename=original_filename,
                hash=file_hash,
                size=summary.size,
                modified=modified,
                compressed_size=compressed_size,
                encoding=encoding,
                language=summary.get_language(),
                reference=reference,
                match=match,
                owner_number=summary.get_owner(),
                parser_version=pkg_resources.get_distribution('mgz').version)
            self.session.add(new_file)
            self.session.commit()
        except RuntimeError:
            LOGGER.error("[f:%s] unable to add file, likely hash collision",
                         log_id)
            return False, 'File hash collision'

        LOGGER.info(
            "[f:%s] add finished in %.2f seconds, file id: %d, match id: %d",
            log_id,
            time.time() - start, new_file.id, match.id)
        return file_hash, match.id
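
Two conventions here are worth spelling out: the file is identified by a hash of the record body only (headers can differ between compressions of the same game), and a truncated prefix of that hash is reused as a correlation id in every log line. A compressed illustration of that pattern, where the hash algorithm, header size and LOG_ID_LENGTH value are assumptions rather than the project's actual choices:

import hashlib

LOG_ID_LENGTH = 8  # assumed; the real constant is defined elsewhere in mgzdb


def body_hash(data, header_size):
    """Hash only the record body so header differences don't change the id."""
    return hashlib.sha1(data[header_size:]).hexdigest()


# Illustrative bytes only; a real call passes raw .mgz/.aoe2record contents.
file_hash = body_hash(b'\x00' * 64 + b'example body bytes', header_size=64)
log_id = file_hash[:LOG_ID_LENGTH]
print(log_id)
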