def init_tag_database(self, importer=None): """ Create/open .sqlite database of game header tags """ # Import .pgn header tags to .sqlite database sqlite_path = self.path.replace(".pgn", ".sqlite") if os.path.isfile( self.path) and os.path.isfile(sqlite_path) and getmtime( self.path) > getmtime(sqlite_path): metadata.drop_all(self.engine) metadata.create_all(self.engine) ini_schema_version(self.engine) size = self.size if size > 0 and self.tag_database.count == 0: if size > 10000000: drop_indexes(self.engine) if self.progressbar is not None: from gi.repository import GLib GLib.idle_add(self.progressbar.set_text, _("Importing game headers...")) if importer is None: importer = PgnImport(self) importer.initialize() importer.do_import(self.path, progressbar=self.progressbar) if size > 10000000 and not importer.cancel: create_indexes(self.engine) return importer
def init_tag_database(self, importer=None):
    """Create/open the .sqlite database of game header tags.

    Rebuilds the tag schema when the .pgn file is newer than its
    companion .sqlite file, then imports headers if none are present.

    :param importer: optional importer to reuse; one is created on
        demand when the tag database is empty.
    :return: the importer used (may still be ``None``).
    """
    db_path = self.path.replace(".pgn", ".sqlite")
    pgn_exists = os.path.isfile(self.path)
    db_exists = os.path.isfile(db_path)
    if pgn_exists and db_exists and getmtime(self.path) > getmtime(db_path):
        # The .pgn changed since the database was built: start fresh.
        metadata.drop_all(self.engine)
        metadata.create_all(self.engine)
        ini_schema_version(self.engine)

    pgn_size = self.size
    if pgn_size > 0 and self.tag_database.count == 0:
        big_file = pgn_size > 10000000
        if big_file:
            # Bulk inserts run faster without index maintenance.
            drop_indexes(self.engine)
        if self.progressbar is not None:
            GLib.idle_add(self.progressbar.set_text,
                          _("Importing game headers..."))
        importer = importer if importer is not None else PgnImport(self)
        importer.initialize()
        importer.do_import(self.path, progressbar=self.progressbar)
        # Restore the indexes we dropped, unless the user cancelled.
        if big_file and not importer.cancel:
            create_indexes(self.engine)

    return importer
# NOTE(review): this chunk begins mid-expression — the select()/join it
# belongs to starts outside the visible source — so it is left
# byte-identical. It ends with a __main__ driver that rebuilds the
# schema, imports .pgn/.zip files from a path argument, and times the
# run. NOTE(review): unicode() is Python-2-only; under Python 3 this
# raises NameError — confirm the target interpreter.
game.c.event_id==a1.c.id, game.c.site_id==a2.c.id, game.c.white_id==a3.c.id, game.c.black_id==a4.c.id)).where(and_(a3.c.name.startswith(unicode("Réti")), a4.c.name.startswith(unicode("Van Nüss")))) result = self.conn.execute(s) games = result.fetchall() for g in games: print("%s %s %s %s %s %s %s %s %s %s %s %s" % (g['id'], g['event'], g['site'], g['white'], g['black'], g[5], g[6], g[7], g['eco'], reprResult[g['result']], g['white_elo'], g['black_elo'])) if __name__ == "__main__": if 1: metadata.drop_all(engine) metadata.create_all(engine) imp = PgnImport() from .timer import Timer if len(sys.argv) > 1: arg = sys.argv[1] with Timer() as t: if arg[-4:].lower() in (".pgn", ".zip"): if os.path.isfile(arg): imp.do_import(arg) elif os.path.exists(arg): for file in sorted(os.listdir(arg)): if file[-4:].lower() in (".pgn", ".zip"): imp.do_import(os.path.join(arg, file)) print("Elapsed time (secs): %s" % t.elapsed_secs)
# NOTE(review): duplicate variant of the previous chunk; it also begins
# mid-expression (inside a .where(...) whose select() head is outside
# the visible source) and is left byte-identical. Same caveat applies:
# unicode() is Python-2-only and raises NameError on Python 3 — confirm
# the target interpreter.
and_(a3.c.name.startswith(unicode("Réti")), a4.c.name.startswith(unicode("Van Nüss")))) result = self.conn.execute(s) games = result.fetchall() for g in games: print("%s %s %s %s %s %s %s %s %s %s %s %s" % (g['id'], g['event'], g['site'], g['white'], g['black'], g[5], g[6], g[7], g['eco'], reprResult[g['result']], g['white_elo'], g['black_elo'])) if __name__ == "__main__": if 1: metadata.drop_all(engine) metadata.create_all(engine) imp = PgnImport() from .timer import Timer if len(sys.argv) > 1: arg = sys.argv[1] with Timer() as t: if arg[-4:].lower() in (".pgn", ".zip"): if os.path.isfile(arg): imp.do_import(arg) elif os.path.exists(arg): for file in sorted(os.listdir(arg)): if file[-4:].lower() in (".pgn", ".zip"): imp.do_import(os.path.join(arg, file)) print("Elapsed time (secs): %s" % t.elapsed_secs)
def tearDown(self):
    """Reset the schema after each test: drop all tables, then rebuild."""
    eng = self.engine
    metadata.drop_all(eng)
    metadata.create_all(eng)
def setUp(self):
    """Bind the model to an in-memory SQLite engine and open a connection."""
    model.set_engine("sqlite://")
    eng = model.engine
    metadata.create_all(eng)
    self.conn = eng.connect()
def tearDown(self):
    """Rebuild the schema after each test, unless an external test DB is in use."""
    if self.test_db is not None:
        return
    metadata.drop_all(self.engine)
    metadata.create_all(self.engine)