Example #1
def restoredb(config, restorefs):
    # Restore the database from a gzip-compressed backup that holds one
    # JSON object per line: {"table": <name>, "data": [<rows>...]}.
    if not os.path.exists(restorefs):
        print 'backup file does not exist'
    else:
        engine = get_engine(config)
        db = engine.connect()
        metadata = models.get_metadata(engine)
        with gzip.open(restorefs, 'rb') as rfs:
            for line in rfs:
                try:
                    obj = json.loads(line)
                    print "delete from %s" % obj['table']
                    db.execute("delete from %s" % obj['table'])
                    print 'insert data into %s' % obj['table']
                    objs = obj['data']
                    if len(objs) < 500:
                        if objs:
                            db.execute(metadata.tables[
                                obj['table']].insert().values(objs))
                    else:
                        # insert large tables in chunks of 500 rows
                        while len(objs) > 0:
                            _tmp_objs = objs[:500]
                            objs = objs[500:]
                            db.execute(metadata.tables[
                                obj['table']].insert().values(_tmp_objs))

                    # db.execute("commit;")
                except Exception:
                    print 'error data %s ...' % line[:128]
                    import traceback
                    traceback.print_exc()
        db.close()
Example #2
def restoredb(config, restorefs):
    if not os.path.exists(restorefs):
        print 'backup file does not exist'
    else:
        engine = get_engine(config)
        db = engine.connect()
        metadata = models.get_metadata(engine)
        with gzip.open(restorefs, 'rb') as rfs:
            for line in rfs:
                try:
                    obj = json.loads(line)
                    print "delete from %s" % obj['table']
                    db.execute("delete from %s" % obj['table'])
                    print 'insert data into %s' % obj['table']
                    objs = obj['data']
                    if len(objs) < 500:
                        if objs:
                            db.execute(metadata.tables[obj['table']].insert().values(objs))
                    else:
                        while len(objs) > 0:
                            _tmp_objs = objs[:500]
                            objs = objs[500:]
                            db.execute(metadata.tables[obj['table']].insert().values(_tmp_objs))

                    # db.execute("commit;")
                except Exception:
                    print 'error data %s ...' % line[:128]
                    import traceback
                    traceback.print_exc()
        db.close()
Example #3
def update(db_engine):
    print 'starting database update...'
    metadata = models.get_metadata(db_engine)
    # WARNING: drops every existing table before recreating the schema
    metadata.drop_all(db_engine)
    metadata.create_all(db_engine)
    print 'database update done'
    db = scoped_session(
        sessionmaker(bind=db_engine, autocommit=False, autoflush=True))()
    init_db(db)
Example #4
def update(db_engine):
    print 'starting database update...'
    metadata = models.get_metadata(db_engine)
    # WARNING: drops every existing table before recreating the schema
    metadata.drop_all(db_engine)
    metadata.create_all(db_engine)
    print 'database update done'
    db = scoped_session(
        sessionmaker(bind=db_engine, autocommit=False, autoflush=True))()
    init_db(db)
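A minimal usage sketch for update(), assuming the same config object and get_engine() helper used in the other examples (those names are taken from the surrounding snippets, not verified here):

    # rebuild the schema and reseed initial data (destructive: see drop_all above)
    update(get_engine(config))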
Example #5
def init_db_engine(self):
    # Create the engine on first use, then build the SQLAlchemy plugin
    # that exposes a session under the 'db' keyword.
    if not self.db_engine:
        self.db_engine = get_engine(self.config)
    metadata = models.get_metadata(self.db_engine)
    self.sqla_pg = sqla_plugin.Plugin(self.db_engine,
                                      metadata,
                                      keyword='db',
                                      create=False,
                                      commit=False,
                                      use_kwargs=False)
Example #6
def run_gensql(config):
    from sqlalchemy import create_engine

    def _e(sql, *multiparams, **params):
        # the mock engine hands each generated statement here instead of executing it
        print(sql)

    # "mock" strategy: create_all() emits the schema DDL for the configured
    # database URL without connecting to it
    engine = create_engine(config.get("database", "dburl"), strategy="mock", executor=_e)
    from toughradius.console import models

    metadata = models.get_metadata(engine)
    metadata.create_all(engine)
Example #7
def init_db_engine(self):
    if not self.db_engine:
        self.db_engine = get_engine(self.config)
    metadata = models.get_metadata(self.db_engine)
    self.sqla_pg = sqla_plugin.Plugin(
        self.db_engine,
        metadata,
        keyword='db',
        create=False,
        commit=False,
        use_kwargs=False
    )
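Judging by the keyword/create/commit arguments, sqla_pg looks like a bottle-sqlalchemy style plugin; if so, it would normally be installed into the web application so request handlers receive a session through the db argument. A hedged wiring sketch (app and srv are placeholder names, not from the project):

    import bottle

    app = bottle.Bottle()
    srv.init_db_engine()      # srv stands in for the object defining the method above
    app.install(srv.sqla_pg)  # handlers declaring a `db` parameter then get a session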
Example #8
def run_gensql(config):
    from sqlalchemy import create_engine

    def _e(sql, *multiparams, **params):
        print(sql)

    engine = create_engine(config.get('database', "dburl"),
                           strategy='mock',
                           executor=_e)
    from toughradius.console import models
    metadata = models.get_metadata(engine)
    metadata.create_all(engine)
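The strategy="mock" engine never opens a connection: every statement generated by metadata.create_all() is passed to the executor, so run_gensql() prints the schema DDL instead of running it. That engine strategy was removed in SQLAlchemy 1.4; a rough standalone equivalent on newer versions (not part of the original project) uses create_mock_engine:

    from sqlalchemy import MetaData, Table, Column, Integer, String, create_mock_engine

    metadata = MetaData()
    Table('demo', metadata,
          Column('id', Integer, primary_key=True),
          Column('name', String(32)))

    def _e(sql, *multiparams, **params):
        # compile each DDL construct for the engine's dialect and print it
        print(sql.compile(dialect=engine.dialect))

    engine = create_mock_engine('sqlite://', _e)   # URL only selects the dialect
    metadata.create_all(engine, checkfirst=False)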
Example #9
def restoredb(config, restorefs):
    if not os.path.exists(restorefs):
        print 'backup file does not exist'
    else:
        engine = get_engine(config)
        db = engine.connect()
        metadata = models.get_metadata(engine)
        with gzip.open(restorefs, 'rb') as rfs:
            cache_datas = {}
            for line in rfs:
                try:
                    tabname, rdata = json.loads(line)

                    # header line: clean every table listed in the dump
                    if tabname == 'table_names' and rdata:
                        for table_name in rdata:
                            print "clean table %s" % table_name
                            db.execute("delete from %s;" % table_name)
                        continue

                    # buffer rows per table and flush in batches of 500
                    if tabname not in cache_datas:
                        cache_datas[tabname] = [rdata]
                    else:
                        cache_datas[tabname].append(rdata)

                    if tabname in cache_datas and len(cache_datas[tabname]) >= 500:
                        print 'insert data<%s> into %s' % (len(cache_datas[tabname]), tabname)
                        db.execute(metadata.tables[tabname].insert().values(cache_datas[tabname]))
                        del cache_datas[tabname]

                except Exception:
                    print 'error data %s ...' % line
                    import traceback
                    traceback.print_exc()

            # flush whatever is still buffered after the last line
            print "insert last data"
            for tname, tdata in cache_datas.iteritems():
                try:
                    print 'insert data<%s> into %s' % (len(tdata), tname)
                    db.execute(metadata.tables[tname].insert().values(tdata))
                except Exception:
                    print 'error data %s ...' % tdata
                    import traceback
                    traceback.print_exc()

            cache_datas.clear()

        db.close()
Example #10
def dumpdb(config, dumpfs):
    _dir = os.path.split(dumpfs)[0]
    if not os.path.exists(_dir):
        os.makedirs(_dir)

    engine = get_engine(config)
    db = engine.connect()
    metadata = models.get_metadata(engine)
    with gzip.open(dumpfs, 'wb') as dumpfs:
        for _name, _table in metadata.tables.items():
            if _name in excludes:
                continue
            rows = db.execute(select([_table])).fetchall()
            obj = dict(table=_name, data=[dict(r.items()) for r in rows])
            dumpfs.write(json.dumps(obj, ensure_ascii=False).encode('utf-8'))
            dumpfs.write('\n')
    db.close()
Example #11
def dumpdb(config, dumpfs):
    _dir = os.path.split(dumpfs)[0]
    if not os.path.exists(_dir):
        os.makedirs(_dir)

    engine = get_engine(config)
    db = engine.connect()
    metadata = models.get_metadata(engine)
    with gzip.open(dumpfs, 'wb') as dumpfs:
        for _name, _table in metadata.tables.items():
            if _name in excludes:
                continue
            rows = db.execute(select([_table])).fetchall()
            obj = dict(table=_name, data=[dict(r.items()) for r in rows])
            dumpfs.write(json.dumps(obj, ensure_ascii=False).encode('utf-8'))
            dumpfs.write('\n')
    db.close()
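Each line written here is a JSON object of the form {"table": name, "data": [rows...]}, which is exactly the format read back by the restoredb() in Examples #1 and #2. A minimal round-trip sketch, assuming a config object accepted by get_engine() (the path is a placeholder):

    backup_path = '/var/backup/toughradius_db.json.gz'  # hypothetical location
    dumpdb(config, backup_path)
    restoredb(config, backup_path)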
Example #12
def dumpdb(config, dumpfs):
    _dir = os.path.split(dumpfs)[0]
    if not os.path.exists(_dir):
        os.makedirs(_dir)

    engine = get_engine(config)
    db = engine.connect()
    metadata = models.get_metadata(engine)
    with gzip.open(dumpfs, 'wb') as dumpfs:

        # header line: the list of table names in the dump
        table_names = [_name for _name, _ in metadata.tables.items()]
        table_headers = ('table_names', table_names)
        dumpfs.write(json.dumps(table_headers, ensure_ascii=False).encode('utf-8'))
        dumpfs.write('\n')

        # one JSON line per row: (table_name, row_dict)
        for _name, _table in metadata.tables.items():
            if _name in excludes:
                continue
            rows = db.execute(select([_table]))
            for row in rows:
                obj = (_name, dict(row.items()))
                dumpfs.write(json.dumps(obj, ensure_ascii=False).encode('utf-8'))
                dumpfs.write('\n')
    db.close()
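This streaming variant first writes a ('table_names', [...]) header and then one (table_name, row) line per record, which is the layout consumed by the batched restoredb() in Example #9. A small self-contained sketch for inspecting such a dump (the path is a placeholder):

    import gzip
    import json
    from collections import Counter

    counts = Counter()
    with gzip.open('/var/backup/toughradius_db.json.gz', 'rb') as fh:
        for line in fh:
            name, payload = json.loads(line)
            if name == 'table_names':
                print('tables in dump: %s' % ', '.join(payload))
            else:
                counts[name] += 1  # one JSON line per table row
    for name, n in counts.items():
        print('%s: %d rows' % (name, n))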