Example #1
import eventlet
eventlet.monkey_patch()

from simpleflow import task  # assumed import path for the task base class
from simpleflow.types import failure
from simpleflow.utils.storage_utils import build_session
from simpleflow.storage.impl import Connection

failure.TRACEBACK = True

dst = {
    'host': '172.20.0.3',
    'port': 3304,
    'schema': 'simpleflow',
    'user': '******',
    'passwd': '111111'
}
from simpleservice.ormdb.argformater import connformater
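# connformater is a printf-style template; rendering it with the dict above
# yields the SQL connection URL that build_session expects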
sql_connection = connformater % dst
session = build_session(sql_connection)

connection = Connection(session)


class MysqlDumper(task.Task):
    def execute(self, server_id):
        if server_id % 2 != 0:
            raise Exception('server id %d error' % server_id)
        print 'success', server_id

    def revert(self, *args, **kwargs):
        print 'revert', args, kwargs


servers = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
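# Example #1 stops after defining its inputs. A minimal sketch of how it
# might continue, modeled on the load()/engine.run() pattern in Example #2.
# The three import paths below are assumptions (simpleflow appears to mirror
# taskflow's layout) and do not appear anywhere in these examples.
from simpleflow.patterns import unordered_flow as uf  # assumed path
from simpleflow.storage import LogBook                # assumed path
from simpleflow.engines import load                   # assumed path

flow = uf.Flow('mysqldump-demo')
for server_id in servers:
    # inject (taskflow's per-task input mechanism, assumed here too) gives
    # each task its own server_id; execute() raises on odd ids, which
    # triggers revert() on the tasks that already ran
    flow.add(MysqlDumper('dump-%d' % server_id,
                         inject={'server_id': server_id}))

engine = load(connection, flow, book=LogBook(name='mysqldump-demo'))
engine.run()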
Example #2
def merge_entitys(appendpoint, uuid, entity, databases):
    datadb = databases[common.DATADB]
    mergepath = 'merge-%s' % uuid
    mergeroot = os.path.join(appendpoint.endpoint_backup, mergepath)
    stepsfile = os.path.join(mergeroot, 'steps.dat')
    initfile = os.path.join(mergeroot, 'init.sql')
    if not os.path.exists(stepsfile):
        raise exceptions.MergeException('Steps file does not exist')
    with open(stepsfile, 'rb') as f:
        data = cPickle.load(f)
        steps = data['steps']
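    # Scan the persisted steps: if any entity is FINISHED, all of them must
    # be (the merge then just re-sends the finish notification and returns);
    # entities not yet at INSERT still need the prepare phase below.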
    prepares = []
    for _entity, step in six.iteritems(steps):
        # When a post-merge SQL error does not affect the overall result,
        # the step can be set to FINISHED manually to avoid re-running the
        # server-merge steps.
        if step == FINISHED:
            for _step in six.itervalues(steps):
                if _step != FINISHED:
                    raise exceptions.MergeException('Not all steps are finished')
            appendpoint.client.finish_merge(uuid)
            appendpoint.flush_config(entity,
                                     databases,
                                     opentime=data['opentime'],
                                     chiefs=data['chiefs'])
            return
        if step != INSERT:
            prepares.append(_entity)
    if prepares:
        mini_entity = min(prepares)
        name = 'prepare-merge-at-%d' % int(time.time())
        book = LogBook(name=name)
        store = dict(timeout=5,
                     dtimeout=600,
                     mergeroot=mergeroot,
                     entity=entity)
        taskflow_session = build_session(
            'sqlite:///%s' % os.path.join(mergeroot, '%s.db' % name))
        connection = Connection(taskflow_session)

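        # One linear sub-flow per entity (Swallow -> DumpData -> Swallowed),
        # grouped in an unordered flow so the entities are prepared in
        # parallel by up to max_workers engine workers.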
        prepare_uflow = uf.Flow(name)
        for _entity in prepares:
            entity_flow = lf.Flow('prepare-%d' % _entity)
            entity_flow.add(Swallow(uuid, steps, _entity, appendpoint))
            entity_flow.add(
                DumpData(uuid, steps, _entity, appendpoint,
                         _entity != mini_entity))
            entity_flow.add(Swallowed(uuid, steps, _entity, appendpoint))
            prepare_uflow.add(entity_flow)
        engine = load(connection,
                      prepare_uflow,
                      store=store,
                      book=book,
                      engine_cls=ParallelActionEngine,
                      max_workers=4)
        try:
            engine.run()
        except Exception as e:
            if LOG.isEnabledFor(logging.DEBUG):
                LOG.exception('Prepare merge task execution failed')
            raise exceptions.MergeException(
                'Prepare merge task execution failed, %s %s' %
                (e.__class__.__name__, str(e)))
        finally:
            connection.session = None
            taskflow_session.close()
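            # data['steps'] and 'steps' refer to the same dict, so dumping
            # 'data' here persists per-entity progress across retries
            # (assuming the Swallow/DumpData/Swallowed tasks update it).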
            with open(stepsfile, 'wb') as f:
                cPickle.dump(data, f)

    for _entity, step in six.iteritems(steps):
        if step != INSERT:
            raise exceptions.MergeException('Some steps are not at %s' % INSERT)
        if not os.path.exists(os.path.join(mergeroot, sqlfile(_entity))):
            raise exceptions.MergeException('Entity %d sql file does not exist' %
                                            _entity)

    if not os.path.exists(initfile):
        LOG.error('Init database file does not exist')
        raise exceptions.MergeException('Init database file does not exist')
    LOG.info('Prepare merge succeeded, trying database merge')

    now = int(time.time())
    name = 'merge-at-%d' % now
    book = LogBook(name=name)
    store = dict(timeout=1800, root=mergeroot, database=datadb, timeline=now)
    taskflow_session = build_session('sqlite:///%s' %
                                     os.path.join(mergeroot, '%s.db' % name))
    connection = Connection(taskflow_session)

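    # The final merge is strictly ordered: safely clean the target database,
    # replay init.sql, insert every entity's dumped data (the InserDb tasks
    # run sequentially inside one linear flow), then run post-merge hooks.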
    merge_flow = lf.Flow('merge-to')
    merge_flow.add(SafeCleanDb())
    merge_flow.add(InitDb())
    insert_lflow = lf.Flow('insert-db')
    stoper = [0]
    for _entity in steps:
        insert_lflow.add(InserDb(_entity, stoper))
    merge_flow.add(insert_lflow)
    merge_flow.add(PostDo(uuid, appendpoint))

    engine = load(connection,
                  merge_flow,
                  store=store,
                  book=book,
                  engine_cls=ParallelActionEngine,
                  max_workers=4)
    try:
        engine.run()
    except Exception as e:
        if LOG.isEnabledFor(logging.DEBUG):
            LOG.exception('Merge database task execution failed')
        raise exceptions.MergeException(
            'Merge database task execution failed, %s %s' %
            (e.__class__.__name__, str(e)))
    else:
        for _entity in steps:
            steps[_entity] = FINISHED
        with open(stepsfile, 'wb') as f:
            cPickle.dump(data, f)
        appendpoint.client.finish_merge(uuid)
        appendpoint.flush_config(entity,
                                 databases,
                                 opentime=data['opentime'],
                                 chiefs=data['chiefs'])
        LOG.info('Merge task %s all finished' % uuid)
    finally:
        connection.session = None
        taskflow_session.close()
Example #3
import time

from simpleflow.utils.storage_utils import build_session
from simpleflow.storage.impl import Connection
from simpleservice.ormdb.argformater import connformater

dst = {
    'host': '172.20.0.3',
    'port': 3304,
    'schema': 'simpleflow',
    'user': '******',
    'passwd': '111111'
}

sql_connection = connformater % dst
session = build_session(sql_connection)

conn = Connection(session)
s = time.time()
for book in conn.get_logbooks():
    print book
print time.time() - s
conn.clear_all()
print time.time() - s
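The first timestamp above measures iterating the persisted logbooks; clear_all()
then drops the stored logbooks, so the second print includes the cleanup cost.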