Example #1
def migrate_db(ctx):
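    """Migrate the current database (ctx.current_db) into ctx.new_db.

    Depending on ctx.data_only / ctx.schema_only, the target database is
    created, its schema synchronized, and/or the data copied over.
    """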
    from_engine = create_engine(ctx.current_db.engine.url)
    to_engine = create_engine(ctx.new_db.engine.url)
    # Create the target database if it does not already exist
    if not ctx.data_only:
        create_database_if_not_exists(ctx, to_engine)

    # Collect all tables declared by our models
    tables = [table for _, table in models.all_tables()]

    if not ctx.data_only:
        # Create missing tables, then upgrade the schema with alembic
        tables = list(sync_schema(ctx, tables, from_engine, to_engine))
        alembic_sync_schema(ctx, from_engine, to_engine, tables=tables)

    if not ctx.schema_only:
        # Reflect the freshly created tables from the target database
        new_tables = [reflect_table(to_engine, table.name) for table in tables]

        # Tables that must be emptied in the target before the data is copied
        tables_to_empty = (
            'admission_rules',
        )

        for table in new_tables:
            if table.name in tables_to_empty:
                with to_engine.connect() as to_conn:
                    delete_from_table(ctx, table, to_conn)

        # Copy the data from the current database into the new one
        sync_tables(ctx, new_tables, ctx.current_db, ctx.new_db)

    # Resynchronize the auto-increment sequences on the target database
    fix_sequences(ctx, to_engine, new_tables)
Example #2
def test_simple_models():
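    """Check that the declared models and tables match the expected list."""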
    expected_models = [
        'Accounting', 'AdmissionRule', 'AssignedResource', 'Challenge',
        'EventLog', 'EventLogHostname', 'File', 'FragJob',
        'GanttJobsPrediction', 'GanttJobsPredictionsLog',
        'GanttJobsPredictionsVisu', 'GanttJobsResource',
        'GanttJobsResourcesLog', 'GanttJobsResourcesVisu', 'Job',
        'JobDependencie', 'JobResourceDescription', 'JobResourceGroup',
        'JobStateLog', 'JobType', 'MoldableJobDescription', 'Queue',
        'Resource', 'ResourceLog', 'Scheduler'
    ]
    assert set(db.models.keys()) == set(expected_models)
    assert set(dict(all_models()).keys()) == set(expected_models)

    # all_tables() also returns the extra 'schema' table, hence the "+ 1"
    assert len(dict(all_tables())) == len(expected_models) + 1
Example #3
def archive_db(ctx):
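    """Archive the current database into ctx.archive_db.

    If both databases live on the same local PostgreSQL/MySQL server the
    database is cloned; otherwise the schema is recreated in the archive
    database and the data copied table by table. The data of the tables
    listed in ignored_tables is not copied.
    """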
    from_engine = create_engine(ctx.current_db.engine.url)
    to_engine = create_engine(ctx.archive_db.engine.url)

    # Tables whose data is not carried over into the archive
    ignored_tables = [
        'accounting',
        'gantt_jobs_predictions',
        'gantt_jobs_predictions_log',
        'gantt_jobs_predictions_visu',
        'gantt_jobs_resources',
        'gantt_jobs_resources_log',
        'gantt_jobs_resources_visu',
    ]
    if (is_local_database(from_engine, to_engine)
            and from_engine.dialect.name in ("postgresql", "mysql")):
        # Same local PostgreSQL/MySQL server: clone the current database,
        # then synchronize the schema and copy the remaining data
        clone_db(ctx, ignored_tables=ignored_tables)
        tables = list(sync_schema(ctx, None, from_engine, to_engine))
        # Copy data into the archive database
        sync_tables(ctx, sorted(tables, key=lambda x: x.name),
                    ctx.current_db, ctx.archive_db, delete=True,
                    ignored_tables=ignored_tables)
    else:
        # Create the archive database if it does not already exist
        create_database_if_not_exists(ctx, to_engine)

        if from_engine.dialect.name != to_engine.dialect.name:
            # Different backends: collect the tables from our models,
            # create the missing ones and upgrade the schema with alembic
            tables = [table for _, table in models.all_tables()]
            tables = list(sync_schema(ctx, tables, from_engine, to_engine))
            alembic_sync_schema(ctx, from_engine, to_engine, tables=tables)
        else:
            # Same backend: create the missing tables
            tables = list(sync_schema(ctx, None, from_engine, to_engine))

        tables = sorted(tables, key=lambda x: x.name)

        # Copy data into the archive database
        sync_tables(ctx, tables,
                    ctx.current_db, ctx.archive_db, delete=True,
                    ignored_tables=ignored_tables)
    # Resynchronize the auto-increment sequences on the archive database
    fix_sequences(ctx, to_engine, tables)
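
Note on the ctx argument: all three examples expect a ctx object that exposes the databases involved and two flags. The snippet below is only a sketch, not code from the project: make_ctx and SimpleNamespace are stand-ins, and only the attribute names (current_db, new_db, archive_db, data_only, schema_only) are taken from the examples above.

# Sketch only: the real project defines its own context and database
# wrappers; everything here besides the attribute names is assumed.
from types import SimpleNamespace

from sqlalchemy import create_engine


def make_ctx(current_url, new_url, archive_url,
             data_only=False, schema_only=False):
    def db(url):
        # Wrap an engine so that ctx.<name>.engine.url is available
        return SimpleNamespace(engine=create_engine(url))

    return SimpleNamespace(
        current_db=db(current_url),
        new_db=db(new_url),
        archive_db=db(archive_url),
        data_only=data_only,
        schema_only=schema_only,
    )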