def upgrade():
    """Recreate the ``player`` table from scratch and seed the roster."""
    # Best-effort drop: an OperationalError here means the table was absent.
    try:
        op.drop_table("player")
    except sa.exc.OperationalError:
        pass
    op.create_table(
        'player',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('name', sa.Text),
        sa.Column('goals', sa.Integer, default=0),
        sa.Column('picture', sa.Text, default="images/unknown.jpg"),
    )
    # Lightweight ad-hoc table object used only for the insert below.
    player_tbl = table(
        'player',
        column("id", sa.Integer),
        column("name", sa.Text),
        column("goals", sa.Integer),
        column("picture", sa.Text),
    )
    seed_rows = [
        {'id': 1, 'name': 'Tom Manley', 'goals': 0,
         'picture': "images/tom.manley.jpg"},
        {'id': 2, 'name': 'Peter Yamashiro', 'goals': 0,
         'picture': "images/unknown.jpg"},
    ]
    op.bulk_insert(player_tbl, seed_rows)
def _insert_operation_form_translation():
    """Seed the 'Execution' form translations (en/pt) for forms 102-107."""
    tb = table(
        'operation_form_translation',
        column('id', Integer),
        column('locale', String),
        column('name', String),
    )
    names = [c.name for c in tb.columns]

    rows = []
    for form_id in (102, 103, 104, 105, 106, 107):
        rows.append(dict(zip(names, (form_id, 'en', 'Execution'))))
        rows.append(dict(zip(names, (form_id, 'pt', 'Execução'))))

    op.bulk_insert(tb, rows)
def _insert_operation_form_field_translation():
    """Seed field labels/help texts (en/pt) for normalize and feature-indexer."""
    tb = table(
        'operation_form_field_translation',
        column('id', Integer),
        column('locale', String),
        column('label', String),
        column('help', String),
    )
    columns = ('id', 'locale', 'label', 'help')
    data = [
        # normalize
        (3078, 'en', 'Attributes', 'Attributes'),
        (3078, 'pt', 'Attributos', 'Colunas para serem consideradas'),
        (3079, 'en', 'Alias', 'Name of the new column'),
        (3079, 'pt', 'Alias', 'Nome para a nova coluna criada'),
        (3080, 'en', 'Normalization Type', 'Type of Normalization to perform.'),
        (3080, 'pt', 'Tipo de Normalização',
         'Tipo de Normalização para ser feita.'),
        # feature-indexer
        (3027, 'en', 'Attributes', 'Attributes (features) to be indexed'),
        (3027, 'pt', 'Atributos', 'Atributos (features) a ser indexados'),
        (3028, 'en', 'Indexer type', 'Indexer type'),
        (3028, 'pt', 'Tipo de indexador', 'Tipo de indexador'),
        (3029, 'en', 'Name for new indexed attribute(s)',
         'Name for new indexed attribute(s)'),
        (3029, 'pt', 'Nome para novo(s) atributo(s) indexado(s)',
         'Nome para novo(s) atributo(s) indexado(s)'),
    ]
    op.bulk_insert(tb, [dict(zip(columns, row)) for row in data])
def upgrade():
    """Create roles/users tables, add a company unique key, and seed roles."""
    ### commands auto generated by Alembic - please adjust! ###
    roles = op.create_table(
        'roles',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=80), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name'),
    )
    op.create_table(
        'users',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('email', sa.String(length=80), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('first_name', sa.String(length=30), nullable=True),
        sa.Column('last_name', sa.String(length=30), nullable=True),
        sa.Column('active', sa.Boolean(), nullable=True),
        sa.Column('role_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['role_id'], ['roles.id'], ondelete='SET NULL'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('email'),
    )
    op.create_unique_constraint(
        'company_company_name_key', 'company', ['company_name'])
    # Seed the three built-in roles with fixed ids.
    op.bulk_insert(roles, [
        {'id': 1, 'name': 'superadmin'},
        {'id': 2, 'name': 'admin'},
        {'id': 3, 'name': 'staff'},
    ])
def _insert_operation_operation_form():
    """Link each regression operation to its own form (102-107) and form 41."""
    tb = table(
        'operation_operation_form',
        column('operation_id', Integer),
        column('operation_form_id', Integer))

    pairs = [
        (REGRESSION_MODEL, 102),
        (ISOTONIC_REGRESSION, 103),
        (AFT_SURVIVAL_REGRESSION, 104),
        (GBT_REGRESSOR, 105),
        (RANDOM_FOREST_REGRESSOR, 106),
        (GENERALIZED_LINEAR_REGRESSOR, 107),
        # Every regressor also shares form 41.
        (REGRESSION_MODEL, 41),
        (ISOTONIC_REGRESSION, 41),
        (AFT_SURVIVAL_REGRESSION, 41),
        (GBT_REGRESSOR, 41),
        (RANDOM_FOREST_REGRESSOR, 41),
        (GENERALIZED_LINEAR_REGRESSOR, 41),
    ]
    rows = [{'operation_id': operation_id, 'operation_form_id': form_id}
            for operation_id, form_id in pairs]

    op.bulk_insert(tb, rows)
def _insert_operation_operation_form():
    """Attach the shared forms (39-43, 110) and each op's own form."""
    tb = table(
        'operation_operation_form',
        column('operation_id', Integer),
        column('operation_form_id', Integer))

    shared_forms = (39, 40, 41, 43, 110)
    rows = []
    for operation_id in (3022, 3012):  # normalize, feature-indexer
        for form_id in shared_forms + (operation_id,):
            rows.append({'operation_id': operation_id,
                         'operation_form_id': form_id})

    op.bulk_insert(tb, rows)
def _insert_operation_form_field():
    """Seed the form fields for the normalize and feature-indexer operations."""
    tb = table(
        'operation_form_field',
        column('id', Integer),
        column('name', String),
        column('type', String),
        column('required', Integer),
        column('order', Integer),
        column('default', Text),
        column('suggested_widget', String),
        column('values_url', String),
        column('values', String),
        column('scope', String),
        column('form_id', Integer),
    )
    columns = ('id', 'name', 'type', 'required', 'order', 'default',
               'suggested_widget', 'values_url', 'values', 'scope', 'form_id')
    data = [
        # normalize (form 3022)
        (3078, 'attributes', 'TEXT', 1, 1, None, 'attribute-selector',
         None, None, 'EXECUTION', 3022),
        (3079, 'alias', 'TEXT', 1, 2, None, 'attribute-selector',
         None, None, 'EXECUTION', 3022),
        (3080, 'mode', 'TEXT', 1, 3, 'range', 'dropdown', None,
         '[{\"key\": \"range\", \"value\": \"Range Normalization\"},\r\n  '
         ' {\"key\": \"standard\", \"value\": \"Standard Score Normalization\"}\r\n  ]',
         'EXECUTION', 3022),
        # feature-indexer (form 3012)
        (3027, 'attributes', 'TEXT', 1, 1, None, 'attribute-selector',
         None, None, 'EXECUTION', 3012),
        (3028, 'indexer_type', 'TEXT', 1, 3, None, 'dropdown', None,
         '[\r\n  {"key": "string", "value": "String"}\r\n]',
         'EXECUTION', 3012),
        (3029, 'alias', 'TEXT', 1, 2, None, 'text', None, None,
         'EXECUTION', 3012),
    ]
    op.bulk_insert(tb, [dict(zip(columns, row)) for row in data])
def _insert_operation_form_translation():
    """Seed the 'Execution' form translations (en/pt) for forms 3024-3031."""
    tb = table(
        'operation_form_translation',
        column('id', Integer),
        column('locale', String),
        column('name', String))

    rows = []
    for form_id in range(3024, 3032):
        rows.append({'id': form_id, 'locale': 'en', 'name': 'Execution'})
        rows.append({'id': form_id, 'locale': 'pt', 'name': 'Execução'})

    op.bulk_insert(tb, rows)
def upgrade():
    """Insert the three G-Cloud 9 lot records."""
    lot_table = table(
        'lots',
        column('name', sa.String),
        column('slug', sa.String),
        column('one_service_limit', sa.Boolean),
        column('data', sa.JSON),
    )

    # Every G-Cloud 9 lot shares the same unit metadata and limits.
    service_data = {"unitSingular": "service", "unitPlural": "services"}
    lots = [
        ('Cloud hosting', 'cloud-hosting'),
        ('Cloud software', 'cloud-software'),
        ('Cloud support', 'cloud-support'),
    ]
    op.bulk_insert(lot_table, [
        {'name': name, 'slug': slug, 'one_service_limit': False,
         'data': dict(service_data)}
        for name, slug in lots
    ])
def upgrade():
    """Seed the built-in NEXUS problem set, unless it is already present.

    Bug fix: the existence guard previously queried for 'NEXUS 00.526.6'
    (space instead of a dot), which never matches the inserted
    'NEXUS.00.526.6' row, so re-running the migration would insert the
    whole problem set a second time. The identifier now matches the data.
    """
    conn = op.get_bind()
    res = conn.execute(problemtable.select().where(
        problemtable.c.identifier == 'NEXUS.00.526.6'))
    if len(res.fetchall()) == 0:
        op.bulk_insert(problemtable, [
            {"userId":-1,'identifier':'NEXUS.00.526.6',"a0" : 0, "a1":0, "a2" : 2, "a3":0, "a4":1, "a5" : 0, "a6" : 0, "a7" : 0, "a8":0, "a9":0, "a10":0, 'a11':1,"entry1":0,"entry2":1,"entry3":0,"entry4":1,"output1":0,"output2":0,"output3":1,"output4":0,"name":'Sequence merger'},
            {"userId":-1,'identifier':'NEXUS.01.874.8',"a0" : 0, "a1":0, "a2" : 0, "a3":2, "a4":0, "a5" : 0, "a6" : 0, "a7" : 0, "a8":0, "a9":0, "a10":0, 'a11':2,"entry1":0,"entry2":1,"entry3":0,"entry4":0,"output1":0,"output2":1,"output3":0,"output4":0,"name":'Integer series calculator'},
            {"userId":-1,'identifier':'NEXUS.02.981.2',"a0" : 0, "a1":0, "a2" : 0, "a3":2, "a4":0, "a5" : 0, "a6" : 0, "a7" : 0, "a8":0, "a9":0, "a10":0, 'a11':0,"entry1":1,"entry2":1,"entry3":1,"entry4":0,"output1":0,"output2":1,"output3":0,"output4":0,"name":'Sequence range limiter'},
            {"userId":-1,'identifier':'NEXUS.03.176.9',"a0" : 2, "a1":0, "a2" : 0, "a3":2, "a4":0, "a5" : 0, "a6" : 0, "a7" : 0, "a8":0, "a9":0, "a10":0, 'a11':0,"entry1":0,"entry2":1,"entry3":1,"entry4":0,"output1":0,"output2":1,"output3":1,"output4":1,"name":'Signal error corrector'},
            {"userId":-1,'identifier':'NEXUS.04.340.5',"a0" : 0, "a1":0, "a2" : 0, "a3":0, "a4":0, "a5" : 0, "a6" : 0, "a7" : 0, "a8":2, "a9":0, "a10":0, 'a11':0,"entry1":0,"entry2":1,"entry3":1,"entry4":0,"output1":0,"output2":0,"output3":1,"output4":0,"name":'Subsequence extractor'},
            {"userId":-1,'identifier':'NEXUS.05.647.1',"a0" : 0, "a1":2, "a2" : 2, "a3":2, "a4":0, "a5" : 0, "a6" : 0, "a7" : 0, "a8":0, "a9":0, "a10":0, 'a11':0,"entry1":1,"entry2":0,"entry3":0,"entry4":0,"output1":0,"output2":1,"output3":1,"output4":1,"name":'Signal prescaler'},
            {"userId":-1,'identifier':'NEXUS.06.786.0',"a0" : 0, "a1":0, "a2" : 0, "a3":0, "a4":2, "a5" : 0, "a6" : 0, "a7" : 0, "a8":0, "a9":0, "a10":0, 'a11':0,"entry1":0,"entry2":1,"entry3":1,"entry4":0,"output1":0,"output2":0,"output3":1,"output4":0,"name":'Signal averager'},
            {"userId":-1,'identifier':'NEXUS.07.050.0',"a0" : 0, "a1":0, "a2" : 0, "a3":0, "a4":0, "a5" : 0, "a6" : 0, "a7" : 0, "a8":2, "a9":0, "a10":0, 'a11':0,"entry1":1,"entry2":1,"entry3":1,"entry4":1,"output1":0,"output2":0,"output3":1,"output4":0,"name":'Submaximum selector'},
            {"userId":-1,'identifier':'NEXUS.08.633.9',"a0" : 0, "a1":0, "a2" : 0, "a3":2, "a4":0, "a5" : 0, "a6" : 0, "a7" : 0, "a8":0, "a9":0, "a10":0, 'a11':0,"entry1":0,"entry2":1,"entry3":0,"entry4":0,"output1":1,"output2":1,"output3":1,"output4":0,"name":'Decimal decomposer'},
            {"userId":-1,'identifier':'NEXUS.09.904.9',"a0" : 1, "a1":0, "a2" : 1, "a3":2, "a4":0, "a5" : 0, "a6" : 0, "a7" : 2, "a8":0, "a9":0, "a10":0, 'a11':2,"entry1":0,"entry2":1,"entry3":0,"entry4":0,"output1":0,"output2":1,"output3":0,"output4":0,"name":'Sequence mode calculator'},
            {"userId":-1,'identifier':'NEXUS.10.656.5',"a0" : 0, "a1":0, "a2" : 1, "a3":0, "a4":0, "a5" : 0, "a6" : 0, "a7" : 1, "a8":0, "a9":2, "a10":0, 'a11':0,"entry1":0,"entry2":1,"entry3":0,"entry4":0,"output1":0,"output2":0,"output3":1,"output4":0,"name":'Sequence normalizer'},
            {"userId":-1,'identifier':'NEXUS.11.711.2',"a0" : 2, "a1":0, "a2" : 0, "a3":0, "a4":0, "a5" : 0, "a6" : 0, "a7" : 0, "a8":0, "a9":0, "a10":0, 'a11':0,"entry1":0,"entry2":0,"entry3":0,"entry4":0,"output1":0,"output2":0,"output3":1,"output4":0,"name":'Image test pattern 3'},
            {"userId":-1,'identifier':'NEXUS.12.534.4',"a0" : 2, "a1":0, "a2" : 0, "a3":0, "a4":0, "a5" : 0, "a6" : 0, "a7" : 0, "a8":0, "a9":0, "a10":0, 'a11':0,"entry1":0,"entry2":0,"entry3":0,"entry4":0,"output1":0,"output2":0,"output3":1,"output4":0,"name":'Image test pattern 4'},
            {"userId":-1,'identifier':'NEXUS.13.370.9',"a0" : 2, "a1":0, "a2" : 0, "a3":0, "a4":0, "a5" : 0, "a6" : 0, "a7" : 0, "a8":0, "a9":0, "a10":0, 'a11':0,"entry1":0,"entry2":1,"entry3":0,"entry4":0,"output1":0,"output2":0,"output3":1,"output4":0,"name":'Spatial path viewer'},
            {"userId":-1,'identifier':'NEXUS.14.781.3',"a0" : 1, "a1":0, "a2" : 0, "a3":2, "a4":0, "a5" : 0, "a6" : 0, "a7" : 0, "a8":1, "a9":0, "a10":0, 'a11':0,"entry1":0,"entry2":1,"entry3":0,"entry4":0,"output1":0,"output2":0,"output3":1,"output4":0,"name":'Character terminal'},
            {"userId":-1,'identifier':'NEXUS.15.897.9',"a0" : 0, "a1":0, "a2" : 0, "a3":0, "a4":0, "a5" : 0, "a6" : 0, "a7" : 0, "a8":2, "a9":0, "a10":0, 'a11':0,"entry1":0,"entry2":1,"entry3":1,"entry4":0,"output1":0,"output2":0,"output3":1,"output4":0,"name":'Back-reference reifier'},
            {"userId":-1,'identifier':'NEXUS.16.212.8',"a0" : 0, "a1":0, "a2" : 0, "a3":0, "a4":0, "a5" : 0, "a6" : 0, "a7" : 0, "a8":0, "a9":0, "a10":0, 'a11':2,"entry1":1,"entry2":0,"entry3":1,"entry4":0,"output1":0,"output2":0,"output3":1,"output4":0,"name":'Dynamic pattern detector'},
            {"userId":-1,'identifier':'NEXUS.17.135.0',"a0" : 2, "a1":0, "a2" : 0, "a3":0, "a4":2, "a5" : 1, "a6" : 0, "a7" : 1, "a8":2, "a9":0, "a10":0, 'a11':0,"entry1":0,"entry2":0,"entry3":1,"entry4":0,"output1":0,"output2":0,"output3":1,"output4":0,"name":'Sequence gap interpolator'},
            {"userId":-1,'identifier':'NEXUS.18.427.7',"a0" : 0, "a1":0, "a2" : 0, "a3":0, "a4":0, "a5" : 0, "a6" : 0, "a7" : 0, "a8":2, "a9":0, "a10":0, 'a11':0,"entry1":0,"entry2":1,"entry3":0,"entry4":0,"output1":0,"output2":0,"output3":1,"output4":0,"name":'Decimal to octal converter'},
            {"userId":-1,'identifier':'NEXUS.19.762.9',"a0" : 2, "a1":1, "a2" : 0, "a3":0, "a4":0, "a5" : 0, "a6" : 0, "a7" : 0, "a8":0, "a9":1, "a10":0, 'a11':0,"entry1":0,"entry2":0,"entry3":1,"entry4":0,"output1":0,"output2":0,"output3":1,"output4":0,"name":'Prolonged sequence sorter'},
            {"userId":-1,'identifier':'NEXUS.20.433.1',"a0" : 0, "a1":0, "a2" : 0, "a3":0, "a4":2, "a5" : 0, "a6" : 0, "a7" : 0, "a8":0, "a9":0, "a10":0, 'a11':0,"entry1":0,"entry2":1,"entry3":0,"entry4":0,"output1":0,"output2":1,"output3":0,"output4":0,"name":'Prime factor calculator'},
            {"userId":-1,'identifier':'NEXUS.21.601.6',"a0" : 0, "a1":0, "a2" : 0, "a3":0, "a4":1, "a5" : 0, "a6" : 0, "a7" : 1, "a8":2, "a9":0, "a10":0, 'a11':0,"entry1":0,"entry2":1,"entry3":1,"entry4":0,"output1":0,"output2":0,"output3":1,"output4":0,"name":'Sigmal exponentiator'},
            {"userId":-1,'identifier':'NEXUS.22.280.8',"a0" : 0, "a1":0, "a2" : 0, "a3":0, "a4":0, "a5" : 0, "a6" : 0, "a7" : 0, "a8":0, "a9":0, "a10":0, 'a11':2,"entry1":0,"entry2":1,"entry3":1,"entry4":0,"output1":0,"output2":1,"output3":0,"output4":0,"name":'T20 node emulator'},
            {"userId":-1,'identifier':'NEXUS.23.727.9',"a0" : 2, "a1":1, "a2" : 0, "a3":0, "a4":0, "a5" : 0, "a6" : 0, "a7" : 0, "a8":0, "a9":1, "a10":0, 'a11':0,"entry1":0,"entry2":0,"entry3":1,"entry4":0,"output1":0,"output2":0,"output3":1,"output4":0,"name":'T31 node emulator'},
            {"userId":-1,'identifier':'NEXUS.24.511.7',"a0" : 0, "a1":0, "a2" : 0, "a3":0, "a4":0, "a5" : 0, "a6" : 0, "a7" : 0, "a8":0, "a9":0, "a10":0, 'a11':0,"entry1":1,"entry2":1,"entry3":1,"entry4":1,"output1":0,"output2":1,"output3":0,"output4":0,"name":'Wave collapse supervisor'},
        ])
def upgrade():
    """Backfill routerports rows for existing L3 HA router/agent bindings.

    Reads the HA agent-binding table, subtracts (router, port) pairs that
    already have a routerports row of the HA-interface type, and bulk-inserts
    the remainder.

    NOTE(review): relies on names defined elsewhere in this migration module
    (HA_AGENT_BINDINGS, ROUTER_PORTS, constants, lib_const).
    """
    # Ad-hoc Table objects used only for querying/inserting; no DDL happens
    # here — the real tables are assumed to exist already.
    ha_bindings = sa.Table(
        HA_AGENT_BINDINGS,
        sa.MetaData(),
        sa.Column('port_id', sa.String(36)),
        sa.Column('router_id', sa.String(36)),
        sa.Column('l3_agent_id', sa.String(36)),
        sa.Column('state', sa.Enum(constants.HA_ROUTER_STATE_ACTIVE,
                                   constants.HA_ROUTER_STATE_STANDBY,
                                   name='l3_ha_states'))
    )
    router_ports = sa.Table(ROUTER_PORTS,
                            sa.MetaData(),
                            sa.Column('router_id', sa.String(36)),
                            sa.Column('port_id', sa.String(36)),
                            sa.Column('port_type', sa.String(255)))
    session = sa.orm.Session(bind=op.get_bind())
    with session.begin(subtransactions=True):
        # Collect every (router, port) pair currently bound to an HA agent.
        router_port_tuples = set()
        for ha_bind in session.query(ha_bindings):
            router_port_tuples.add((ha_bind.router_id, ha_bind.port_id))
        # we have to remove any from the bulk insert that may already exist
        # as a result of Ifd3e007aaf2a2ed8123275aa3a9f540838e3c003 being
        # back-ported
        for router_port in session.query(router_ports).filter(
                router_ports.c.port_type ==
                lib_const.DEVICE_OWNER_ROUTER_HA_INTF):
            router_port_tuples.discard((router_port.router_id,
                                        router_port.port_id))
        new_records = [dict(router_id=router_id, port_id=port_id,
                            port_type=lib_const.DEVICE_OWNER_ROUTER_HA_INTF)
                       for router_id, port_id in router_port_tuples]
    # Insert outside the read transaction, then commit the session.
    op.bulk_insert(router_ports, new_records)
    session.commit()
# Exemple #12
# 0
def _test_bulk_insert_single_bigt(dialect, as_sql):
    """Exercise bulk_insert with a single row against the big-t fixture."""
    context, big_t = _big_t_table_fixture(dialect, as_sql)
    single_row = {'id': 1, 'v1': 'row v1', 'v2': 'row v5'}
    op.bulk_insert(big_t, [single_row])
    return context
# Exemple #13
# 0
def upgrade_account():
    """Create the ``user`` table and seed it with the configured defaults.

    Bug fix: the old code hashed passwords by mutating the dicts from
    ``conf.user.default_users`` in place — ``list(...)`` copies the list but
    not the dicts, so the plaintext passwords in the shared config object
    were silently overwritten with their hashes. Each row is now
    shallow-copied before being modified.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'user',
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('email', sa.String(length=254), nullable=False),
        sa.Column('password', sa.String(length=100), nullable=False),
        sa.Column('role', sa.String(length=16), nullable=False),
        sa.Column('name', sa.String(length=64), nullable=True),
        sa.Column('deleted', sa.DateTime(), nullable=True),
        sa.Column('created', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('user_id'),
        sa.UniqueConstraint('email'),
    )
    ### end Alembic commands ###
    # Imported here, as in the original, to avoid import side effects at
    # module load time.
    import conf
    from model.account.user import User

    user_table = table('user',
                       Column('user_id', Integer, primary_key=True),
                       Column('email', String),
                       Column('password', String),
                       Column('role', String),
                       Column('name', String),
                       Column('deleted', DateTime),
                       Column('created', DateTime))

    # Copy each dict so hashing does not mutate the shared config objects.
    users = [dict(u) for u in conf.user.default_users]
    for u in users:
        u["password"] = User.password_hashing(u["password"])
    op.bulk_insert(user_table, users)
def upgrade():
    """Rebuild the ``role``/``role_focus`` tables and copy focus rows over.

    Improvement: rows are accumulated and written with a single
    ``op.bulk_insert`` instead of issuing one bulk_insert per source row
    inside the loop (the old code defeated the point of a bulk insert).
    """
    op.drop_table('role')
    op.rename_table('tmp_role', 'role')
    op.rename_table('role_focus', 'tmp_role_focus')

    role_focus = op.create_table(
        'role_focus',
        sa.Column('role_focus_id', sa.Integer(), nullable=False),
        sa.Column('role_id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('focus_name', sa.String(), nullable=True),
        sa.ForeignKeyConstraint(['role_id'], ['role.role_id'],
                                name='fk_focus_role'),
        sa.PrimaryKeyConstraint('role_focus_id'),
    )

    # NOTE(review): role_focus_id is copied from focus.role_id, mirroring the
    # original migration — confirm that is intentional and collision-free.
    rows = [
        {
            'role_focus_id': focus.role_id,
            'role_id': focus.role_id,
            'user_id': focus.user_id,
            'focus_name': focus.focus_name,
        }
        for focus in connection.execute(focus_helper.select())
    ]
    if rows:
        op.bulk_insert(role_focus, rows)

    op.drop_table('tmp_role_focus')
def upgrade():
    """Grant users access to files attached to their groups' submissions."""
    conn = op.get_bind()

    # user_id -> set of file_ids the user can already access
    user_files = defaultdict(set)
    for user_id, file_id in conn.execute(usertofile.select()):
        user_files[user_id].add(file_id)

    # group_id -> list of member user_ids
    group_users = defaultdict(list)
    for group_id, user_id in conn.execute(usertogroup.select()):
        group_users[group_id].append(user_id)

    # submission_id -> owning group_id
    sub_to_group = {}
    for group_id, sub_id in conn.execute(submission.select()):
        sub_to_group[sub_id] = group_id

    # group_id -> set of file_ids reachable through its submissions
    group_files = defaultdict(set)
    for file_id, sub_id in conn.execute(subtofile.select()):
        group_files[sub_to_group[sub_id]].add(file_id)

    # (user_id, file_id) associations that are missing
    to_add = set()
    for group_id, files in group_files.items():
        for user_id in group_users[group_id]:
            for file_id in files - user_files[user_id]:
                to_add.add((user_id, file_id))

    if to_add:
        op.bulk_insert(
            usertofile,
            [{'user_id': user_id, 'file_id': file_id}
             for user_id, file_id in to_add])
# Exemple #16
# 0
def upgrade():
    """Register the ``cycle_start_failed`` notification type."""
    notification_types_table = table(
        'notification_types',
        column('id', sa.Integer),
        column('name', sa.String),
        column('description', sa.Text),
        column('template', sa.String),
        column('instant', sa.Boolean),
        column('advance_notice', sa.Integer),
        column('advance_notice_end', sa.Integer),
        column('created_at', sa.DateTime),
        column('modified_by_id', sa.Integer),
        column('updated_at', sa.DateTime),
        column('context_id', sa.Integer),
    )

    # Cycle-created notification type.
    cycle_start_failed = {
        "name": "cycle_start_failed",
        "description": ("Notify workflow owners that a cycle has failed to"
                        "start for a recurring workflow"),
        "template": "cycle_start_failed",
        "advance_notice": 0,
        "instant": False,
    }
    op.bulk_insert(notification_types_table, [cycle_start_failed])

    # New instances don't need this migration so we can skip this.
    # All product instances already had this migration applied and therefore
    # don't need this.
    # In case this migration IS needed - FIRST upgrade to grapes release, THEN
    # upgrade to plum and beyond...
    return
# Exemple #17
# 0
def upgrade():
    """Upgrade database schema and/or data, creating a new revision."""

    def _task_assignees_row(object_type):
        # Each call stamps its own timestamps, exactly as the original did.
        return {
            'name': "Task Assignees",
            'object_type': object_type,
            'created_at': datetime.datetime.now(),
            'updated_at': datetime.datetime.now(),
            'mandatory': True,
            'non_editable': True,
            'delete': False,
            'my_work': False,
        }

    op.bulk_insert(
        ACR_TABLE,
        [_task_assignees_row("TaskGroupTask"),
         _task_assignees_row("CycleTaskGroupObjectTask")])
    op.execute(INSERT_ACL_ENTRIES)
def _insert_operation_port():
    """Seed input/output ports for the SpatialDropout 1D/2D/3D operations."""
    tb = table(
        'operation_port',
        column('id', Integer),
        column('type', String),
        column('tags', String),
        column('order', Integer),
        column('multiplicity', String),
        column('operation_id', Integer),
        column('slug', String))

    rows = []
    # (operation_id, input port id, output port id)
    for op_id, in_id, out_id in (
            (5024, 5124, 5224),   # SpatialDropout1D
            (5025, 5125, 5225),   # SpatialDropout2D
            (5026, 5126, 5226)):  # SpatialDropout3D
        rows.append({'id': in_id, 'type': 'INPUT', 'tags': '', 'order': 1,
                     'multiplicity': 'ONE', 'operation_id': op_id,
                     'slug': 'input data'})
        rows.append({'id': out_id, 'type': 'OUTPUT', 'tags': '', 'order': 1,
                     'multiplicity': 'ONE', 'operation_id': op_id,
                     'slug': 'output data'})

    op.bulk_insert(tb, rows)
# Exemple #19
# 0
def upgrade():
    """Create the ``category`` table, link projects to it, and seed it.

    Bug fix: ``category_table`` was previously assigned a plain tuple — the
    ``op.create_table`` call was missing — so the table was never created and
    the subsequent ``op.bulk_insert`` on a tuple would crash. The seed insert
    also ran inside the batch_alter_table block; it now runs after the schema
    changes are complete.
    """
    category_table = op.create_table(
        'category',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=64), nullable=False),
        sa.Column('slug', sa.String(length=64), nullable=False),
        sa.PrimaryKeyConstraint('id'),
    )
    ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('project') as batch_op:
        batch_op.add_column(
            sa.Column('category_id', sa.Integer(), nullable=True))
        batch_op.create_foreign_key('project', 'category', 'category_id', 'id')

    op.bulk_insert(category_table, [
        {'id': 1, 'name': 'Art', 'slug': 'art'},
        {'id': 2, 'name': 'Comics', 'slug': 'comics'},
        {'id': 3, 'name': 'Crafts', 'slug': 'crafts'},
        {'id': 4, 'name': 'Dance', 'slug': 'dance'},
        {'id': 5, 'name': 'Design', 'slug': 'design'},
        {'id': 6, 'name': 'Fashion', 'slug': 'fashion'},
        {'id': 7, 'name': 'Film & Video', 'slug': 'film-video'},
        {'id': 8, 'name': 'Food', 'slug': 'food'},
        {'id': 9, 'name': 'Games', 'slug': 'games'},
        {'id': 10, 'name': 'Journalism', 'slug': 'journalism'},
        {'id': 11, 'name': 'Music', 'slug': 'music'},
        {'id': 12, 'name': 'Photography', 'slug': 'photography'},
        {'id': 13, 'name': 'Publishing', 'slug': 'publishing'},
        {'id': 14, 'name': 'Technology', 'slug': 'technology'},
        {'id': 15, 'name': 'Theater', 'slug': 'theater'},
        {'id': 16, 'name': 'Other', 'slug': 'other'},
    ])
def upgrade():
    """Create ``employees_attendance`` and register the new ability names."""
    ### commands auto generated by Alembic  ###
    op.create_table(
        'employees_attendance',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('attendanceDate', sa.Date(), nullable=False),
        sa.Column('arriveTime', sa.Time(), nullable=False),
        sa.Column('leaveTime', sa.Time(), nullable=False),
        sa.Column('employee_id', sa.Integer(), nullable=False),
        sa.Column('createdBy_id', sa.Integer(), nullable=True),
        sa.Column('issueDateTime', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['createdBy_id'], ['users.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['employee_id'], ['users.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
    )

    abilities_table = table(
        'abilities',
        column('id', Integer),
        column('name', String),
    )

    ability_names = [
        "employeeAttendances.list",
        "employeeAttendances.show",
        "employeeAttendances.delete",
        "employeeAttendances.update",
        "employeeAttendances.create",
        "feedbacks.list",
        "feedbacks.show",
    ]
    op.bulk_insert(abilities_table,
                   [{'name': name} for name in ability_names])
# Exemple #21
# 0
def _insert_operation_form_field():
    """Seed the 'title' and 'attributes' fields for form 99."""
    tb = table(
        'operation_form_field',
        column('id', Integer),
        column('name', String),
        column('type', String),
        column('required', Integer),
        column('order', Integer),
        column('default', Text),
        column('suggested_widget', String),
        column('values_url', String),
        column('values', String),
        column('scope', String),
        column('form_id', Integer))

    field_names = [c.name for c in tb.columns]
    field_rows = [
        (231, 'title', 'TEXT', 1, 1, '', 'text',
         None, None, 'EXECUTION', 99),
        (232, 'attributes', 'TEXT', 1, 2, '', 'attribute-selector',
         None, None, 'EXECUTION', 99),
    ]
    op.bulk_insert(tb, [dict(zip(field_names, row)) for row in field_rows])
def upgrade():
    """Split ``posts.user``/``posts.value`` out into two association tables.

    Improvement: the raw SQL string is wrapped in ``sa.text()`` —
    plain-string ``Connection.execute`` is deprecated in SQLAlchemy 1.4 and
    removed in 2.0, while ``text()`` works on all versions.
    """
    op.create_table(
        'post_users',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('user', sa.String(64), nullable=False),
        sa.Column('post_id', sa.Integer, nullable=False))

    op.create_table(
        'post_values',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('value', sa.String(64), nullable=False),
        sa.Column('post_id', sa.Integer, nullable=False))

    # Ad-hoc table objects for the data copy below.
    post_users_table = sa.sql.table(
        'post_users',
        sa.Column('user', sa.String(64), nullable=False),
        sa.Column('post_id', sa.Integer, nullable=False))

    post_values_table = sa.sql.table(
        'post_values',
        sa.Column('value', sa.String(64), nullable=False),
        sa.Column('post_id', sa.Integer, nullable=False))

    conn = op.get_bind()
    results = conn.execute(
        sa.text("SELECT id, user, value FROM posts")).fetchall()

    op.bulk_insert(post_users_table,
                   [{'post_id': r[0], 'user': r[1]} for r in results])
    op.bulk_insert(post_values_table,
                   [{'post_id': r[0], 'value': r[2]} for r in results])
def genericize_thread():
    """Move ``thread.g_thrid`` into a new ``imapthread`` subtype table."""
    class Thread_(Base):
        __table__ = Base.metadata.tables['thread']

    # Snapshot the column that is about to be dropped.
    with session_scope() as db_session:
        snapshot = db_session.query(Thread_.id, Thread_.g_thrid).all()
    to_insert = [{'id': thread_id, 'g_thrid': g_thrid}
                 for thread_id, g_thrid in snapshot]

    # Discriminator column on the (now generic) base table.
    op.add_column('thread', sa.Column('type', sa.String(16)))

    # Subtype table holding the IMAP-specific column.
    op.create_table(
        'imapthread',
        sa.Column('g_thrid', sa.BigInteger(), nullable=True, index=True),
        sa.Column('id', sa.Integer()),
        sa.ForeignKeyConstraint(['id'], ['thread.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'))

    # Ad-hoc table object for the data copy.
    imapthread = table('imapthread',
                       column('g_thrid', sa.BigInteger),
                       column('id', sa.Integer))
    if to_insert:
        op.bulk_insert(imapthread, to_insert)

    # The data is migrated; safe to drop the old column now.
    op.drop_column('thread', 'g_thrid')
def upgrade():
    """Create posts/users tables and seed an initial ``root`` user.

    Bug fix: the password was built with ``'******' % random.randrange(...)``
    — a format string with no conversion specifier (apparently lost to
    redaction), which raises ``TypeError`` at runtime. The password is now
    generated with ``secrets`` (cryptographically secure, appropriate for
    credentials) as 8 hex digits, matching the original ``16**8`` space.
    """
    # Create blog posts table.
    op.create_table(
        'posts',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('author', sa.Unicode(length=32), nullable=True),
        sa.Column('title', sa.Unicode(length=128), nullable=True),
        sa.Column('content', sa.UnicodeText(), nullable=True),
        sa.Column('time', sa.DateTime(timezone=True), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    # Create users table.
    op.create_table(
        'users',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.Unicode(length=32), nullable=True),
        sa.Column('hash', sa.String(length=130), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index('ix_user_name', 'users', ['name'], unique=True)

    users = table('users',
                  sa.Column('id', sa.Integer()),
                  sa.Column('name', sa.Unicode(length=32)),
                  sa.Column('hash', sa.String(length=130)))

    # Generate initial 'root' user with a random 8-hex-digit password.
    import secrets
    password = '%08x' % secrets.randbelow(16 ** 8)
    pw_hash = User.hash_password(password)
    op.bulk_insert(users, [{'name': u'root', 'hash': pw_hash}])
    # NOTE(review): this logs a plaintext password; acceptable only as
    # first-run bootstrap output.
    logging.getLogger('alembic.migration').info(
        'Generated user "root" with password %s', password)
def genericize_imapaccount():
    """Rename ``imapaccount`` to ``account`` and split off an IMAP subtype."""
    class ImapAccount_(Base):
        __table__ = Base.metadata.tables['imapaccount']

    # Snapshot the column that is about to be dropped.
    with session_scope() as db_session:
        snapshot = db_session.query(ImapAccount_.id,
                                    ImapAccount_.imap_host).all()
    to_insert = [{'id': account_id, 'imap_host': imap_host}
                 for account_id, imap_host in snapshot]

    # Generic base table: rename and add the discriminator column.
    op.rename_table('imapaccount', 'account')
    op.add_column('account', sa.Column('type', sa.String(16)))

    # Subtype table holding the IMAP-specific column.
    op.create_table(
        'imapaccount',
        sa.Column('imap_host', sa.String(512)),
        sa.Column('id', sa.Integer()),
        sa.ForeignKeyConstraint(['id'], ['account.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'))

    # Ad-hoc table object for the data copy.
    imapaccount = table('imapaccount',
                        column('imap_host', sa.String()),
                        column('id', sa.Integer))
    if to_insert:
        op.bulk_insert(imapaccount, to_insert)

    # The data is migrated; safe to drop the old column now.
    op.drop_column('account', 'imap_host')
def _insert_operation_port_translation():
    """Seed en/pt names and descriptions for operation ports 4025-4029."""
    port_translations = table(
        'operation_port_translation',
        column('id', Integer),
        column('locale', String),
        column('name', String),
        column('description', String),
    )

    field_names = ('id', 'locale', 'name', 'description')
    records = (
        (4025, 'en', 'input data 1', 'Input data 1'),
        (4025, 'pt', 'dados de entrada 1', 'Input data 1'),
        (4026, 'en', 'input data 2', 'Input data 2'),
        (4026, 'pt', 'dados de entrada 2', 'Input data 2'),
        (4027, 'en', 'output data', 'Output data'),
        (4027, 'pt', 'dados de saída', 'Dados de saída'),
        (4028, 'en', 'algorithm', 'Untrained classification model'),
        (4028, 'pt', 'algoritmo', 'Modelo de classificação não treinado'),
        (4029, 'en', 'algorithm', 'Untrained regressor model'),
        (4029, 'pt', 'algoritmo', 'Modelo de regressão não treinado'),
    )
    op.bulk_insert(port_translations,
                   [dict(zip(field_names, record)) for record in records])
# Exemple #27 — scraper artifact (example separator + vote count), not Python code
# 0
def upgrade():
    # Auto-generated by Alembic; seed data and backfill added by hand.
    op.create_table(
        "durations",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("name", sa.String(length=255), nullable=False),
        sa.Column("duration", sa.Integer(), nullable=False),
        sa.Column("inactive", sa.Boolean(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    op.add_column("talks", sa.Column("duration_id", sa.Integer(), nullable=True))
    op.create_foreign_key(None, "talks", "durations", ["duration_id"], ["id"])

    # Seed the lookup table; every option starts active.
    seed = (
        ("30 minutes", 30),
        ("45 minutes", 45),
        ("60 minutes", 60),
        ("1/2 day", 180),
        ("full day", 360),
    )
    op.bulk_insert(
        durations_table,
        [{"name": label, "duration": minutes, "inactive": False}
         for label, minutes in seed],
    )

    # Re-point talks at the new lookup rows, then make the FK mandatory
    # and drop the old inline column.
    op.execute("UPDATE talks AS t SET duration_id = d.id FROM durations AS d WHERE t.duration::text = d.name")

    op.alter_column("talks", "duration_id", existing_type=sa.INTEGER(), nullable=False)
    op.drop_column("talks", "duration")

    # The old column was an enum on PostgreSQL; drop its now-orphaned type.
    if op.get_context().bind.dialect.name == "postgresql":
        op.execute("DROP TYPE duration")
# Exemple #28 — scraper artifact (example separator + vote count), not Python code
# 0
def upgrade():
  """Upgrade database schema and/or data, creating a new revision.

  Adds the 'Task Secondary Assignees' access-control role for both
  TaskGroupTask and CycleTaskGroupObjectTask object types.
  """
  # Capture one timestamp so every row and every field written by this
  # revision agrees; the original called datetime.now() four times, giving
  # created_at/updated_at values that differed by microseconds.
  now = datetime.datetime.now()
  # Both rows are identical except for object_type.
  common = {
      'name': 'Task Secondary Assignees',
      'read': True,
      'update': True,
      'delete': False,
      'my_work': False,
      'created_at': now,
      'updated_at': now,
      'mandatory': False,
      'default_to_current_user': False,
      'non_editable': True,
      'internal': False,
  }
  op.bulk_insert(
      ACR_TABLE,
      [dict(common, object_type=object_type)
       for object_type in ('TaskGroupTask', 'CycleTaskGroupObjectTask')]
  )
def _insert_operation_translation():
    """Seed en/pt names and descriptions for operations 4018-4020."""
    translations = table(
        'operation_translation',
        column('id', Integer),
        column('locale', String),
        column('name', String),
        column('description', String),
    )

    records = [
        {'id': 4018, 'locale': 'pt', 'name': 'Executar consulta SQL',
         'description': 'Executa uma consulta usando a linguagem SQL '
                        'disponível no Pandas SQL.'},
        {'id': 4018, 'locale': 'en', 'name': 'Execute SQL query',
         'description': 'Executes a query using SQL language available '
                        'in Pandas SQL.'},

        {'id': 4019, 'locale': 'pt',
         'name': 'Classificador Perceptron multicamadas',
         'description': 'Classificador Perceptron multicamadas.'},
        {'id': 4019, 'locale': 'en',
         'name': 'Multi-layer Perceptron classifier',
         'description': 'Multi-layer Perceptron classifier.'},

        {'id': 4020, 'locale': 'pt',
         'name': 'Regressor Perceptron multicamadas',
         'description': 'Regressor Perceptron multicamadas.'},
        {'id': 4020, 'locale': 'en',
         'name': 'Multi-layer Perceptron Regressor',
         'description': 'Multi-layer Perceptron Regressor.'},
    ]

    op.bulk_insert(translations, records)
def upgrade():
    """Create admin tier tables and seed one tier holding every permission."""
    permission_names = (
        u'permissions',
        u'search_characters',
        u'announcements',
        u'broadcast',
        u'user_list',
        u'groups',
        u'log',
        u'spamless',
        u'ip_bans',
    )
    op.create_table(
        'admin_tiers',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.Unicode(length=50), nullable=False),
        sa.PrimaryKeyConstraint('id'),
    )
    permission_enum = sa.Enum(*permission_names,
                              name='admin_tier_permissions_permission')
    op.create_table(
        'admin_tier_permissions',
        sa.Column('admin_tier_id', sa.Integer(), nullable=False),
        sa.Column('permission', permission_enum, nullable=False),
        sa.ForeignKeyConstraint(['admin_tier_id'], ['admin_tiers.id'], ),
        sa.PrimaryKeyConstraint('admin_tier_id', 'permission'),
    )
    # Seed tier id 1 and grant it all of the permissions defined above.
    op.bulk_insert(AdminTier.__table__, [{'name': 'Hoofbeast tier'}])
    seed_rows = [{'admin_tier_id': 1, 'permission': name}
                 for name in permission_names]
    op.bulk_insert(AdminTierPermission.__table__, seed_rows)
def data_upgrades():
    """Insert the demo organization seed row."""
    # TABLE[:-1]: the trailing element of TABLE is not part of the
    # table() call signature here — presumably extra metadata; confirm
    # against TABLE's definition.
    op.bulk_insert(table(*TABLE[:-1]), [DEMO_ORGANIZATION])
# Exemple #32 — scraper artifact (example separator + vote count), not Python code
# 0
def downgrade():
    """Re-create the 'x_format' and 'legend' form fields (and their
    translations) removed by the corresponding upgrade."""
    # Ad-hoc table construct for the bulk insert (no ORM model needed).
    tb = table(
        'operation_form_field',
        column('id', Integer),
        column('name', String),
        column('type', String),
        column('required', Integer),
        column('order', Integer),
        column('default', Text),
        column('suggested_widget', String),
        column('values_url', String),
        column('values', String),
        column('scope', String),
        column('form_id', Integer),
    )

    columns = [c.name for c in tb.columns]

    # Options offered by the select2 widget for the X-axis format; each
    # entry is {key, value} with key == value.
    # Fixed here: the original list had a truncated key ("%d-%" paired
    # with value "%d-%m") and three exact duplicate entries.
    supported_formats = [
        {"key": fmt, "value": fmt} for fmt in (
            "%Y-%m-%dT%H:%M:%S.%LZ",
            "%m-%d",
            "%d-%m",
            "%Y-%m-%d",
            "%m-%Y-%d",
            "%m-%Y-%d %H:%M",
            "%m-%Y-%d %H:%M:%S",
            "%H:%M",
            "%H:%M:%S",
            ".2",
            ".4",
            "%",
            "p",
            "d",
        )
    ]

    data = [
        [
            X_FORMAT_ID, 'x_format', 'TEXT', 0, 8, None, 'select2', None,
            json.dumps(supported_formats), 'EXECUTION', FORM_ID
        ],
        [
            LEGEND_ID, 'legend', 'INTEGER', 0, 5, 1, 'checkbox', None, None,
            'EXECUTION', FORM_ID
        ],
    ]

    rows = [dict(zip(columns, row)) for row in data]
    op.bulk_insert(tb, rows)

    # en/pt labels for the two fields inserted above.
    tb = table(
        'operation_form_field_translation',
        column('id', Integer),
        column('locale', String),
        column('label', String),
        column('help', String),
    )

    columns = [c.name for c in tb.columns]

    data = [
        [X_FORMAT_ID, 'en', 'X-axis format', 'X-axis format'],
        [X_FORMAT_ID, 'pt', 'Formato para eixo X', 'Formato para eixo X'],
        [LEGEND_ID, 'en', 'Display Legend', 'Display Legend'],
        [LEGEND_ID, 'pt', 'Exibir Legenda', 'Exibir Legenda'],
    ]

    rows = [dict(zip(columns, row)) for row in data]
    op.bulk_insert(tb, rows)
def _insert_operation_form_field():
    """Insert form fields (ids 4207-4223) for operation form 4009.

    Covers feature/label/prediction attribute selection plus fields that
    look like SGD-style hyper-parameters (loss, penalty, learning_rate,
    eta0, ...) — presumably for a Perceptron/SGD operation; confirm
    against form 4009's owning operation.
    """
    # Ad-hoc table construct for the bulk insert (no ORM model needed).
    tb = table(
        'operation_form_field',
        column('id', Integer),
        column('name', String),
        column('type', String),
        column('required', Integer),
        column('order', Integer),
        column('default', Text),
        column('suggested_widget', String),
        column('values_url', String),
        column('values', String),
        column('scope', String),
        column('form_id', Integer),
        column('enable_conditions', String),
    )

    # Order must match the per-row tuples in `data` below.
    columns = ('id', 'name', 'type', 'required', 'order', 'default',
               'suggested_widget', 'values_url', 'values', 'scope', 'form_id',
               'enable_conditions')

    # Client-side JS expressions deciding when a field is enabled in the UI.
    enable_condition = 'this.loss.internalValue !== "squared_loss"'
    enable_condition2 = 'this.learning_rate.internalValue !== "optimal"'
    enable_condition3 = 'this.early_stopping.internalValue === "1"'

    data = [
        # (id, name, type, required, order, default, suggested_widget,
        #  values_url, values, scope, form_id, enable_conditions)
        (4221, 'features', 'TEXT', 1, 1, None, 'attribute-selector', None, None, 'EXECUTION', 4009, None),
        (4222, 'label', 'TEXT', 1, 2, None, 'attribute-selector', None, None, 'EXECUTION', 4009, None),
        (4223, 'prediction', 'TEXT', 0, 3, 'prediction', 'text', None, None, 'EXECUTION', 4009, None),
        (4208, 'power_t', 'DECIMAL', 1, 6, 0.5, 'decimal', None, None, 'EXECUTION', 4009, None),
        (4211, 'early_stopping', 'INTEGER', 1, 7, 0, 'checkbox', None, None, 'EXECUTION', 4009, None),
        # validation_fraction only makes sense when early stopping is on.
        (4212, 'validation_fraction', 'DECIMAL', 1, 8, 0.1, 'decimal', None, None, 'EXECUTION', 4009,
         enable_condition3),
        # Dropdown options are stored as JSON in the 'values' column.
        (4215, 'loss', 'TEXT', 1, 9, 'squared_loss', 'dropdown', None,
         json.dumps([
             {'key': 'squared_loss', 'value': 'squared_loss'},
             {'key': 'huber', 'value': 'huber'},
             {'key': 'epsilon_insensitive', 'value': 'epsilon_insensitive'},
             {'key': 'squared_epsilon_insensitive', 'value': 'squared_epsilon_insensitive'},
         ]),
         'EXECUTION', 4009, None),
        # epsilon only applies to the non-default loss functions.
        (4213, 'epsilon', 'DECIMAL', 0, 10, 0.1, 'decimal', None, None, 'EXECUTION', 4009, enable_condition),
        (4214, 'n_iter_no_change', 'INTEGER', 1, 11, 5, 'integer', None, None, 'EXECUTION', 4009, None),
        (4216, 'penalty', 'TEXT', 1, 12, 'l2', 'dropdown', None,
         json.dumps([
             {'key': 'none', 'value': 'none'},
             {'key': 'l2', 'value': 'l2'},
             {'key': 'l1', 'value': 'l1'},
             {'key': 'elasticnet', 'value': 'elasticnet'},
         ]),
         'EXECUTION', 4009, None),
        (4217, 'fit_intercept', 'INTEGER', 1, 13, 1, 'integer', None, None, 'EXECUTION', 4009, None),
        # eta0 is irrelevant for the 'optimal' learning-rate schedule.
        (4219, 'eta0', 'DECIMAL', 0, 20, 0.01, 'decimal', None, None, 'EXECUTION', 4009, enable_condition2),
        (4210, 'warm_start', 'INTEGER', 0, 17, 0, 'checkbox', None, None, 'EXECUTION', 4009, None),
        (4218, 'verbose', 'INTEGER', 0, 18, 0, 'integer', None, None, 'EXECUTION', 4009, None),
        (4220, 'average', 'INTEGER', 0, 19, 1, 'integer', None, None, 'EXECUTION', 4009, None),
        (4207, 'learning_rate', 'TEXT', 0, 14, 'invscaling', 'dropdown', None,
         json.dumps([
             {'key': 'constant', 'value': 'constant'},
             {'key': 'invscaling', 'value': 'invscaling'},
             {'key': 'adaptive', 'value': 'adaptive'},
             {'key': 'optimal', 'value': 'optimal'},
         ]),
         'EXECUTION', 4009, None),
        (4209, 'shuffle', 'INTEGER', 0, 21, 1, 'checkbox', None, None, 'EXECUTION', 4009, None),

    ]
    rows = [dict(list(zip(columns, row))) for row in data]
    op.bulk_insert(tb, rows)
def _insert_operation_form_field_translation():
    """Insert en/pt labels and help texts for form fields 4207-4223.

    Ids correspond to the rows created by _insert_operation_form_field.
    """
    # Ad-hoc table construct for the bulk insert (no ORM model needed).
    tb = table(
        'operation_form_field_translation',
        column('id', Integer),
        column('locale', String),
        column('label', String),
        column('help', String), )

    # Order must match the per-row tuples in `data` below.
    columns = ('id', 'locale', 'label', 'help')
    data = [
        # (field id, locale, label, help)
        (4221, 'en', 'Features', 'Features.'),
        (4221, 'pt', 'Atributo(s) previsor(es)', 'Atributo(s) previsor(es).'),

        (4222, 'en', 'Label attribute', 'Label attribute.'),
        (4222, 'pt', 'Atributo com o rótulo', 'Atributo com o rótulo.'),

        (4223, 'en', 'Prediction attribute (new)', 'Prediction attribute (new).'),
        (4223, 'pt', 'Atributo com a predição (novo)', 'Atributo usado para predição (novo).'),

        # NOTE(review): the pt help below repeats the English text — likely
        # an untranslated leftover; confirm before changing.
        (4207, 'en', 'Learning rate', 'The learning rate schedule.'),
        (4207, 'pt', 'Taxa de aprendizado', 'The learning rate schedule.'),

        (4208, 'en', 'Power t', 'The exponent for inverse scaling learning rate.'),
        (4208, 'pt', 'Expoente', 'O expoente da taxa de aprendizado de escala inversa.'),

        (4209, 'en', 'Shuffle', 'Whether or not the training data should be shuffled after each epoch.'),
        (4209, 'pt', 'Embaralhar', 'Se os dados de treinamento devem ou não ser embaralhados após cada época.'),

        (4210, 'en', 'Warm start', 'When set to True, reuse the solution of the previous call to fit as initialization,'
                                   ' otherwise, just erase the previous solution.'),
        (4210, 'pt', 'Warm start', 'Quando definido como True, reutilize a solução da chamada anterior ao fit como'
                                   ' inicialização, caso contrário, apenas apague a solução anterior.'),

        (4211, 'en', 'Early stopping', 'Whether to use early stopping to terminate training when validation score is'
                                       ' not improving.'),
        (4211, 'pt', 'Parada antecipada', 'Se a parada precoce deve ser usada para encerrar o treinamento quando a'
                                          ' pontuação de validação não está melhorando.'),

        (4212, 'en', 'Validation fraction', 'The proportion of training data to set aside as validation set for early'
                                            ' stopping.'),
        (4212, 'pt', 'Fração de validação', 'A proporção de dados de treinamento a serem retirados como validação'
                                            ' definida para parada antecipada.'),

        (4213, 'en', 'Epsilon', 'Epsilon in the epsilon-insensitive loss functions.'),
        (4213, 'pt', 'Epsilon', 'Epsilon nas funções de perda insensível ao epsilon.'),

        (4214, 'en', 'N iter no change', 'Number of iterations with no improvement to wait before early stopping.'),
        (4214, 'pt', 'N iter no change', 'Número de iterações sem melhoria a aguardar antes da parada precoce.'),

        (4215, 'en', 'Loss', 'The loss function to be used.'),
        (4215, 'pt', 'Perda', 'A função de perda a ser usada.'),

        (4216, 'en', 'Penalty', 'The penalty (aka regularization term) to be used.'),
        (4216, 'pt', 'Penalidade', 'A penalidade (termo de regularização) a ser usada.'),

        (4217, 'en', 'Fit intercept', 'Whether the intercept should be estimated or not.'),
        (4217, 'pt', 'Interceptação', 'Se a interceptação deve ser estimada ou não.'),

        (4218, 'en', 'Verbose', 'The verbosity level.'),
        (4218, 'pt', 'Verbosidade', 'O nível de verbosidade.'),

        (4219, 'en', 'Eta', 'The initial learning rate.'),
        (4219, 'pt', 'Eta', 'A taxa de aprendizado inicial.'),

        (4220, 'en', 'Average', 'If set to an int greater than 1, averaging will begin once the total number of samples'
                                ' seen reaches average. So average=10 will begin averaging after seeing 10 samples.'),
        (4220, 'pt', 'Média', 'Se definido como um int maior que 1, a média começará assim que o número total de'
                              ' amostras vistas atingir a média. Então a média = 10 começará a média depois de ver 10'
                              ' amostras.'),

    ]
    rows = [dict(list(zip(columns, row))) for row in data]
    op.bulk_insert(tb, rows)
# Exemple #35 — scraper artifact (example separator + vote count), not Python code
# 0
def upgrade():
    """Create the decision_reason lookup table and seed it with the canned
    name-decision reasons (ids 3-51)."""
    # ### commands auto generated by Alembic - please adjust! ###
    decision_reason_table = op.create_table('decision_reason',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=1024), nullable=True),
    sa.Column('reason', sa.String(length=1024), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###
    # NOTE(review): several 'reason' strings (ids 14, 49) contain '' inside a
    # single-quoted literal; Python parses that as adjacent-literal
    # concatenation, silently dropping the intended apostrophe (e.g.
    # "Corporation''S" becomes "CorporationS"). Likely a SQL-escaping
    # artifact from the original data source — confirm intended text before
    # changing, since these are user-facing strings.
    op.bulk_insert(
        decision_reason_table,
        [
            {'id': 3, 'name': 'Amend Real Estate ', 'reason': '***Name Amended To Allow Acceptance By Real Estate Council***'},
            {'id': 4, 'name': 'Amend ', 'reason': '*** Name Amended To Allow Acceptance ***'},
            {'id': 5, 'name': 'Assumed Name', 'reason': 'Assumed Name A Foreign Entity Whose Name Has Been Rejected For Use In Bc, May Register In Bc Under An Assumed Name.'},
            {'id': 6, 'name': 'Assumed Yes ', 'reason': 'A Foreign Entity That Is Registering In British Columbia As An Extraprovincial Company And Adopting An Assumed Name Must Provide The Registrar With A Covering Letter Attaching An Undertaking To Carry On Business Under The Assumed Name.  Sample Working For The Undertaking Can Be Found On Page 34 Of The Information For Registration Of An Extraprovincial Company In British Columbia   Information Package And Also In The Online Help Text At Www.Corporateonline.Gob Please Fax The Letter Containing The Undertaking To The Attention Of The Corporations Unit, Bc Registry Services (Fax Number:250 356-8923).'},
            {'id': 7, 'name': 'Cancel Refund ', 'reason': 'Reservation Not Required - Fees Refunded.'},
            {'id': 8, 'name': 'Char ', 'reason': 'No Provision For Special Characters'},
            {'id': 9, 'name': 'Conditions ', 'reason': 'This Name Reservation Has A Condition That Must Be Met.  You Will Need To Contact This Office So Registry Staff Can Release The Condition Before You Incorporate Or Register.'},
            {'id': 10, 'name': 'Consent Firms ', 'reason': '** Written And Signed Consent Must Be Received Before You Can Register, Please Email To [email protected]**'},
            {'id': 11, 'name': 'Consent Incorp ', 'reason': '** Written And Signed Consent Must Be Received Before You Can Incorporate, Please Email To [email protected] **'},
            {'id': 12, 'name': 'Consent Prop', 'reason': 'Consent Required If Not General Partner/Proprietor'},
            {'id': 13, 'name': 'Continuation Soc: ', 'reason': 'Once The Name Of The Not For Profit Corporation Has Been Approved The Society Must Submit A Letter Authorizing The Continuation From Its Current Jurisdiction. This Authorization Must Be Submitted To The  Registrar At The Time Of Filing The Location For Continuation In. You Must Include The Name Reservation Number And If The Society Is Currently Registered In Bc As An Extraprovincial Non-Share Corporation, Include The Current Bc Registration Number. Please Review The Incions At Www.Bcregistry Services.Gov.Bc.Ca For Complete Information.'},
            {'id': 14, 'name': 'Continuation', 'reason': 'Once The Name Of The Corporation Has Been Approved, The Corporation Must Submit A Letter Authorizing The Continuation From The Corporation''S Home Jurisdiction. This Authorization Must Be Submitted To The Registrar Prior To Filing The Application For Continuation In. You Must Include The Name Reservation Number And, If The Corporation Is Currently Registered In Bc As An Extraprovincial Company, Please Include The B.C. Registration Number. Email The Consent To [email protected] Or Fax To (250)356-8923.  '},
            {'id': 15, 'name': 'Criteria', 'reason': 'Must Meet Bc Criteria'},
            {'id': 16, 'name': 'Designation – Society', 'reason': 'Must Use Society, Association Or Foundation In Name'},
            {'id': 17, 'name': 'Diploma', 'reason': ' "Requires Copy Of Diploma Conferring Title. Email To [email protected]."'},
            {'id': 18, 'name': 'Distinguishing', 'reason': 'Requires Distinguishing Term '},
            {'id': 19, 'name': 'Dupresto', 'reason': 'Check Current Reservation Again/Fees Refunded'},
            {'id': 20, 'name': 'Either', 'reason': ' "Requires Either Further Distinctive Or Descriptive Element."'},
            {'id': 21, 'name': 'Existing Company', 'reason': 'After Existing Company Changes Names Or Dissolves'},
            {'id': 22, 'name': 'Extra:  ?', 'reason': ''},
            {'id': 23, 'name': 'Farmers Institute', 'reason': 'Use Of This Term Is Administered By The Ministry Of Agriculture Farmers And Womens Institutes Act.  Please Contact The Superintendent Of Farmers Institutes At Or 1-888-221-7141. '},
            {'id': 24, 'name': 'Federal Refund', 'reason': 'Reservation Not Required For Federal Registration In Bc - Fees Refunded'},
            {'id': 25, 'name': 'Fees', 'reason': 'Entered 2X/Fees Refunded'},
            {'id': 26, 'name': 'Final Res', 'reason': '****Final Reservation*********'},
            {'id': 27, 'name': 'First Name', 'reason': 'Require Clients First Name Or Initials Before The Surname Or A Descriptive Word After.'},
            {'id': 28, 'name': 'Init', 'reason': '“Require A At Least 2 Initials"'},
            {'id': 29, 'name': 'Interrupt', 'reason': 'Name Approval Results Were Updated Due To A System Interruption.'},
            {'id': 30, 'name': 'Iof:  ?', 'reason': ''},
            {'id': 31, 'name': 'Llc Xpro Reg', 'reason': 'The Filing Of Your Llc Registration Cannot Be Completed Online And Must Be Completed By Submitting Form 33 Extrap Provincial Registration, Form 1 Request For A Business Number And A Certified Certificate If Of Status From The Home Jurisdiction To The Corporate Registry For Filing.”'},
            {'id': 32, 'name': 'Llc Change Of Name', 'reason': 'The Filing Of Your Llc Change Of Name Cannot Be Completed Online And Must Be Completed By Submitting Form 37 Notice Of Change Of Name And A Certified Certificate Of Status From The Home Jurisdiction To The Corporate Registry For Filing. '},
            {'id': 33, 'name': 'New Macro', 'reason': '“New Macro Test”'},
            {'id': 34, 'name': 'No Consent', 'reason': 'Consent As Indicated Is Not Required.'},
            {'id': 35, 'name': 'Nro:  ?', 'reason': ''},
            {'id': 36, 'name': 'Number Issue', 'reason': 'Numbers Issued At Time Of Incorporation'},
            {'id': 37, 'name': 'Numbered', 'reason': 'Not Required For Numbered Company '},
            {'id': 38, 'name': 'Offensive', 'reason': 'Could Be Offensive To Some'},
            {'id': 39, 'name': 'Only Change Name', 'reason': 'Only After Existing Company Has Changed Its Name, Amalgamated Or Dissolved.'},
            {'id': 40, 'name': 'Out Of Jurisdiction', 'reason': 'Unable To Register In Bc, Please Refer To The Partnership Regulation, Section 1, '},
            {'id': 41, 'name': 'Outside Canada Xpro', 'reason': 'A Current Certificate Of Status From The Home Jurisdiction Must Be Submitted To The Corporate Registry By Email To [email protected] Prior To Registering The Extraprovincial Company Online At Www.Corporateonline.Gov.Bc.Ca.  You Must Include The Name Reservation Number On Your Correspondence.'},
            {'id': 42, 'name': 'Proprietorship', 'reason': '*** Allowed For Proprietorship/Partnership Name Only ***'},
            {'id': 43, 'name': 'Protocol', 'reason': 'Name Declined By Provincial Protocol Office - Call Our Office For Suggested Options And A Free Search'},
            {'id': 44, 'name': 'Society', 'reason': 'Term Restricted To Non-Profit Use Only'},
            {'id': 45, 'name': 'Spell', 'reason': 'Spelling Of Amended Call If Original Is Correct'},
            {'id': 46, 'name': 'Superceded', 'reason': 'Superceded By Incorporation Request - Call For Free Search'},
            {'id': 47, 'name': 'Tm', 'reason': 'Require A Letter Of Consent From Trademark Holder Or Franchisor.  Please Email To [email protected]'},
            {'id': 48, 'name': 'Tribal', 'reason': 'Require A Letter Of Consent From Tribal Authority.  Please Email To [email protected]'},
            {'id': 49, 'name': 'Ulc Cont In', 'reason': 'After Approval Of The Name Of The Unlimited Liability Corporation Letter Authorizing The Continuation From The Corporation''S Home Jurisdiction Must Be Submitted To The Registrar Prior To Filing The Application For Continuing A Foreign Unlimited Liability Corporation Into British Columbia.  “Include The Name Reservation Number And, If The Corporation Is Currently Registered In Bc As An Extraprovincial Company, The Bc Registration Number.  As Well, An Alberta Unlimited Liability Corporation Must Provide To The Registrar In Addition To The Jurisdiction "  "Authorization, A Director Affidavit Or Certified Copy Of The Court Order From The Home Jurisdiction Approving The Continuation.  Refer To Section 23.1 Of The Business Corporation Regulations. Please Provide A Telephone Number Or E-Mail Address For Notification.  Complete Instructions Are" " At:  Www.Corporateonline.Gov.Bc.Ca'},
            {'id': 50, 'name': 'Xpro', 'reason': '***Extrap -Provincial Registration Only***'},
            {'id': 51, 'name': 'Year', 'reason': 'Year Must Be Year Of Incorporation/Reg Registration/Incorporation/Corporate Name   Change'},
        ]
    )
# Exemple #36 — scraper artifact (example separator + vote count), not Python code
# 0
def upgrade():
    """Add the Business Search (corp type BUS / filing type BSRCH) product:
    corp type, filing type, fee schedule, and its GL distribution code with
    the linking row."""
    ### commands auto generated by Alembic - please adjust! ###
    # Ad-hoc table constructs for the bulk inserts below.
    corp_type_table = table('corp_types', column('code', String),
                            column('description', String),
                            column('is_online_banking_allowed', Boolean),
                            column('product', String))

    filing_type_table = table('filing_types', column('code', String),
                              column('description', String))

    fee_schedule_table = table('fee_schedules',
                               column('filing_type_code', String),
                               column('corp_type_code', String),
                               column('fee_code', String),
                               column('fee_start_date', Date),
                               column('fee_end_date', Date),
                               column('service_fee_code', String),
                               column('variable', Boolean))

    distribution_code_table = table('distribution_codes',
                                    column('created_on', Date),
                                    column('name', String),
                                    column('client', String),
                                    column('responsibility_centre', String),
                                    column('service_line', String),
                                    column('stob', String),
                                    column('project_code', String),
                                    column('start_date', Date),
                                    column('created_by', String),
                                    column('product_mapping', String))

    distribution_code_link_table = table(
        'distribution_code_links', column('distribution_code_id', String),
        column('fee_schedule_id', String))

    # Product code/corp type
    op.bulk_insert(corp_type_table, [{
        'code': 'BUS',
        'description': 'Business Search',
        'is_online_banking_allowed': False,
        'product': 'BUSINESS_SEARCH'
    }])

    # Filing Types
    op.bulk_insert(filing_type_table, [{
        'code': 'BSRCH',
        'description': 'Business Search'
    }])

    # Fee Schedules (open-ended: fee_end_date is NULL)
    op.bulk_insert(
        fee_schedule_table,
        [{
            'filing_type_code': 'BSRCH',
            'corp_type_code': 'BUS',
            'fee_code': 'EN110',  # 7.00
            'fee_start_date': date.today(),
            'fee_end_date': None,
            'service_fee_code': 'TRF01',  #$1.50 default
        }])

    op.bulk_insert(distribution_code_table, [{
        'created_on': date.today(),
        'name': 'Corporate Registry - Searches',
        'client': '112',
        'responsibility_centre': '32363',
        'service_line': '34725',
        'stob': '4375',
        'project_code': '3200056',
        'start_date': date.today(),
        'created_by': 'Alembic'
    }])

    # Link each new fee schedule to the distribution code just inserted.
    # The SQL below uses only internal constants, never user input.
    distribution_code_id_query = "select distribution_code_id from distribution_codes where name = 'Corporate Registry - Searches' and created_by = 'Alembic'"
    conn = op.get_bind()
    res = conn.execute(distribution_code_id_query)
    distribution_code_id = res.fetchall()[0][0]
    new_codes = ('BSRCH', )
    distr_code_link_values = []
    for new_code in new_codes:
        res = conn.execute(
            f"select fee_schedule_id from fee_schedules where filing_type_code='{new_code}' and corp_type_code='BUS'"
        )
        fee_schedule_id = res.fetchall()[0][0]
        distr_code_link_values.append({
            'distribution_code_id': distribution_code_id,
            'fee_schedule_id': fee_schedule_id
        })
    op.bulk_insert(distribution_code_link_table, distr_code_link_values)

    # ### end Alembic commands ###


# Fixed: downgrade() was accidentally indented inside upgrade(), so Alembic
# could never discover or run it. It now lives at module level.
def downgrade():
    """Remove everything inserted by upgrade(), child rows first."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.execute(
        "DELETE FROM distribution_code_links where fee_schedule_id in (select fee_schedule_id from fee_schedules where corp_type_code='BUS' and filing_type_code in ('BSRCH'))"
    )
    op.execute(
        "delete from fee_schedules where filing_type_code in ('BSRCH') ")
    op.execute("delete from filing_types where code in ('BSRCH') ")
    op.execute("delete from corp_types where code in ('BUS') ")
    op.execute(
        "delete from distribution_codes where name = 'Corporate Registry - Searches' and created_by = 'Alembic'"
    )
def upgrade():
    """Insert one l7rule_type row per entry in new_fields."""
    l7rule_type = sql.table(
        u'l7rule_type',
        sql.column(u'name', sa.String),
        sql.column(u'description', sa.String),
    )
    rows = [{'name': field} for field in new_fields]
    op.bulk_insert(l7rule_type, rows)
def upgrade():
    """
    Initial creation: removes all code carbon related tables & creates them with initial user / organization / team.

    Table dependency chain (child -> parent via FK):
    emissions -> runs -> experiments -> projects -> teams -> organizations,
    plus users -> organizations.  Each foreign key is created right after
    both of its tables exist.
    """

    # Start from a clean slate so the migration is idempotent when re-run
    # against a partially created schema.
    downgrade()

    op.create_table(
        "emissions",
        sa.Column("id",
                  UUID(as_uuid=True),
                  primary_key=True,
                  index=True,
                  default=uuid.uuid4),
        sa.Column("timestamp", sa.DateTime),
        sa.Column("duration", sa.Float),
        sa.Column("emissions", sa.Float),
        sa.Column("energy_consumed", sa.Float),
        sa.Column("run_id", UUID(as_uuid=True)),
        keep_existing=False,
    )

    op.create_table(
        "runs",
        sa.Column("id",
                  UUID(as_uuid=True),
                  primary_key=True,
                  index=True,
                  default=uuid.uuid4),
        sa.Column("timestamp", sa.DateTime),
        sa.Column("experiment_id", UUID(as_uuid=True)),
        keep_existing=False,
    )

    # emissions.run_id -> runs.id (both tables now exist).
    op.create_foreign_key("fk_emissions_runs", "emissions", "runs", ["run_id"],
                          ["id"])

    op.create_table(
        "experiments",
        sa.Column("id",
                  UUID(as_uuid=True),
                  primary_key=True,
                  index=True,
                  default=uuid.uuid4),
        sa.Column("timestamp", sa.DateTime),
        sa.Column("name", sa.String),
        sa.Column("description", sa.String),
        sa.Column("country_name", sa.String),
        sa.Column("country_iso_code", sa.String),
        sa.Column("region", sa.String),
        sa.Column("on_cloud", sa.Boolean, default=False),
        sa.Column("cloud_provider", sa.String),
        sa.Column("cloud_region", sa.String),
        sa.Column("project_id", UUID(as_uuid=True)),
        keep_existing=False,
    )

    # runs.experiment_id -> experiments.id
    op.create_foreign_key("fk_runs_experiments", "runs", "experiments",
                          ["experiment_id"], ["id"])

    op.create_table(
        "projects",
        sa.Column("id",
                  UUID(as_uuid=True),
                  primary_key=True,
                  index=True,
                  default=uuid.uuid4),
        sa.Column("name", sa.String),
        sa.Column("description", sa.String),
        sa.Column("team_id", UUID(as_uuid=True)),
        keep_existing=False,
    )

    # experiments.project_id -> projects.id
    op.create_foreign_key("fk_experiments_projects", "experiments", "projects",
                          ["project_id"], ["id"])

    # Keep a handle on the table object: it is used for bulk_insert below.
    teams = op.create_table(
        "teams",
        sa.Column("id",
                  UUID(as_uuid=True),
                  primary_key=True,
                  index=True,
                  default=uuid.uuid4),
        sa.Column("name", sa.String),
        sa.Column("description", sa.String),
        sa.Column("api_key", sa.String),
        sa.Column("organization_id", UUID(as_uuid=True)),
        keep_existing=False,
    )

    # projects.team_id -> teams.id
    op.create_foreign_key("fk_projects_teams", "projects", "teams",
                          ["team_id"], ["id"])

    # Keep a handle on the table object: it is used for bulk_insert below.
    organizations = op.create_table(
        "organizations",
        sa.Column("id",
                  UUID(as_uuid=True),
                  primary_key=True,
                  index=True,
                  default=uuid.uuid4),
        sa.Column("name", sa.String),
        sa.Column("description", sa.String),
        sa.Column("api_key", sa.String),
        # NOTE(review): denormalized team list stored as a string array
        # alongside teams.organization_id — confirm both are meant to exist.
        sa.Column("teams",
                  sa.types.ARRAY(sa.String, as_tuple=False, dimensions=1)),
        keep_existing=False,
    )

    # teams.organization_id -> organizations.id
    op.create_foreign_key("fk_teams_organizations", "teams", "organizations",
                          ["organization_id"], ["id"])

    op.create_table(
        "users",
        sa.Column("id",
                  UUID(as_uuid=True),
                  primary_key=True,
                  index=True,
                  default=uuid.uuid4),
        sa.Column("name", sa.String),
        sa.Column("api_key", sa.String),
        sa.Column("email", sa.String, unique=True, index=True),
        sa.Column("hashed_password", sa.String),
        sa.Column("is_active", sa.Boolean, default=True),
        sa.Column("teams",
                  sa.types.ARRAY(sa.String, as_tuple=False, dimensions=1)),
        sa.Column("organizations",
                  sa.types.ARRAY(sa.String, as_tuple=False, dimensions=1)),
        sa.Column("organization_id", UUID(as_uuid=True)),
        keep_existing=False,
    )

    # users.organization_id -> organizations.id
    op.create_foreign_key("fk_users_organizations", "users", "organizations",
                          ["organization_id"], ["id"])

    # Seed the two built-in organizations.
    op.bulk_insert(
        organizations,
        [
            {
                "id": ADMIN_ORG_ID,
                "name": "admin",
                "description": "Administration organization",
                # NOTE(review): the admin *organization* reuses
                # ADMIN_TEAM_API_KEY; no ADMIN_ORG_API_KEY is visible here —
                # confirm this is intentional.
                "api_key": ADMIN_TEAM_API_KEY,
            },
            {
                "id": COMMUNITY_ORG_ID,
                "name": "Community organization",
                "description": "Community organization",
                "api_key": COMMUNITY_ORG_API_KEY,
            },
        ],
    )

    # Seed the built-in admin team, attached to the admin organization.
    op.bulk_insert(
        teams,
        [{
            "id": ADMIN_TEAM_ID,
            "name": "admin",
            "description": "Administration team",
            "api_key": ADMIN_TEAM_API_KEY,
            "organization_id": ADMIN_ORG_ID,
        }],
    )
# Exemple #39
# 0
def _read_legacy_tasks(conn):
    """Read every row of the pre-1.0 ``tasks`` table into plain dicts.

    The dicts are shaped for ``op.bulk_insert`` against the 1.0 schema:
    ``clock`` (a new column) is backfilled from ``added_on`` and the
    retired ``success`` status is mapped to ``completed``.
    """
    columns = (
        "id", "target", "category", "timeout", "priority", "custom",
        "machine", "package", "options", "platform", "memory",
        "enforce_timeout", "added_on", "started_on", "completed_on",
        "status", "sample_id",
    )
    old_tasks = conn.execute(
        "select id, target, category, timeout, priority, custom, machine, package, options, platform, memory, enforce_timeout, added_on, started_on, completed_on, status, sample_id from tasks"
    ).fetchall()

    tasks_data = []
    for item in old_tasks:
        d = dict(zip(columns, item))

        # Some drivers hand timestamps back as strings; normalize them.
        # Bug fix: started_on/completed_on are nullable, and the previous
        # code called parse(None) (TypeError) for tasks that never ran.
        for key in ("added_on", "started_on", "completed_on"):
            value = d[key]
            if value is not None and not isinstance(value, datetime):
                d[key] = parse(value)

        # The clock column did not exist before 1.0; force it to added_on.
        d["clock"] = d["added_on"]
        # Enum migration: "success" isn't a valid state anymore.
        if d["status"] == "success":
            d["status"] = "completed"
        tasks_data.append(d)
    return tasks_data


def _recreate_tasks_table():
    """Drop the pre-1.0 ``tasks`` table (and its status enum) and create
    the 1.0 schema in its place.

    Callers must have copied the old rows out first (see
    ``_read_legacy_tasks``) — the data is destroyed here.
    """
    # Rename then drop the original table.
    op.rename_table("tasks", "old_tasks")
    op.drop_table("old_tasks")
    # Drop the old Enum so create_table can redefine it.
    sa.Enum(name="status_type").drop(op.get_bind(), checkfirst=False)
    # Create new table with the 1.0 schema.
    op.create_table(
        "tasks", sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("target", sa.String(length=255), nullable=False),
        sa.Column("category", sa.String(length=255), nullable=False),
        sa.Column("timeout",
                  sa.Integer(),
                  server_default="0",
                  nullable=False),
        sa.Column("priority",
                  sa.Integer(),
                  server_default="1",
                  nullable=False),
        sa.Column("custom", sa.String(length=255), nullable=True),
        sa.Column("machine", sa.String(length=255), nullable=True),
        sa.Column("package", sa.String(length=255), nullable=True),
        sa.Column("options", sa.String(length=255), nullable=True),
        sa.Column("platform", sa.String(length=255), nullable=True),
        sa.Column("memory",
                  sa.Boolean(),
                  nullable=False,
                  default=False),
        sa.Column("enforce_timeout",
                  sa.Boolean(),
                  nullable=False,
                  default=False),
        sa.Column("clock",
                  sa.DateTime(timezone=False),
                  server_default=sa.func.now(),
                  nullable=False),
        sa.Column("added_on",
                  sa.DateTime(timezone=False),
                  nullable=False),
        sa.Column("started_on",
                  sa.DateTime(timezone=False),
                  nullable=True),
        sa.Column("completed_on",
                  sa.DateTime(timezone=False),
                  nullable=True),
        sa.Column("status",
                  sa.Enum("pending",
                          "running",
                          "completed",
                          "reported",
                          "recovered",
                          name="status_type"),
                  server_default="pending",
                  nullable=False),
        sa.Column("sample_id",
                  sa.Integer,
                  sa.ForeignKey("samples.id"),
                  nullable=True), sa.PrimaryKeyConstraint("id"))


def upgrade():
    """Migrate a Cuckoo 0.6 SQL schema to the 1.0 schema, then migrate mongo.

    The mysqldb/pysqlite table rebuild previously existed as two identical
    ~100-line copies; it now lives in _read_legacy_tasks /
    _recreate_tasks_table.
    """
    # Migrations are supported starting from Cuckoo 0.6 and Cuckoo 1.0;
    # both schemas predate alembic release versioning, so we detect the
    # version by presence of machines_tags (introduced in 1.0).
    conn = op.get_bind()

    if conn.engine.dialect.has_table(conn.engine.connect(), "machines_tags"):
        # Cuckoo 1.0 or above: skip the SQL migration entirely.
        pass
    else:
        # Cuckoo < 1.0, hopefully 0.6: run the SQL migration.

        # Create table used by Tag.
        op.create_table(
            "tags",
            sa.Column("id", sa.Integer(), primary_key=True),
            sa.Column("name",
                      sa.String(length=255),
                      nullable=False,
                      unique=True),
        )

        # Create secondary table used in association Machine - Tag.
        op.create_table(
            "machines_tags",
            sa.Column("machine_id", sa.Integer, sa.ForeignKey("machines.id")),
            sa.Column("tag_id", sa.Integer, sa.ForeignKey("tags.id")),
        )

        # Add columns to Machine.
        op.add_column(
            "machines",
            sa.Column("interface", sa.String(length=255), nullable=True))
        op.add_column(
            "machines",
            sa.Column("snapshot", sa.String(length=255), nullable=True))
        # TODO: change default value; be aware sqlite doesn't support that
        # kind of ALTER statement.
        op.add_column(
            "machines",
            sa.Column("resultserver_ip",
                      sa.String(length=255),
                      server_default="192.168.56.1",
                      nullable=False))
        # TODO: change default value; be aware sqlite doesn't support that
        # kind of ALTER statement.
        op.add_column(
            "machines",
            sa.Column("resultserver_port",
                      sa.String(length=255),
                      server_default="2042",
                      nullable=False))

        # The status-enum rework is DBMS specific.
        if conn.engine.driver == "psycopg2":
            # No default value; leave the column nullable until the data
            # migration below has filled it in.
            op.add_column(
                "tasks",
                sa.Column("clock", sa.DateTime(timezone=False), nullable=True))
            # The clock column is new; backfill it from added_on.
            conn.execute("update tasks set clock=added_on")
            # Now add the not null constraint.
            op.alter_column("tasks",
                            "clock",
                            nullable=False,
                            existing_nullable=True)
            # Alter the status ENUM with raw SQL because alembic doesn't
            # deal well with alter_column on ENUM types.
            op.execute(
                'COMMIT'
            )  # Commit because SQLAlchemy doesn't support ALTER TYPE in a transaction.
            conn.execute("ALTER TYPE status_type ADD VALUE 'completed'")
            conn.execute("ALTER TYPE status_type ADD VALUE 'reported'")
            conn.execute("ALTER TYPE status_type ADD VALUE 'recovered'")
            conn.execute("ALTER TYPE status_type ADD VALUE 'running'")
            conn.execute(
                "ALTER TYPE status_type RENAME ATTRIBUTE success TO completed")
            conn.execute(
                "ALTER TYPE status_type DROP ATTRIBUTE IF EXISTS failure")
        elif conn.engine.driver == "mysqldb":
            # No default value; leave the column nullable until the data
            # migration below has filled it in.
            op.add_column(
                "tasks",
                sa.Column("clock", sa.DateTime(timezone=False), nullable=True))
            # The clock column is new; backfill it from added_on.
            conn.execute("update tasks set clock=added_on")
            # Now add the not null constraint.
            op.alter_column("tasks",
                            "clock",
                            nullable=False,
                            existing_nullable=True,
                            existing_type=sa.DateTime(timezone=False))
            # MySQL's ALTER cannot remove an item from an ENUM, so copy the
            # rows out and rebuild the table with the 1.0 schema.
            tasks_data = _read_legacy_tasks(conn)
            _recreate_tasks_table()
            op.bulk_insert(db.Task.__table__, tasks_data)
        elif conn.engine.driver == "pysqlite":
            # SQLite cannot ALTER the enum column at all: rebuild the table
            # with the 1.0 schema and copy the rows across.
            tasks_data = _read_legacy_tasks(conn)
            _recreate_tasks_table()
            op.bulk_insert(db.Task.__table__, tasks_data)

    # Migrate mongo.
    mongo_upgrade()
# Exemple #40
# 0
def upgrade():
    """Create the todo-list schema and seed the Role lookup table.

    Auto-generated by Alembic (MySQL dialect); constraint names go through
    op.f() so the naming convention is applied consistently.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Role: lookup table of permission flags. TINYINT(1) columns act as
    # booleans. The returned table object is reused for bulk_insert below.
    role_table = op.create_table(
        'Role', sa.Column('role', sa.String(length=50), nullable=False),
        sa.Column('change_owner',
                  mysql.TINYINT(display_width=1),
                  nullable=False),
        sa.Column('delete', mysql.TINYINT(display_width=1), nullable=False),
        sa.Column('change_permissions',
                  mysql.TINYINT(display_width=1),
                  nullable=False),
        sa.Column('change_data',
                  mysql.TINYINT(display_width=1),
                  nullable=False),
        sa.Column('read', mysql.TINYINT(display_width=1), nullable=False),
        sa.PrimaryKeyConstraint('role', name=op.f('pk_Role')))
    # Seed the three built-in roles: owner (full control), admin
    # (read/write data only), reader (read-only).
    op.bulk_insert(role_table, [{
        'role': 'owner',
        'change_owner': 1,
        'delete': 1,
        'change_permissions': 1,
        'change_data': 1,
        'read': 1
    }, {
        'role': 'admin',
        'change_owner': 0,
        'delete': 0,
        'change_permissions': 0,
        'change_data': 1,
        'read': 1
    }, {
        'role': 'reader',
        'change_owner': 0,
        'delete': 0,
        'change_permissions': 0,
        'change_data': 0,
        'read': 1
    }])
    # TodoList: top-level list; CHAR(36) ids look like textual UUIDs
    # (TODO confirm against the model layer).
    op.create_table(
        'TodoList', sa.Column('todolist_id',
                              sa.CHAR(length=36),
                              nullable=False),
        sa.Column('label', sa.String(length=255), nullable=False),
        sa.Column('description', sa.String(length=255), nullable=True),
        sa.Column('status',
                  sa.Enum('active', 'inactive', name='todoliststatus'),
                  nullable=False),
        sa.Column('priority',
                  sa.Enum('a', 'b', 'c', 'd', 'e', name='priority'),
                  nullable=False),
        sa.Column('created_ts', mysql.DATETIME(), nullable=False),
        sa.PrimaryKeyConstraint('todolist_id', name=op.f('pk_TodoList')))
    # Audit log of TodoList status changes, keyed by (list, timestamp).
    # Rows cascade away when the list or the user is deleted.
    op.create_table(
        'TodoListStatusChangeLog',
        sa.Column('todolist_id', sa.CHAR(length=36), nullable=False),
        sa.Column('change_ts', mysql.DATETIME(), nullable=False),
        sa.Column('changed_by', sa.CHAR(length=36), nullable=False),
        sa.Column('status',
                  sa.Enum('active', 'inactive', name='todoliststatus'),
                  nullable=False),
        sa.ForeignKeyConstraint(
            ['changed_by'], ['User.user_id'],
            name=op.f('fk_TodoListStatusChangeLog_changed_by_User'),
            ondelete='cascade'),
        sa.ForeignKeyConstraint(
            ['todolist_id'], ['TodoList.todolist_id'],
            name=op.f('fk_TodoListStatusChangeLog_todolist_id_TodoList'),
            ondelete='cascade'),
        sa.PrimaryKeyConstraint('todolist_id',
                                'change_ts',
                                name=op.f('pk_TodoListStatusChangeLog')))
    # Task: belongs to a TodoList; parent_id is a self-referential FK that
    # models sub-tasks (cascade delete walks the subtree).
    # NOTE(review): Task.status uses enum name 'nodestatus' while
    # TaskStatusChangeLog.status uses 'taskstatus' for the same values —
    # confirm this divergence is intentional.
    op.create_table(
        'Task', sa.Column('task_id', sa.CHAR(length=36), nullable=False),
        sa.Column('parent_id', sa.CHAR(length=36), nullable=True),
        sa.Column('label', sa.String(length=255), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('todolist_id', sa.CHAR(length=36), nullable=False),
        sa.Column('status',
                  sa.Enum('active', 'done', 'ready', name='nodestatus'),
                  nullable=False),
        sa.Column('priority',
                  sa.Enum('a', 'b', 'c', 'd', 'e', name='priority'),
                  nullable=False),
        sa.Column('created_ts', mysql.DATETIME(), nullable=False),
        sa.ForeignKeyConstraint(['parent_id'], ['Task.task_id'],
                                name=op.f('fk_Task_parent_id_Task'),
                                ondelete='cascade'),
        sa.ForeignKeyConstraint(['todolist_id'], ['TodoList.todolist_id'],
                                name=op.f('fk_Task_todolist_id_TodoList'),
                                ondelete='cascade'),
        sa.PrimaryKeyConstraint('task_id', name=op.f('pk_Task')))
    # Secondary indexes for common Task lookups (by label, by list).
    op.create_index(op.f('ix_Task_label'), 'Task', ['label'], unique=False)
    op.create_index(op.f('ix_Task_todolist_id'),
                    'Task', ['todolist_id'],
                    unique=False)
    # Audit log of Task status changes, keyed by (task, timestamp).
    op.create_table(
        'TaskStatusChangeLog',
        sa.Column('task_id', sa.CHAR(length=36), nullable=False),
        sa.Column('change_ts', mysql.DATETIME(), nullable=False),
        sa.Column('changed_by', sa.CHAR(length=36), nullable=False),
        sa.Column('status',
                  sa.Enum('active', 'done', 'ready', name='taskstatus'),
                  nullable=False),
        sa.ForeignKeyConstraint(
            ['changed_by'], ['User.user_id'],
            name=op.f('fk_TaskStatusChangeLog_changed_by_User'),
            ondelete='cascade'),
        sa.ForeignKeyConstraint(
            ['task_id'], ['Task.task_id'],
            name=op.f('fk_TaskStatusChangeLog_task_id_Task'),
            ondelete='cascade'),
        sa.PrimaryKeyConstraint('task_id',
                                'change_ts',
                                name=op.f('pk_TaskStatusChangeLog')))
    # One creator per TodoList (todolist_id alone is the PK).
    op.create_table(
        'TodoListCreator',
        sa.Column('todolist_id', sa.CHAR(length=36), nullable=False),
        sa.Column('created_by', sa.CHAR(length=36), nullable=False),
        sa.ForeignKeyConstraint(
            ['created_by'], ['User.user_id'],
            name=op.f('fk_TodoListCreator_created_by_User'),
            ondelete='cascade'),
        sa.ForeignKeyConstraint(
            ['todolist_id'], ['TodoList.todolist_id'],
            name=op.f('fk_TodoListCreator_todolist_id_TodoList'),
            ondelete='cascade'),
        sa.PrimaryKeyConstraint('todolist_id',
                                name=op.f('pk_TodoListCreator')))
    # Many-to-many User <-> TodoList membership, carrying the member's role.
    op.create_table(
        'UserTodoList', sa.Column('user_id',
                                  sa.CHAR(length=36),
                                  nullable=False),
        sa.Column('todolist_id', sa.CHAR(length=36), nullable=False),
        sa.Column('role', sa.String(length=50), nullable=False),
        sa.ForeignKeyConstraint(['role'], ['Role.role'],
                                name=op.f('fk_UserTodoList_role_Role'),
                                ondelete='cascade'),
        sa.ForeignKeyConstraint(
            ['todolist_id'], ['TodoList.todolist_id'],
            name=op.f('fk_UserTodoList_todolist_id_TodoList'),
            ondelete='cascade'),
        sa.ForeignKeyConstraint(['user_id'], ['User.user_id'],
                                name=op.f('fk_UserTodoList_user_id_User'),
                                ondelete='cascade'),
        sa.PrimaryKeyConstraint('user_id',
                                'todolist_id',
                                name=op.f('pk_UserTodoList')))
def _perform(upgrade):
    """Rebuild the ``samples`` table, switching the ``file_type`` column
    between Text() (upgrade=True) and String(255) (upgrade=False) while
    preserving every row.

    The table is copied out, dropped, re-created with the new column type,
    and re-populated; the Task.sample_id foreign key and the hash index are
    dropped and restored around the rebuild where the backend requires it.
    """
    conn = op.get_bind()

    # Snapshot all existing rows as plain dicts keyed by column name.
    fields = ("id", "file_size", "file_type", "md5", "crc32",
              "sha1", "sha256", "sha512", "ssdeep")
    result = conn.execute("SELECT id, file_size, file_type, md5, crc32, "
                          "sha1, sha256, sha512, ssdeep FROM samples")
    samples = [dict(zip(fields, row)) for row in result]

    # PostgreSQL and MySQL have different names for the foreign key of
    # Task.sample_id -> Sample.id; for SQLite we don't drop/recreate the
    # foreign key.
    fkey = {
        "mysql": "tasks_ibfk_1",
        "postgresql": "tasks_sample_id_fkey",
    }.get(db.Database(schema_check=False).engine.name)

    # First drop the foreign key, if this backend has one to drop.
    if fkey:
        op.drop_constraint(fkey, "tasks", type_="foreignkey")

    # Rename the original table out of the way, then drop it.
    op.rename_table("samples", "old_samples")
    op.drop_table("old_samples")

    if upgrade:
        file_type = sa.Text()
    else:
        file_type = sa.String(255)
        # Downgrading trims file_type to 255 characters, so apply the same
        # truncation to the rows we are about to re-insert.
        for sample in samples:
            sample["file_type"] = sample["file_type"][:255]

    # Re-create with the 1.2 schema; only file_type's type differs between
    # the upgrade and downgrade directions.
    op.create_table("samples",
                    sa.Column("id", sa.Integer(), nullable=False),
                    sa.Column("file_size", sa.Integer(), nullable=False),
                    sa.Column("file_type", file_type, nullable=False),
                    sa.Column("md5", sa.String(32), nullable=False),
                    sa.Column("crc32", sa.String(8), nullable=False),
                    sa.Column("sha1", sa.String(40), nullable=False),
                    sa.Column("sha256", sa.String(64), nullable=False),
                    sa.Column("sha512", sa.String(128), nullable=False),
                    sa.Column("ssdeep", sa.Text(), nullable=True),
                    sa.PrimaryKeyConstraint("id"))

    # Put the saved rows back.
    op.bulk_insert(db.Sample.__table__, samples)

    # Restore the unique hash index.
    op.create_index("hash_index",
                    "samples", ["md5", "crc32", "sha1", "sha256", "sha512"],
                    unique=True)

    # Restore the foreign key where we dropped one.
    if fkey:
        op.create_foreign_key(fkey, "tasks", "samples", ["sample_id"], ["id"])
def upgrade():
    """Seed a data trust, an administrative/test user and OAuth2 clients.

    The rows inserted depend on the ``APP_ENV`` environment variable:
    ``'PRODUCTION'``, ``'TESTING'``, or unset (local development defaults).

    NOTE(review): the PRODUCTION branch appears to have been mangled by a
    secret-scrubbing tool -- the ``facet_redirect_uri = os.getenv(...)``
    statement is fused with the tail of an unrelated ``bulk_insert`` call,
    and ``honeybadger_redirect_uri`` is referenced below but never defined
    in the visible code.  Restore this section from version control before
    running the migration.
    """
    environment = os.getenv('APP_ENV', None)
    # Primary keys are hex-only UUID4 strings (dashes stripped).
    data_trust_id = str(uuid4()).replace('-', '')
    admin_user_id = str(uuid4()).replace('-', '')

    if environment == 'PRODUCTION':
        data_trust_name = os.getenv('DATA_TRUST_NAME', 'New Data Trust')
        # NOTE(review): mangled region starts here (see docstring) -- this
        # statement is not syntactically valid as written.
        facet_redirect_uri = os.getenv('FACET_REDIRECT_URI',
                                       'http://*****:*****@brighthive.net',
                           'active': True,
                           'data_trust_id': data_trust_id,
                           'password_hash': hash_password('143DATATRUST341'),
                           'date_created': datetime.utcnow(),
                           'date_last_updated': datetime.utcnow()
                       }])

        # OAuth2 metadata for the public (no client secret) clients.
        facet_client_metadata = {
            'client_name': 'Facet',
            'token_endpoint_auth_method': 'none',
            'grant_types': ['authorization_code'],
            'response_types': ['code'],
            'client_uri': '',
            'redirect_uris': [facet_redirect_uri],
            'scope': ''
        }

        honeybadger_client_metadata = {
            'client_name': 'HoneyBadger',
            'token_endpoint_auth_method': 'none',
            'grant_types': ['authorization_code'],
            'response_types': ['code'],
            'client_uri': '',
            # NOTE(review): honeybadger_redirect_uri is undefined in the
            # visible code (lost in the mangled region above).
            'redirect_uris': [honeybadger_redirect_uri],
            'scope': ''
        }

        op.bulk_insert(
            oauth2_client_table,
            [
                # Set up a public client (Facet)
                {
                    'client_id': 'ru2tFykoIcR6vSWpsLgnYTpg',
                    'client_secret': '',
                    'client_id_issued_at': int(time.time()),
                    'client_secret_expires_at': 0,
                    'client_metadata': json.dumps(facet_client_metadata),
                    'id': str(uuid4()).replace('-', ''),
                    'user_id': admin_user_id
                },
                # Set up a public client (HoneyBadger)
                {
                    'client_id': 'CFMdr5X9zAp0HwtoM8i8i7UA',
                    'client_secret': '',
                    'client_id_issued_at': int(time.time()),
                    'client_secret_expires_at': 0,
                    'client_metadata': json.dumps(honeybadger_client_metadata),
                    'id': str(uuid4()).replace('-', ''),
                    'user_id': admin_user_id
                }
            ])
    else:
        if environment == 'TESTING':
            # Set up data trust
            op.bulk_insert(data_trust_table,
                           [{
                               'id': data_trust_id,
                               'data_trust_name': 'Test Data Trust',
                               'date_created': datetime.utcnow(),
                               'date_last_updated': datetime.utcnow()
                           }])

            # Set up test user
            op.bulk_insert(user_table,
                           [{
                               'id': admin_user_id,
                               'username': '******',
                               'firstname': 'Test',
                               'lastname': 'User',
                               'organization': 'BrightHive',
                               'email_address': '*****@*****.**',
                               'active': True,
                               'data_trust_id': data_trust_id,
                               'password_hash': hash_password('passw0rd'),
                               'date_created': datetime.utcnow(),
                               'date_last_updated': datetime.utcnow()
                           }])
        # Set up test client
        elif not environment:
            op.bulk_insert(data_trust_table,
                           [{
                               'id': data_trust_id,
                               'data_trust_name': 'Sample Data Trust',
                               'date_created': datetime.utcnow(),
                               'date_last_updated': datetime.utcnow()
                           }])

            op.bulk_insert(
                user_table, [{
                    'id': admin_user_id,
                    'username': '******',
                    'firstname': 'BrightHive',
                    'lastname': 'Administrator',
                    'organization': 'BrightHive',
                    'email_address': '*****@*****.**',
                    'active': True,
                    'data_trust_id': data_trust_id,
                    'password_hash': hash_password('143DATATRUST341'),
                    'date_created': datetime.utcnow(),
                    'date_last_updated': datetime.utcnow()
                }])

        # Machine-to-machine client authenticating with a client secret.
        m2m_client_metadata = {
            'client_name': 'M2M Client',
            'token_endpoint_auth_method': 'client_secret_json',
            'grant_types': ['client_credentials'],
            'response_types': ['token'],
            'client_uri': 'http://localhost:8000',
            'scope': ''
        }

        # Browser-facing client using the authorization-code flow.
        public_client_metadata = {
            'client_name': 'Public Client',
            'token_endpoint_auth_method': 'none',
            'grant_types': ['authorization_code'],
            'response_types': ['code'],
            'client_uri': 'http://localhost:8001',
            'redirect_uris': ['http://localhost:8001/auth/redirect'],
            'scope': ''
        }

        op.bulk_insert(
            oauth2_client_table,
            [{
                'client_id': 'd84UZXW7QcB5ufaVT15C9BtO',
                'client_secret':
                'cTQfd67c5uN9df8g56U8T5CwbF9S0LDgl4imUDguKkrGSuzI',
                'client_id_issued_at': int(time.time()),
                'client_secret_expires_at': 0,
                'client_metadata': json.dumps(m2m_client_metadata),
                'id': '13c68e75d02a4c2280585f3a88549d39',
                'user_id': admin_user_id
            }, {
                'client_id': 'e84UZXW7QcB5ufaVT15C9BtO',
                'client_secret': '',
                'client_id_issued_at': int(time.time()),
                'client_secret_expires_at': 0,
                'client_metadata': json.dumps(public_client_metadata),
                'id': '23c68e75d02a4c2280585f3a88549d3b',
                'user_id': admin_user_id
            }])
def upgrade():
    """Create the ``fights``, ``groups`` and ``heroes`` tables and seed them.

    Schema DDL mirrors the Alembic autogenerated output; the seed data
    inserts the four group rows followed by four initial heroes.
    """

    def group_enum():
        # One fresh Enum instance per column, as in the original DDL.
        return sa.Enum('NONE', 'HUMAN', 'MYSTIC', 'MUTANT', name='grouptype')

    op.create_table(
        'fights',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('winner_name', sa.String(length=32), nullable=False),
        sa.Column('beaten_name', sa.String(length=32), nullable=False),
        sa.Column('killed', sa.Boolean(), nullable=False),
        sa.Column('date', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    # Non-unique lookup indexes on the fights table (original order kept).
    for indexed_col in ('beaten_name', 'killed', 'winner_name'):
        op.create_index(op.f('ix_fights_' + indexed_col),
                        'fights', [indexed_col],
                        unique=False)

    groups = op.create_table(
        'groups',
        sa.Column('type', group_enum(), nullable=False),
        sa.Column('enemies', sa.Integer(), nullable=True),
        sa.PrimaryKeyConstraint('type'),
    )
    heroes = op.create_table(
        'heroes',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('public_id', sa.String(length=50), nullable=True),
        sa.Column('name', sa.String(length=32), nullable=True),
        sa.Column('password_hash', sa.String(length=128), nullable=True),
        sa.Column('health', sa.Integer(), nullable=True),
        sa.Column('permissions', sa.Integer(), nullable=True),
        sa.Column('group_id', group_enum(), nullable=True),
        sa.Column('is_participant', sa.Boolean(), nullable=True),
        sa.ForeignKeyConstraint(['group_id'], ['groups.type']),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('public_id'),
    )
    op.create_index(op.f('ix_heroes_name'), 'heroes', ['name'], unique=True)
    op.create_index(op.f('ix_heroes_password_hash'),
                    'heroes', ['password_hash'],
                    unique=False)

    # Seed data

    # The four group rows, in the original insertion order.
    op.bulk_insert(groups, [
        {'type': member.name}
        for member in (GroupType.HUMAN, GroupType.MYSTIC,
                       GroupType.MUTANT, GroupType.NONE)
    ])

    # (name, password, permissions, group, is_participant) per seeded hero.
    hero_seed = [
        ('Grandmaster', 'grandpass', Permission.ADMIN, GroupType.NONE, False),
        ('Hulk', 'Hulk', Permission.NORMAL, GroupType.MUTANT, True),
        ('Guilotine', '12345', Permission.NORMAL, GroupType.MYSTIC, True),
        ('Grzesiek', 'misiek', Permission.NORMAL, GroupType.HUMAN, True),
    ]
    op.bulk_insert(heroes, [{
        'name': name,
        'public_id': str(uuid.uuid4()),
        'password_hash': generate_password_hash(password),
        'health': 100,
        'permissions': permissions,
        'group_id': group.name,
        'is_participant': participates,
    } for name, password, permissions, group, participates in hero_seed])
# Exemple #44  (scraper pagination artifact, commented out to keep the file parseable)
# 0
def upgrade():
    """Seed Red Chili and Lowa shoe models into the ``item`` table."""
    # Ad-hoc table definition so bulk_insert can target ``item`` without
    # importing the application model.
    item = table(
        'item',
        column('model', String),
        column('brand_id', Integer),
        column('gender_id', Integer),
        column('small_image_url', String),
        column('medium_image_url', String),
        column('type', String),
    )

    def rows(brand_id, shoe_type, models):
        # Expand (model, gender_id) pairs into full insert dicts.  The ids
        # are kept as strings exactly as the original migration wrote them.
        return [{
            'model': model,
            'brand_id': brand_id,
            'gender_id': gender_id,
            'type': shoe_type,
        } for model, gender_id in models]

    # Red Chili Shoe List (brand 9, rock-climbing shoes)
    op.bulk_insert(item, rows('9', 'rock', [
        ('durango vcr', '1'),
        ('amp', '1'),
        ('atomyc', '1'),
        ('durango lace', '1'),
        ('voltage', '1'),
        ('fusion vcr', '1'),
        ('fusion vcr', '2'),
    ]))

    # Lowa Climbing Shoe List (brand 19, mountaineering boots)
    op.bulk_insert(item, rows('19', 'mountain', [
        ('renegade gtx', '1'),
        ('camino gtx flex', '1'),
        ('zephyr mid', '1'),
        ('zephyr gtx mid tf', '1'),
        ('zephyr gtx hi tf', '1'),
        ('tibet gtx', '1'),
        ('tibet gtx hi', '1'),
        ('baffin pro', '1'),
        ('renegade ice gtx', '1'),
        ('trident ii gtx', '1'),
        ('tiago gtx mid', '1'),
        ('renegade gtx low', '1'),
        ('vantage gtx mid', '1'),
        ('arco gtx', '1'),
        ('baffin pro ii', '1'),
        ('renegade gtx mid', '1'),
        ('bormio gtx qc', '1'),
        ('renegade gtx mid', '2'),
        ('mauria gtx flex', '2'),
        ('lady light gtx', '2'),
        ('bormio gtx mid', '2'),
        ('vantage gtx mid', '2'),
        ('phoenix gtx mid', '2'),
        ('ferrox gtx mid', '2'),
        ('sassa gtx mid', '2'),
        ('tibet gtx', '2'),
        ('levante gtx mid', '2'),
    ]))
# Exemple #45  (scraper pagination artifact, commented out to keep the file parseable)
# 0
def upgrade():
    """Create the initial benchmarking schema (PostgreSQL) and seed lookups.

    Creates the family/lookup tables, the structure/method/task tables with
    their foreign keys, and seeds ``task_status``, ``basis_set_family`` and
    ``pseudopotential_family`` with fixed ids.
    """
    # gen_random_uuid(), used as the UUID server default below, is provided
    # by the pgcrypto extension.
    op.execute('CREATE EXTENSION IF NOT EXISTS pgcrypto')

    # --- Lookup / dimension tables -------------------------------------
    op.create_table('basis_set_family',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name')
    )
    op.create_table('pseudopotential_family',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name')
    )
    op.create_table('role',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name')
    )
    op.create_table('structure',
        sa.Column('id', postgresql.UUID(as_uuid=True), server_default=sa.text('gen_random_uuid()'), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('ase_structure', sa.Text(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name')
    )
    op.create_table('structure_set',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name')
    )
    op.create_table('task_status',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name')
    )
    op.create_table('test',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name')
    )
    op.create_table('user',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('email', sa.String(length=255), nullable=False),
        sa.Column('password', sa.Text(), nullable=False),
        sa.Column('active', sa.Boolean(), nullable=False),
        sa.Column('confirmed_at', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('email')
    )
    # --- Tables referencing the lookups --------------------------------
    op.create_table('basis_set',
        sa.Column('id', postgresql.UUID(as_uuid=True), server_default=sa.text('gen_random_uuid()'), nullable=False),
        sa.Column('element', sa.String(length=255), nullable=False),
        sa.Column('family_id', sa.Integer(), nullable=False),
        sa.Column('basis', sa.Text(), nullable=False),
        sa.ForeignKeyConstraint(['family_id'], ['basis_set_family.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('method',
        sa.Column('id', postgresql.UUID(as_uuid=True), server_default=sa.text('gen_random_uuid()'), nullable=False),
        sa.Column('code', sa.String(length=255), nullable=False),
        sa.Column('pseudopotential_id', sa.Integer(), nullable=False),
        sa.Column('basis_set_id', sa.Integer(), nullable=False),
        sa.Column('settings', postgresql.JSONB(), nullable=True),
        # NOTE(review): basis_set_id references basis_set_family.id (the
        # family, not an individual basis_set row) -- presumably intended,
        # mirroring pseudopotential_id; confirm against the ORM model.
        sa.ForeignKeyConstraint(['basis_set_id'], ['basis_set_family.id'], ),
        sa.ForeignKeyConstraint(['pseudopotential_id'], ['pseudopotential_family.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('pseudopotential',
        sa.Column('id', postgresql.UUID(as_uuid=True), server_default=sa.text('gen_random_uuid()'), nullable=False),
        sa.Column('element', sa.String(length=255), nullable=False),
        sa.Column('pseudo', sa.Text(), nullable=False),
        sa.Column('family_id', sa.Integer(), nullable=False),
        sa.Column('format', sa.String(length=255), nullable=False),
        # Self-referencing FK: tracks which pseudopotential this one was
        # converted from, if any.
        sa.Column('converted_from_id', postgresql.UUID(as_uuid=True), nullable=True),
        sa.ForeignKeyConstraint(['converted_from_id'], ['pseudopotential.id'], ),
        sa.ForeignKeyConstraint(['family_id'], ['pseudopotential_family.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    # --- Many-to-many association tables -------------------------------
    op.create_table('structure_set_structure',
        sa.Column('structure_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('set_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['set_id'], ['structure_set.id'], ),
        sa.ForeignKeyConstraint(['structure_id'], ['structure.id'], )
    )
    op.create_table('test_structure',
        sa.Column('test_id', sa.Integer(), nullable=False),
        sa.Column('structure_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.ForeignKeyConstraint(['structure_id'], ['structure.id'], ),
        sa.ForeignKeyConstraint(['test_id'], ['test.id'], )
    )
    op.create_table('user_role',
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('role_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['role_id'], ['role.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], )
    )
    # --- Work/result tables --------------------------------------------
    op.create_table('task',
        sa.Column('id', postgresql.UUID(as_uuid=True), server_default=sa.text('gen_random_uuid()'), nullable=False),
        sa.Column('structure_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('method_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('status_id', sa.Integer(), nullable=False),
        sa.Column('test_id', sa.Integer(), nullable=False),
        sa.Column('ctime', sa.DateTime(), nullable=False),
        sa.Column('mtime', sa.DateTime(), nullable=False),
        sa.Column('machine', sa.String(length=255), nullable=False),
        sa.Column('priority', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['method_id'], ['method.id'], ),
        sa.ForeignKeyConstraint(['status_id'], ['task_status.id'], ),
        sa.ForeignKeyConstraint(['structure_id'], ['structure.id'], ),
        sa.ForeignKeyConstraint(['test_id'], ['test.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('test_result',
        sa.Column('id', postgresql.UUID(as_uuid=True), server_default=sa.text('gen_random_uuid()'), nullable=False),
        sa.Column('ctime', sa.DateTime(), nullable=False),
        sa.Column('test_id', sa.Integer(), nullable=False),
        sa.Column('method_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('result_data', postgresql.JSONB(), nullable=True),
        sa.ForeignKeyConstraint(['method_id'], ['method.id'], ),
        sa.ForeignKeyConstraint(['test_id'], ['test.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('result',
        sa.Column('id', postgresql.UUID(as_uuid=True), server_default=sa.text('gen_random_uuid()'), nullable=False),
        sa.Column('energy', sa.Float(), nullable=False),
        sa.Column('task_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('filename', sa.String(length=255), nullable=True),
        sa.Column('data', postgresql.JSONB(), nullable=True),
        sa.ForeignKeyConstraint(['task_id'], ['task.id'], ),
        sa.PrimaryKeyConstraint('id')
    )

    # --- Seed data (ad-hoc table objects for bulk_insert) --------------
    task_status_table = table('task_status',
        column('id', sa.Integer),
        column('name', sa.String)
        )

    # Explicit, fixed ids; note ids 7 and 6 are intentionally listed out of
    # numeric order (insertion order preserved from the original migration).
    op.bulk_insert(task_status_table,
            [
                {'id': 1, 'name': "new"},
                {'id': 2, 'name': "pending"},
                {'id': 3, 'name': "running"},
                {'id': 4, 'name': "done"},
                {'id': 5, 'name': "error"},
                {'id': 7, 'name': "running-remote"},
                {'id': 6, 'name': "deferred"},
                {'id': 8, 'name': "cancelled"},
            ]
        )

    basis_set_family_table = table(
        'basis_set_family',
        column('id', sa.Integer),
        column('name', sa.String)
    )

    # Non-contiguous ids match an external numbering scheme; do not renumber.
    op.bulk_insert(
        basis_set_family_table,
        [
            {'id':   1, 'name': "SZV-GTH"},
            {'id':   2, 'name': "DZV-GTH"},
            {'id':   3, 'name': "DZVP-GTH"},
            {'id':   4, 'name': "TZVP-GTH"},
            {'id':   5, 'name': "TZV2P-GTH"},
            {'id':   6, 'name': "QZV2P-GTH"},
            {'id':   7, 'name': "QZV3P-GTH"},
            {'id':   8, 'name': "aug-DZVP-GTH"},
            {'id':   9, 'name': "aug-TZVP-GTH"},
            {'id':  10, 'name': "aug-TZV2P-GTH"},
            {'id':  11, 'name': "aug-QZV2P-GTH"},
            {'id':  12, 'name': "aug-QZV3P-GTH"},
            {'id':  13, 'name': "6-31G*"},
            {'id':  14, 'name': "6-311ppG3f2d"},
            {'id':  15, 'name': "6-31ppG3f2d"},
            {'id':  16, 'name': "TZVP-pob"},
            {'id':  17, 'name': "DZVP-MOLOPT-SR-GTH"},
            {'id':  18, 'name': "DZVP-MOLOPT-GTH"},
            {'id': 257, 'name': "TZV2PX-MOLOPT-GTH"},
            {'id': 258, 'name': "pc-1"},
            {'id': 259, 'name': "pc-2"},
            {'id': 260, 'name': "pc-3"},
            {'id': 261, 'name': "pc-4"},
            {'id': 202, 'name': "TZVP-MOLOPT-GTH"},
            {'id':  55, 'name': "planewave"},
            {'id': 164, 'name': "DZ-ANO"},
            {'id': 165, 'name': "DZVP-ALL"},
        ]
    )

    pseudopotential_family_table = table(
        'pseudopotential_family',
        column('id', sa.Integer),
        column('name', sa.String)
    )

    op.bulk_insert(
        pseudopotential_family_table,
        [
            {'id':  1, 'name': "GTH-PBE"},
            {'id':  2, 'name': "GTH-NLCC-PBE"},
            {'id':  3, 'name': "GTH-NLCC2015-PBE"},
            {'id':  4, 'name': "ALL"},
            {'id': 13, 'name': "all-electron"},
        ]
    )
def upgrade():
    """Rename "indexer" columns to "series provider" naming, convert stored
    integer enum values to their symbolic names, and rebuild several tables
    from the current SQLAlchemy metadata, preserving their row data.

    Fix over the original: the assignment to ``subtitles_lastsearch`` was
    wrapped in a ``try/except ValueError`` that could never fire --
    ``datetime.datetime.now()`` does not raise ValueError -- so the dead
    handler has been removed.  Behaviour is unchanged.
    """
    conn = op.get_bind()
    maindb_meta = MainDB.base.metadata
    maindb_meta.bind = conn

    # Column renames: "indexer" terminology becomes "series provider".
    op.alter_column('tv_shows', 'indexer_id', new_column_name='series_id')
    op.alter_column('tv_shows',
                    'indexer',
                    new_column_name='series_provider_id')
    op.alter_column('tv_shows', 'dvdorder', new_column_name='dvd_order')
    op.alter_column('tv_episodes', 'showid', new_column_name='series_id')
    op.alter_column('tv_episodes', 'indexer_id', new_column_name='episode_id')
    op.alter_column('tv_episodes',
                    'indexer',
                    new_column_name='series_provider_id')
    op.alter_column('imdb_info', 'indexer_id', new_column_name='series_id')

    # Replace stored integer enum values with their names.  The interpolated
    # values come from trusted enum members, not user input, so the f-string
    # SQL is acceptable here.
    for item in SeriesProviderID:
        conn.execute(
            f'UPDATE tv_shows SET series_provider_id = "{item.name}" WHERE series_provider_id = {item.value}'
        )
        conn.execute(
            f'UPDATE tv_episodes SET series_provider_id = "{item.name}" WHERE series_provider_id = {item.value}'
        )

    for item in EpisodeStatus:
        conn.execute(
            f'UPDATE tv_shows SET default_ep_status = "{item.name}" WHERE default_ep_status = {item.value}'
        )
        conn.execute(
            f'UPDATE tv_episodes SET status = "{item.name}" WHERE status = {item.value}'
        )

    # Retype the converted columns as proper enums/flags.
    with op.batch_alter_table('tv_shows') as batch_op:
        batch_op.alter_column('series_provider_id',
                              type_=sa.Enum(SeriesProviderID))
        batch_op.alter_column('default_ep_status',
                              type_=sa.Enum(EpisodeStatus))
        batch_op.alter_column('quality', type_=IntFlag(Qualities))

    with op.batch_alter_table('tv_episodes') as batch_op:
        batch_op.alter_column('series_provider_id',
                              type_=sa.Enum(SeriesProviderID))
        batch_op.alter_column('status', type_=sa.Enum(EpisodeStatus))

    # Snapshot row data before dropping/recreating the tables below.
    tv_episodes_results = []
    for x in conn.execute('SELECT * FROM tv_episodes'):
        x = dict(x)

        if 'airdate' in x:
            try:
                x['airdate'] = datetime.datetime.strptime(
                    x['airdate'], '%Y-%m-%d')
            except ValueError:
                # Row with an unparseable airdate is dropped entirely.
                continue

        if 'subtitles_lastsearch' in x:
            # Reset the timestamp to "now".  NOTE(review): the original
            # likely intended to parse the stored value (as for 'airdate');
            # the reset is kept to preserve the migration's actual behaviour.
            x['subtitles_lastsearch'] = datetime.datetime.now()

        tv_episodes_results.append(x)

    # Black/white lists gain a series_provider_id, defaulted to TheTVDB.
    blacklist_results = []
    for x in conn.execute('SELECT * FROM blacklist'):
        x = dict(x)

        x['series_provider_id'] = SeriesProviderID.THETVDB

        blacklist_results.append(x)

    whitelist_results = []
    for x in conn.execute('SELECT * FROM whitelist'):
        x = dict(x)

        x['series_provider_id'] = SeriesProviderID.THETVDB

        whitelist_results.append(x)

    imdb_info_results = []
    for x in conn.execute('SELECT * FROM imdb_info'):
        x = dict(x)

        x['last_update'] = datetime.datetime.now()

        imdb_info_results.append(x)

    # Drop the old tables (history/failed_* are intentionally not restored
    # with data below -- their rows are discarded by this migration).
    op.drop_table('indexer_mapping')
    op.drop_table('tv_episodes')
    op.drop_table('imdb_info')
    op.drop_table('blacklist')
    op.drop_table('whitelist')
    op.drop_table('history')
    op.drop_table('failed_snatch_history')
    op.drop_table('failed_snatches')

    # Recreate from the current ORM metadata.
    sa.Table('series_provider_mapping', maindb_meta, autoload=True).create()
    sa.Table('tv_episodes', maindb_meta, autoload=True).create()
    sa.Table('imdb_info', maindb_meta, autoload=True).create()
    sa.Table('blacklist', maindb_meta, autoload=True).create()
    sa.Table('whitelist', maindb_meta, autoload=True).create()
    sa.Table('history', maindb_meta, autoload=True).create()
    sa.Table('failed_snatch_history', maindb_meta, autoload=True).create()
    sa.Table('failed_snatches', maindb_meta, autoload=True).create()

    tv_episodes = sa.Table('tv_episodes', maindb_meta, autoload=True)
    imdb_info = sa.Table('imdb_info', maindb_meta, autoload=True)
    blacklist = sa.Table('blacklist', maindb_meta, autoload=True)
    whitelist = sa.Table('whitelist', maindb_meta, autoload=True)

    # Restore the preserved rows into the rebuilt tables.
    op.bulk_insert(tv_episodes, tv_episodes_results)
    op.bulk_insert(imdb_info, imdb_info_results)
    op.bulk_insert(blacklist, blacklist_results)
    op.bulk_insert(whitelist, whitelist_results)
# Exemple #47  (scraper pagination artifact, commented out to keep the file parseable)
# 0
def _create_seeded_lookup_table(name, entries):
    """Create a ``(name, description)`` lookup table and seed it.

    :param name: name of the lookup table to create.
    :param entries: iterable of values for the ``name`` primary-key column;
        the ``description`` column is left NULL for every seeded row.
    """
    op.create_table(name,
                    sa.Column(u'name', sa.String(30), primary_key=True),
                    sa.Column(u'description', sa.String(255), nullable=True))

    # Temporary table construct used only for data seeding.
    insert_table = sql.table(name,
                             sql.column(u'name', sa.String),
                             sql.column(u'description', sa.String))
    op.bulk_insert(insert_table, [{'name': entry} for entry in entries])


def upgrade():
    """Create the initial load-balancer schema.

    Lookup tables are created and seeded first, because the entity tables
    created afterwards reference them through foreign keys.
    """
    # Create and populate the lookup tables (same order as before:
    # health_monitor_type, protocol, algorithm, session_persistence_type,
    # provisioning_status, operating_status).
    _create_seeded_lookup_table(u'health_monitor_type',
                                ['HTTP', 'HTTPS', 'TCP'])
    _create_seeded_lookup_table(u'protocol',
                                ['HTTP', 'HTTPS', 'TCP'])
    _create_seeded_lookup_table(u'algorithm',
                                ['ROUND_ROBIN', 'LEAST_CONNECTIONS',
                                 'SOURCE_IP'])
    _create_seeded_lookup_table(u'session_persistence_type',
                                ['SOURCE_IP', 'HTTP_COOKIE', 'APP_COOKIE'])
    _create_seeded_lookup_table(u'provisioning_status',
                                ['ACTIVE', 'PENDING_CREATE', 'PENDING_UPDATE',
                                 'PENDING_DELETE', 'DELETED', 'ERROR'])
    _create_seeded_lookup_table(u'operating_status',
                                ['ONLINE', 'OFFLINE', 'DEGRADED', 'ERROR'])

    # Pool of back-end members; protocol/algorithm/status reference lookups.
    op.create_table(
        u'pool', sa.Column(u'tenant_id', sa.String(255), nullable=True),
        sa.Column(u'id', sa.String(36), nullable=False),
        sa.Column(u'name', sa.String(255), nullable=True),
        sa.Column(u'description', sa.String(255), nullable=True),
        sa.Column(u'protocol', sa.String(16), nullable=False),
        sa.Column(u'lb_algorithm', sa.String(16), nullable=False),
        sa.Column(u'operating_status', sa.String(16), nullable=False),
        sa.Column(u'enabled', sa.Boolean(), nullable=False),
        sa.PrimaryKeyConstraint(u'id'),
        sa.ForeignKeyConstraint([u'protocol'], [u'protocol.name'],
                                name=u'fk_pool_protocol_name'),
        sa.ForeignKeyConstraint([u'lb_algorithm'], [u'algorithm.name'],
                                name=u'fk_pool_algorithm_name'),
        sa.ForeignKeyConstraint([u'operating_status'],
                                [u'operating_status.name'],
                                name=u'fk_pool_operating_status_name'))

    # One health monitor per pool (pool_id is the primary key).
    op.create_table(
        u'health_monitor', sa.Column(u'pool_id', sa.String(36),
                                     nullable=False),
        sa.Column(u'type', sa.String(36), nullable=False),
        sa.Column(u'delay', sa.Integer(), nullable=False),
        sa.Column(u'timeout', sa.Integer(), nullable=False),
        sa.Column(u'fall_threshold', sa.Integer(), nullable=False),
        sa.Column(u'rise_threshold', sa.Integer(), nullable=False),
        sa.Column(u'http_method', sa.String(16), nullable=True),
        sa.Column(u'url_path', sa.String(255), nullable=True),
        sa.Column(u'expected_codes', sa.String(64), nullable=True),
        sa.Column(u'enabled', sa.Boolean(), nullable=False),
        sa.PrimaryKeyConstraint(u'pool_id'),
        sa.ForeignKeyConstraint([u'pool_id'], [u'pool.id'],
                                name=u'fk_health_monitor_pool_id'),
        sa.ForeignKeyConstraint(
            [u'type'], [u'health_monitor_type.name'],
            name=u'fk_health_monitor_health_monitor_type_name'))

    # Optional session persistence configuration, one per pool.
    op.create_table(
        u'session_persistence',
        sa.Column(u'pool_id', sa.String(36), nullable=False),
        sa.Column(u'type', sa.String(16), nullable=False),
        sa.Column(u'cookie_name', sa.String(255), nullable=True),
        sa.ForeignKeyConstraint(
            [u'type'], [u'session_persistence_type.name'],
            name=u'fk_session_persistence_session_persistence_type_name'),
        sa.ForeignKeyConstraint([u'pool_id'], [u'pool.id'],
                                name=u'fk_session_persistence_pool_id'),
        sa.PrimaryKeyConstraint(u'pool_id'))

    # Back-end members; unique per (pool, address, port).
    op.create_table(
        u'member', sa.Column(u'tenant_id', sa.String(255), nullable=True),
        sa.Column(u'id', sa.String(36), nullable=False),
        sa.Column(u'pool_id', sa.String(36), nullable=False),
        sa.Column(u'subnet_id', sa.String(36), nullable=True),
        sa.Column(u'address', sa.String(64), nullable=False),
        sa.Column(u'protocol_port', sa.Integer(), nullable=False),
        sa.Column(u'weight', sa.Integer(), nullable=True),
        sa.Column(u'operating_status', sa.String(16), nullable=False),
        sa.Column(u'enabled', sa.Boolean(), nullable=False),
        sa.PrimaryKeyConstraint(u'id'),
        sa.ForeignKeyConstraint([u'pool_id'], [u'pool.id'],
                                name=u'fk_member_pool_id'),
        sa.ForeignKeyConstraint([u'operating_status'],
                                [u'operating_status.name'],
                                name=u'fk_member_operating_status_name'),
        sa.UniqueConstraint(u'pool_id',
                            u'address',
                            u'protocol_port',
                            name=u'uq_member_pool_id_address_protocol_port'))

    # Top-level load balancer entity.
    op.create_table(
        u'load_balancer', sa.Column(u'tenant_id',
                                    sa.String(255),
                                    nullable=True),
        sa.Column(u'id', sa.String(36), nullable=False),
        sa.Column(u'name', sa.String(255), nullable=True),
        sa.Column(u'description', sa.String(255), nullable=True),
        sa.Column(u'provisioning_status', sa.String(16), nullable=False),
        sa.Column(u'operating_status', sa.String(16), nullable=False),
        sa.Column(u'enabled', sa.Boolean(), nullable=False),
        sa.PrimaryKeyConstraint(u'id'),
        sa.ForeignKeyConstraint(
            [u'provisioning_status'], [u'provisioning_status.name'],
            name=u'fk_load_balancer_provisioning_status_name'),
        sa.ForeignKeyConstraint(
            [u'operating_status'], [u'operating_status.name'],
            name=u'fk_load_balancer_operating_status_name'))

    # Virtual IP information, one per load balancer.
    op.create_table(
        u'vip', sa.Column(u'load_balancer_id', sa.String(36), nullable=False),
        sa.Column(u'ip_address', sa.String(36), nullable=True),
        sa.Column(u'net_port_id', sa.String(36), nullable=True),
        sa.Column(u'subnet_id', sa.String(36), nullable=True),
        sa.Column(u'floating_ip_id', sa.String(36), nullable=True),
        sa.Column(u'floating_ip_network_id', sa.String(36), nullable=True),
        sa.PrimaryKeyConstraint(u'load_balancer_id'),
        sa.ForeignKeyConstraint([u'load_balancer_id'], [u'load_balancer.id'],
                                name=u'fk_vip_load_balancer_id'))

    # Listeners; unique per (load balancer, protocol port).
    op.create_table(
        u'listener', sa.Column(u'tenant_id', sa.String(255), nullable=True),
        sa.Column(u'id', sa.String(36), nullable=False),
        sa.Column(u'name', sa.String(255), nullable=True),
        sa.Column(u'description', sa.String(255), nullable=True),
        sa.Column(u'protocol', sa.String(16), nullable=False),
        sa.Column(u'protocol_port', sa.Integer(), nullable=False),
        sa.Column(u'connection_limit', sa.Integer(), nullable=True),
        sa.Column(u'load_balancer_id', sa.String(36), nullable=True),
        sa.Column(u'tls_certificate_id', sa.String(36), nullable=True),
        sa.Column(u'default_pool_id', sa.String(36), nullable=True),
        sa.Column(u'provisioning_status', sa.String(16), nullable=False),
        sa.Column(u'operating_status', sa.String(16), nullable=False),
        sa.Column(u'enabled', sa.Boolean(), nullable=False),
        sa.ForeignKeyConstraint([u'load_balancer_id'], [u'load_balancer.id'],
                                name=u'fk_listener_load_balancer_id'),
        sa.ForeignKeyConstraint([u'default_pool_id'], [u'pool.id'],
                                name=u'fk_listener_pool_id'),
        sa.ForeignKeyConstraint([u'protocol'], [u'protocol.name'],
                                name=u'fk_listener_protocol_name'),
        sa.ForeignKeyConstraint([u'provisioning_status'],
                                [u'provisioning_status.name'],
                                name=u'fk_listener_provisioning_status_name'),
        sa.ForeignKeyConstraint([u'operating_status'],
                                [u'operating_status.name'],
                                name=u'fk_listener_operating_status_name'),
        sa.UniqueConstraint(u'default_pool_id',
                            name=u'uq_listener_default_pool_id'),
        sa.UniqueConstraint(
            u'load_balancer_id',
            u'protocol_port',
            name=u'uq_listener_load_balancer_id_protocol_port'),
        sa.PrimaryKeyConstraint(u'id'))

    # SNI TLS containers attached to a listener.
    op.create_table(
        u'sni', sa.Column(u'listener_id', sa.String(36), nullable=False),
        sa.Column(u'tls_container_id', sa.String(36), nullable=False),
        sa.Column(u'position', sa.Integer, nullable=True),
        sa.ForeignKeyConstraint([u'listener_id'], [u'listener.id'],
                                name=u'fk_sni_listener_id'),
        sa.PrimaryKeyConstraint(u'listener_id', u'tls_container_id'))

    # Per-listener traffic statistics.
    op.create_table(
        u'listener_statistics',
        sa.Column(u'listener_id', sa.String(36), nullable=False),
        sa.Column(u'bytes_in', sa.BigInteger(), nullable=False),
        sa.Column(u'bytes_out', sa.BigInteger(), nullable=False),
        sa.Column(u'active_connections', sa.Integer(), nullable=False),
        sa.Column(u'total_connections', sa.BigInteger(), nullable=False),
        sa.PrimaryKeyConstraint(u'listener_id'),
        sa.ForeignKeyConstraint([u'listener_id'], [u'listener.id'],
                                name=u'fk_listener_statistics_listener_id'))

    op.create_table(
        u'amphora',
        # id should come from the service providing the amphora (i.e. nova)
        sa.Column(u'id', sa.String(36), nullable=False, autoincrement=False),
        sa.Column(u'host_id', sa.String(36), nullable=False),
        sa.Column(u'status', sa.String(36), nullable=False),
        sa.PrimaryKeyConstraint(u'id'),
        sa.ForeignKeyConstraint([u'status'], [u'provisioning_status.name'],
                                name=u'fk_container_provisioning_status_name'))

    # Many-to-many association between load balancers and amphorae.
    op.create_table(
        u'load_balancer_amphora',
        sa.Column(u'amphora_id', sa.String(36), nullable=False),
        sa.Column(u'load_balancer_id', sa.String(36), nullable=False),
        sa.ForeignKeyConstraint(
            [u'load_balancer_id'], [u'load_balancer.id'],
            name=u'fk_load_balancer_amphora_load_balancer_id'),
        sa.ForeignKeyConstraint([u'amphora_id'], [u'amphora.id'],
                                name=u'fk_load_balancer_amphora_id'),
        sa.PrimaryKeyConstraint(u'amphora_id', u'load_balancer_id'))
def upgrade():
    """Add the variable-fee flag and seed the CSB (Courts Online) fees.

    Adds ``fee_schedules.variable``, makes
    ``payment_line_items.fee_schedule_id`` mandatory, inserts the CSB filing
    types / fee code / fee schedules, and links the new schedules to the
    active 'Courts Online' distribution code.
    """
    op.add_column(
        'fee_schedules',
        sa.Column('variable',
                  sa.Boolean(),
                  nullable=True,
                  comment='Flag to indicate if the fee is variable'))
    op.alter_column('payment_line_items',
                    'fee_schedule_id',
                    existing_type=sa.INTEGER(),
                    nullable=False)

    # Lightweight table constructs used only for the bulk inserts below.
    link_tbl = table('distribution_code_links',
                     column('distribution_code_id', String),
                     column('fee_schedule_id', String))
    filing_type_tbl = table('filing_types',
                            column('code', String),
                            column('description', String))
    fee_code_tbl = table('fee_codes',
                         column('code', String),
                         column('amount', Float))
    fee_schedule_tbl = table('fee_schedules',
                             column('filing_type_code', String),
                             column('corp_type_code', String),
                             column('fee_code', String),
                             column('fee_start_date', Date),
                             column('fee_end_date', Date),
                             column('service_fee_code', String),
                             column('variable', Boolean))

    op.bulk_insert(filing_type_tbl, [
        {'code': code, 'description': description}
        for code, description in (
            ('CSBVFEE', 'CSB Filing'),
            ('CSBSRCH', 'CSB File Search'),
            ('CSBPDOC', 'CSB Document Requisition'),
            ('CSCRMTFC', 'CSB Criminal Search'),
        )
    ])

    op.bulk_insert(fee_code_tbl, [{'code': 'EN115', 'amount': 6}])

    # Every schedule starts today, is open-ended and has no service fee;
    # only the CSBVFEE filing carries a variable fee.
    op.bulk_insert(fee_schedule_tbl, [
        {'filing_type_code': filing_type,
         'corp_type_code': 'CSO',
         'fee_code': fee_code,
         'fee_start_date': date.today(),
         'fee_end_date': None,
         'service_fee_code': None,
         'variable': is_variable}
        for filing_type, fee_code, is_variable in (
            ('CSBVFEE', 'EN107', True),
            ('CSBSRCH', 'EN115', False),
            ('CSBPDOC', 'EN114', False),
            ('CSCRMTFC', 'EN107', False),
        )
    ])

    # Now find out the distribution code for other CSBFILE and map it to them.
    conn = op.get_bind()
    distribution_code_rows = conn.execute(
        "select dc.distribution_code_id from distribution_codes dc "
        "where upper(dc.name) = upper('Courts Online') "
        "and dc.start_date <= CURRENT_DATE "
        "and (dc.end_date is null or dc.end_date > CURRENT_DATE)").fetchall()
    if distribution_code_rows and distribution_code_rows[0]:
        distribution_code_id = distribution_code_rows[0][0]
        schedule_rows = conn.execute(
            "select fee_schedule_id from fee_schedules where corp_type_code='CSO' and "
            "filing_type_code in ('CSBVFEE', 'CSBSRCH', 'CSBPDOC', 'CSCRMTFC')"
        ).fetchall()

        op.bulk_insert(link_tbl, [
            {'distribution_code_id': distribution_code_id,
             'fee_schedule_id': schedule_row[0]}
            for schedule_row in schedule_rows
        ])
def upgrade() -> None:
    """Create the initial geoportal schema.

    Builds the functionality/tree-item/role/user/layer/theme tables in the
    configured schemas, adds geometry columns, and seeds a default admin
    role and user.  No-op when the ``functionality`` table already exists.
    """
    # Target schemas and SRID come from the Alembic config.
    schema = config["schema"]
    schema_static = config["schema_static"]
    parentschema = config.get("parentschema")
    srid = config.get("srid")

    # Idempotence guard: skip everything if the schema was already created.
    # A MockConnection (offline SQL-generation mode) cannot be introspected,
    # so the check is bypassed in that case.
    engine = op.get_bind().engine
    if type(engine).__name__ != "MockConnection" and op.get_context().dialect.has_table(
        engine, "functionality", schema=schema
    ):
        return

    op.create_table(
        "functionality",
        Column("id", Integer, primary_key=True),
        Column("name", Unicode, nullable=False),
        Column("value", Unicode, nullable=False),
        Column("description", Unicode),
        schema=schema,
    )
    # Base table for the layer-tree hierarchy; "type" discriminates subtypes.
    op.create_table(
        "treeitem",
        Column("type", String(10), nullable=False),
        Column("id", Integer, primary_key=True),
        Column("name", Unicode),
        Column("order", Integer, nullable=False),
        Column("metadataURL", Unicode),
        schema=schema,
    )
    op.create_table(
        "restrictionarea",
        Column("id", Integer, primary_key=True),
        Column("name", Unicode),
        Column("description", Unicode),
        Column("readwrite", Boolean, default=False),
        schema=schema,
    )
    # Geometry columns are added via AddGeometryColumn -- presumably the
    # PostGIS 1.x-style function; TODO confirm target PostGIS version.
    op.execute(
        "SELECT AddGeometryColumn('%(schema)s', 'restrictionarea', "
        "'area', %(srid)s, 'POLYGON', 2)" % {"schema": schema, "srid": srid}
    )
    # Short-URL storage lives in the static schema (survives schema rebuilds).
    op.create_table(
        "shorturl",
        Column("id", Integer, primary_key=True),
        Column("url", Unicode(1000)),
        Column("ref", String(20), index=True, unique=True, nullable=False),
        Column("creator_email", Unicode(200)),
        Column("creation", DateTime),
        Column("last_hit", DateTime),
        Column("nb_hits", Integer),
        schema=schema_static,
    )

    op.create_table(
        "role",
        Column("id", Integer, primary_key=True),
        Column("name", Unicode, unique=True, nullable=False),
        Column("description", Unicode),
        schema=schema,
    )
    op.execute(
        "SELECT AddGeometryColumn('%(schema)s', 'role', "
        "'extent', %(srid)s, 'POLYGON', 2)" % {"schema": schema, "srid": srid}
    )
    # Seed the built-in admin role (minimal Table construct, insert only).
    role = Table("role", MetaData(), Column("name", Unicode, unique=True, nullable=False), schema=schema)
    op.bulk_insert(role, [{"name": "role_admin"}])

    op.create_table(
        "layer",
        Column("id", Integer, ForeignKey(schema + ".treeitem.id"), primary_key=True),
        Column("public", Boolean, default=True),
        Column("inMobileViewer", Boolean, default=True),
        Column("inDesktopViewer", Boolean, default=True),
        Column("isChecked", Boolean, default=True),
        Column("icon", Unicode),
        Column("layerType", Unicode(12)),
        Column("url", Unicode),
        Column("imageType", Unicode(10)),
        Column("style", Unicode),
        Column("dimensions", Unicode),
        Column("matrixSet", Unicode),
        Column("wmsUrl", Unicode),
        Column("wmsLayers", Unicode),
        Column("queryLayers", Unicode),
        Column("kml", Unicode),
        Column("isSingleTile", Boolean),
        Column("legend", Boolean, default=True),
        Column("legendImage", Unicode),
        Column("legendRule", Unicode),
        Column("isLegendExpanded", Boolean, default=False),
        Column("minResolution", Float),
        Column("maxResolution", Float),
        Column("disclaimer", Unicode),
        Column("identifierAttributeField", Unicode),
        Column("geoTable", Unicode),
        Column("excludeProperties", Unicode),
        Column("timeMode", Unicode(8)),
        schema=schema,
    )
    # Association table: which roles may access which restriction areas.
    op.create_table(
        "role_restrictionarea",
        Column("role_id", Integer, ForeignKey(schema + ".role.id"), primary_key=True),
        Column("restrictionarea_id", Integer, ForeignKey(schema + ".restrictionarea.id"), primary_key=True),
        schema=schema,
    )
    # Full-text search index table ("ts" is a tsvector, indexed below).
    op.create_table(
        "tsearch",
        Column("id", Integer, primary_key=True),
        Column("label", Unicode),
        Column("layer_name", Unicode),
        Column("role_id", Integer, ForeignKey(schema + ".role.id"), nullable=True),
        Column("public", Boolean, server_default="true"),
        Column("ts", TsVector),
        Column("params", Unicode, nullable=True),
        schema=schema,
    )
    op.execute(
        "SELECT AddGeometryColumn('%(schema)s', 'tsearch', 'the_geom', "
        "%(srid)s, 'GEOMETRY', 2)" % {"schema": schema, "srid": srid}
    )
    op.create_index("tsearch_ts_idx", "tsearch", ["ts"], schema=schema, postgresql_using="gin")
    op.create_table(
        "treegroup",
        Column("id", Integer, ForeignKey(schema + ".treeitem.id"), primary_key=True),
        schema=schema,
    )

    op.create_table(
        "user",
        Column("type", String(10), nullable=False),
        Column("id", Integer, primary_key=True),
        Column("username", Unicode, unique=True, nullable=False),
        Column("password", Unicode, nullable=False),
        Column("email", Unicode, nullable=False),
        Column("is_password_changed", Boolean, default=False),
        Column("role_id", Integer, ForeignKey(schema + ".role.id"), nullable=False),
        schema=schema,
    )
    # Optional link to a role in a parent application's schema.
    if parentschema is not None and parentschema != "":
        op.add_column(
            "user", Column("parent_role_id", Integer, ForeignKey(parentschema + ".role.id")), schema=schema
        )
    # Seed the default admin user (password is sha1('admin'); the email
    # literal appears redacted in this source -- do not change it here).
    op.execute(
        "INSERT INTO %(schema)s.user (type, username, email, password, role_id) "
        "(SELECT 'user', 'admin', '*****@*****.**', '%(pass)s', r.id "
        "FROM %(schema)s.role AS r "
        "WHERE r.name = 'role_admin')" % {"schema": schema, "pass": sha1("admin".encode("utf-8")).hexdigest()}
    )

    op.create_table(
        "role_functionality",
        Column("role_id", Integer, ForeignKey(schema + ".role.id"), primary_key=True),
        Column("functionality_id", Integer, ForeignKey(schema + ".functionality.id"), primary_key=True),
        schema=schema,
    )
    op.create_table(
        "user_functionality",
        Column("user_id", Integer, ForeignKey(schema + ".user.id"), primary_key=True),
        Column("functionality_id", Integer, ForeignKey(schema + ".functionality.id"), primary_key=True),
        schema=schema,
    )
    op.create_table(
        "layergroup",
        Column("id", Integer, ForeignKey(schema + ".treegroup.id"), primary_key=True),
        Column("isExpanded", Boolean),
        Column("isInternalWMS", Boolean),
        # children have radio button instance of check box
        Column("isBaseLayer", Boolean),
        schema=schema,
    )
    op.create_table(
        "layer_restrictionarea",
        Column("layer_id", Integer, ForeignKey(schema + ".layer.id"), primary_key=True),
        Column("restrictionarea_id", Integer, ForeignKey(schema + ".restrictionarea.id"), primary_key=True),
        schema=schema,
    )
    op.create_table(
        "layergroup_treeitem",
        Column("treegroup_id", Integer, ForeignKey(schema + ".treegroup.id"), primary_key=True),
        Column("treeitem_id", Integer, ForeignKey(schema + ".treeitem.id"), primary_key=True),
        schema=schema,
    )
    op.create_table(
        "theme",
        Column("id", Integer, ForeignKey(schema + ".treegroup.id"), primary_key=True),
        Column("icon", Unicode),
        Column("inMobileViewer", Boolean, default=False),
        Column("inDesktopViewer", Boolean, default=True),
        schema=schema,
    )
    op.create_table(
        "theme_functionality",
        Column("theme_id", Integer, ForeignKey(schema + ".theme.id"), primary_key=True),
        Column("functionality_id", Integer, ForeignKey(schema + ".functionality.id"), primary_key=True),
        schema=schema,
    )

    # Seed a default 'background' group: one treeitem, mirrored into
    # treegroup and layergroup (the inserts copy every treeitem id, but at
    # this point only the 'background' row exists).
    op.execute(
        'INSERT INTO {schema}.treeitem (type, name, "order") '
        "VALUES ('group', 'background', 0)".format(schema=schema)
    )
    op.execute(
        "INSERT INTO {schema}.treegroup (id) SELECT id " "FROM {schema}.treeitem".format(schema=schema)
    )
    op.execute(
        "INSERT INTO {schema}.layergroup (id) SELECT id " "FROM {schema}.treeitem".format(schema=schema)
    )
Exemple #50
0
def _parse_db_datetime(value):
    """Coerce a datetime value read from the DB to ``datetime`` or ``None``.

    SQLite returns datetimes as text while other backends return real
    ``datetime`` objects; strings are parsed with ``dateutil``'s ``parse``.
    """
    if isinstance(value, datetime):
        return value
    if value:
        return parse(value)
    return None


def _tasks_columns():
    """Return fresh Column objects for the common ``tasks`` schema.

    A new list is built on every call because SQLAlchemy ``Column`` objects
    cannot be shared between two ``create_table`` invocations.
    """
    return [
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("target", sa.Text(), nullable=False),
        sa.Column("category", sa.String(length=255), nullable=False),
        sa.Column("timeout", sa.Integer(), server_default="0", nullable=False),
        sa.Column("priority", sa.Integer(), server_default="1", nullable=False),
        sa.Column("custom", sa.String(length=255), nullable=True),
        sa.Column("machine", sa.String(length=255), nullable=True),
        sa.Column("package", sa.String(length=255), nullable=True),
        sa.Column("options", sa.String(length=255), nullable=True),
        sa.Column("platform", sa.String(length=255), nullable=True),
        sa.Column("memory", sa.Boolean(), nullable=False, default=False),
        sa.Column("enforce_timeout", sa.Boolean(), nullable=False, default=False),
        sa.Column("clock", sa.DateTime(timezone=False), default=datetime.now, nullable=False),
        sa.Column("added_on", sa.DateTime(timezone=False), nullable=False),
        sa.Column("started_on", sa.DateTime(timezone=False), nullable=True),
        sa.Column("completed_on", sa.DateTime(timezone=False), nullable=True),
        sa.Column("status", sa.Enum("pending", "running", "completed", "reported", "recovered", "failed_analysis", "failed_processing", "failed_reporting", name="status_type"), server_default="pending", nullable=False),
        sa.Column("sample_id", sa.Integer, sa.ForeignKey("samples.id"), nullable=True),
    ]


def _perform(upgrade):
    """Rebuild the ``tasks`` table for the 1.2 schema migration.

    Reads every row from the old ``tasks`` table, drops and recreates the
    table (with statistics columns when *upgrade* is true, without them
    otherwise), then re-inserts the preserved rows.

    :param upgrade: True for the upgrade direction (add statistics columns),
        False for the downgrade direction.
    """
    conn = op.get_bind()

    # Column layout of the old SELECT: positions 0-11 are copied verbatim,
    # 12-15 are datetime-ish, 16-17 are status/sample_id.
    scalar_fields = ("id", "target", "category", "timeout", "priority",
                     "custom", "machine", "package", "options", "platform",
                     "memory", "enforce_timeout")
    datetime_fields = ("clock", "added_on", "started_on", "completed_on")

    # Read data.
    tasks_data = []
    old_tasks = conn.execute(
        "select id, target, category, timeout, priority, custom, machine, "
        "package, options, platform, memory, enforce_timeout, clock, added_on, "
        "started_on, completed_on, status, sample_id from tasks").fetchall()

    for item in old_tasks:
        d = dict(zip(scalar_fields, item))
        for index, name in enumerate(datetime_fields,
                                     start=len(scalar_fields)):
            d[name] = _parse_db_datetime(item[index])
        d["status"] = item[16]
        d["sample_id"] = item[17]

        if upgrade:
            # Columns for statistics (via Thorsten's statistics page); no
            # historical data exists, so they are backfilled with NULL.
            for stats_field in ("dropped_files", "running_processes",
                                "api_calls", "domains", "signatures_total",
                                "signatures_alert", "files_written",
                                "registry_keys_modified", "crash_issues",
                                "anti_issues", "analysis_started_on",
                                "analysis_finished_on",
                                "processing_started_on",
                                "processing_finished_on",
                                "signatures_started_on",
                                "signatures_finished_on",
                                "reporting_started_on",
                                "reporting_finished_on"):
                d[stats_field] = None

            d["timedout"] = False
            d["machine_id"] = None

        tasks_data.append(d)

    if conn.engine.driver == "mysqldb":
        # Disable foreign key checking to migrate table avoiding checks.
        op.execute('SET foreign_key_checks = 0')

    # Drop old table.
    op.drop_table("tasks")

    # Create table with 1.2 schema.
    if upgrade:
        stats_columns = [
            sa.Column(name, sa.Integer(), nullable=True)
            for name in ("dropped_files", "running_processes", "api_calls",
                         "domains", "signatures_total", "signatures_alert",
                         "files_written", "registry_keys_modified",
                         "crash_issues", "anti_issues")
        ] + [
            sa.Column(name, sa.DateTime(timezone=False), nullable=True)
            for name in ("analysis_started_on", "analysis_finished_on",
                         "processing_started_on", "processing_finished_on",
                         "signatures_started_on", "signatures_finished_on",
                         "reporting_started_on", "reporting_finished_on")
        ] + [
            sa.Column("timedout", sa.Boolean(), nullable=False, default=False),
            sa.Column("machine_id", sa.Integer(), nullable=True),
        ]
        op.create_table(
            "tasks",
            *(_tasks_columns() + stats_columns),
            sa.PrimaryKeyConstraint("id")
        )
    else:
        op.create_table(
            "tasks",
            *_tasks_columns(),
            sa.PrimaryKeyConstraint("id")
        )

    if conn.engine.driver == "mysqldb":
        op.execute('COMMIT')

    # Insert data.
    op.bulk_insert(db.Task.__table__, tasks_data)

    if conn.engine.driver == "mysqldb":
        # Enable foreign key.
        op.execute('SET foreign_key_checks = 1')
Exemple #51
0
def upgrade():
    """Seed the notification_types lookup table with the workflow
    notification definitions: cycle creation, task due-date reminders,
    reassignment, decline, completion, and workflow start reminders.
    """
    notification_types_table = table(
        'notification_types',
        column('id', sa.Integer),
        column('name', sa.String),
        column('description', sa.Text),
        column('template', sa.String),
        column('instant', sa.Boolean),
        column('advance_notice', sa.Integer),
        column('advance_notice_end', sa.Integer),
        column('created_at', sa.DateTime),
        column('modified_by_id', sa.Integer),
        column('updated_at', sa.DateTime),
        column('context_id', sa.Integer),
    )

    def row(name, description, template, advance_notice, instant):
        # Shape of a single notification_types record.
        return {
            "name": name,
            "description": description,
            "template": template,
            "advance_notice": advance_notice,
            "instant": instant,
        }

    # Shared description strings.
    cycle_created_desc = ("Notify workflow members that a one time workflow "
                          "has been started and send them their assigned "
                          "tasks.")
    due_in_desc = "Notify task assignee his task is due in X days"
    due_today_desc = "Notify task assignee his task is due today"
    starts_in_desc = "Advanced notification for a recurring workflow."

    rows = [
        # cycle created notifications
        row("cycle_created", cycle_created_desc, "cycle_created", 0, False),
        row("manual_cycle_created", cycle_created_desc,
            "manual_cycle_created", 0, True),
    ]

    # cycle task due in notifications: one generic entry plus one per
    # workflow frequency, all sharing the same description and template.
    for prefix in ("", "one_time_", "weekly_", "monthly_", "quarterly_",
                   "annually_"):
        rows.append(row(prefix + "cycle_task_due_in", due_in_desc,
                        "cycle_task_due_in", 1, False))

    rows += [
        row("cycle_task_due_today", due_today_desc,
            "cycle_task_due_today", 0, False),

        # reassigned notifications
        row("cycle_task_reassigned", due_today_desc,
            "cycle_task_due_today", 0, True),
        row("task_group_assignee_change",
            "Email owners on task group assignee change.",
            "task_group_assignee_change", 0, True),

        # declined
        row("cycle_task_declined", due_today_desc,
            "cycle_task_due_today", 0, True),

        # all cycle tasks finished
        row("all_cycle_tasks_completed",
            ("Notify workflow owner when all cycle tasks in one"
             " cycle have been completed and verified"),
            "weekly_workflow_starts_in", 1, True),

        # workflow starts in notifications
        row("weekly_workflow_starts_in", starts_in_desc,
            "weekly_workflow_starts_in", 1, False),
        row("monthly_workflow_starts_in", starts_in_desc,
            "monthly_workflow_starts_in", 3, False),
        # NOTE(review): the 'quaterly'/'annual' template spellings below
        # differ from their notification names; kept byte-for-byte since
        # they presumably match existing template file names — confirm.
        row("quarterly_workflow_starts_in", starts_in_desc,
            "quaterly_workflow_starts_in", 7, False),
        row("annually_workflow_starts_in", starts_in_desc,
            "annual_workflow_starts_in", 15, False),
    ]

    op.bulk_insert(notification_types_table, rows)
Exemple #52
0
def data_upgrades():
    """Insert the demo SCEP configuration row into its ad-hoc table."""
    # Everything except TABLE's final element describes the table itself.
    *table_spec, _ = TABLE
    op.bulk_insert(sa.table(*table_spec), [DEMO_SCEP_CONFIG])
def upgrade():
    """Drop obsolete amount columns from sales_order_line and create the
    preference table, seeded with its single default row (id 1).
    """
    # These per-line amounts are no longer tracked.
    for obsolete in ('actual_amount', 'adjust_amount', 'original_amount'):
        op.drop_column('sales_order_line', obsolete)

    # Every def_* column is a NOT NULL foreign key into enum_values.
    enum_fk_names = (
        'def_so_incoming_type_id',
        'def_so_incoming_status_id',
        'def_so_exp_type_id',
        'def_so_exp_status_id',
        'def_po_logistic_exp_status_id',
        'def_po_logistic_exp_type_id',
        'def_po_goods_exp_status_id',
        'def_po_goods_exp_type_id',
    )
    definitions = [sa.Column('id', sa.Integer(), nullable=False)]
    definitions += [sa.Column(name, sa.Integer(), nullable=False)
                    for name in enum_fk_names]
    definitions.append(sa.Column('remark', sa.Text(), nullable=True))
    definitions += [sa.ForeignKeyConstraint([name], ['enum_values.id'])
                    for name in enum_fk_names]
    definitions.append(sa.PrimaryKeyConstraint('id'))
    preference_table = op.create_table('preference', *definitions)

    op.bulk_insert(
        preference_table,
        [{
            'id': 1,
            'def_so_incoming_type_id': 12,
            'def_so_incoming_status_id': 10,
            'def_so_exp_type_id': 9,
            'def_so_exp_status_id': 6,
            'def_po_logistic_exp_status_id': 6,
            'def_po_logistic_exp_type_id': 7,
            'def_po_goods_exp_status_id': 6,
            'def_po_goods_exp_type_id': 8,
        }],
        multiinsert=False)

    # The seed row used id 1 explicitly, so advance the sequence past it.
    from sqlalchemy.sql import text
    op.get_bind().execute(
        text("ALTER SEQUENCE preference_id_seq RESTART WITH 2;"))
Exemple #54
0
def upgrade():
    """Seed the item table with the initial shoe/boot catalog, one
    bulk_insert per brand.

    Fixes the original seed data, which passed brand_id/gender_id as
    strings ('18', '1') into Integer columns and relied on implicit DB
    coercion; they are now real ints. The image URL columns are declared
    but intentionally left unset, as before.
    """
    # Ad-hoc table for the insert statements.
    item_table = table('item',
                       column('model', String),
                       column('brand_id', Integer),
                       column('gender_id', Integer),
                       column('small_image_url', String),
                       column('medium_image_url', String),
                       column('type', String)
                       )

    # (model, brand_id, gender_id, type) tuples, batched per brand so the
    # insert pattern (one bulk_insert per brand) matches the original.
    batches = [
        # Asolo
        [('mont blanc gv', 18, 1, 'mountain')],
        # Boreal
        [('synergy', 6, 3, 'rock')],
        # La Sportiva
        [('G5', 3, 3, 'mountain'),
         ('G2', 3, 3, 'mountain')],
        # Lowa
        [('alpine pro gtx', 19, 1, 'mountain')],
        # Zamberlan
        [('mountain pro evo gtx', 20, 1, 'mountain'),
         ('mountain pro evo gtx', 20, 2, 'mountain'),
         ('karka evo RR', 20, 1, 'mountain'),
         ('eiger evo RR', 20, 1, 'mountain'),
         ('everest evo RR', 20, 1, 'mountain'),
         ('expert pro gtx RR', 20, 1, 'mountain'),
         ('sparrow RR', 20, 2, 'approach'),
         ('intrepid RR', 20, 1, 'approach')],
        # Evolv
        [('zender', 1, 3, 'approach')],
        # Mad Rock
        [('frenzy ez', 4, 3, 'rock'),
         ('frenzy lace', 4, 3, 'rock'),
         ('banshee', 4, 2, 'rock'),
         ('mugen tech 2.0', 4, 3, 'rock'),
         ('maniac', 4, 3, 'rock')],
    ]
    for batch in batches:
        op.bulk_insert(item_table, [
            {'model': model, 'brand_id': brand,
             'gender_id': gender, 'type': kind}
            for model, brand, gender, kind in batch
        ])
Exemple #55
0
def _perform(upgrade):
    """Add (upgrade=True) or remove (upgrade=False) the 'failed_reporting'
    task status.

    On PostgreSQL the status_type ENUM is altered in place with raw SQL
    (alembic cannot alter ENUM columns portably). On every other backend
    the tasks table is read into memory, dropped, recreated with the new
    ENUM, and re-populated; MySQL additionally needs foreign key checks
    disabled and the old ENUM type dropped explicitly.

    The original version duplicated the ~55-line create_table call four
    times (differing only in the ENUM values) and copy-pasted the datetime
    coercion block per column; both are factored into helpers below with
    identical behavior.
    """
    conn = op.get_bind()

    if conn.engine.driver == "psycopg2":
        # ALTER TYPE cannot run inside a transaction; commit first.
        op.execute('COMMIT')
        if upgrade:
            conn.execute("ALTER TYPE status_type ADD VALUE 'failed_reporting'")
        else:
            conn.execute(
                "ALTER TYPE status_type DROP ATTRIBUTE IF EXISTS failed_reporting"
            )
    else:
        # Snapshot existing rows before dropping the table.
        tasks_data = _read_tasks(conn)

        if conn.engine.driver == "mysqldb":
            # Disable foreign key checking to migrate the table without checks.
            op.execute('SET foreign_key_checks = 0')
            op.drop_table("tasks")
            # Drop the old ENUM type explicitly before recreating it.
            sa.Enum(name="status_type").drop(op.get_bind(), checkfirst=False)
            _create_tasks_table(upgrade)
            op.execute('COMMIT')
            op.bulk_insert(db.Task.__table__, tasks_data)
            # Re-enable foreign key checking.
            op.execute('SET foreign_key_checks = 1')
        else:
            op.drop_table("tasks")
            _create_tasks_table(upgrade)
            op.bulk_insert(db.Task.__table__, tasks_data)


def _as_datetime(value):
    """Coerce a DB value to a datetime: pass through datetimes, parse
    non-empty strings with dateutil, and map empty/None to None."""
    if isinstance(value, datetime):
        return value
    if value:
        return parse(value)
    return None


def _read_tasks(conn):
    """Read all tasks rows as dicts, normalizing the datetime columns."""
    columns = ("id", "target", "category", "timeout", "priority", "custom",
               "machine", "package", "options", "platform", "memory",
               "enforce_timeout", "clock", "added_on", "started_on",
               "completed_on", "status", "sample_id")
    old_tasks = conn.execute(
        "select id, target, category, timeout, priority, custom, machine, package, options, platform, memory, enforce_timeout, clock, added_on, started_on, completed_on, status, sample_id from tasks"
    ).fetchall()
    tasks_data = []
    for item in old_tasks:
        d = dict(zip(columns, item))
        for name in ("clock", "added_on", "started_on", "completed_on"):
            d[name] = _as_datetime(d[name])
        tasks_data.append(d)
    return tasks_data


def _create_tasks_table(upgrade):
    """Create the 1.2-schema tasks table; the status ENUM includes
    'failed_reporting' only when upgrading."""
    statuses = ["pending", "running", "completed", "reported", "recovered",
                "failed_analysis", "failed_processing"]
    if upgrade:
        statuses.append("failed_reporting")
    op.create_table(
        "tasks",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("target", sa.String(length=255), nullable=False),
        sa.Column("category", sa.String(length=255), nullable=False),
        sa.Column("timeout", sa.Integer(), server_default="0",
                  nullable=False),
        sa.Column("priority", sa.Integer(), server_default="1",
                  nullable=False),
        sa.Column("custom", sa.String(length=255), nullable=True),
        sa.Column("machine", sa.String(length=255), nullable=True),
        sa.Column("package", sa.String(length=255), nullable=True),
        sa.Column("options", sa.String(length=255), nullable=True),
        sa.Column("platform", sa.String(length=255), nullable=True),
        sa.Column("memory", sa.Boolean(), nullable=False, default=False),
        sa.Column("enforce_timeout", sa.Boolean(), nullable=False,
                  default=False),
        sa.Column("clock", sa.DateTime(timezone=False),
                  default=datetime.now, nullable=False),
        sa.Column("added_on", sa.DateTime(timezone=False), nullable=False),
        sa.Column("started_on", sa.DateTime(timezone=False), nullable=True),
        sa.Column("completed_on", sa.DateTime(timezone=False),
                  nullable=True),
        sa.Column("status", sa.Enum(*statuses, name="status_type"),
                  server_default="pending", nullable=False),
        sa.Column("sample_id", sa.Integer, sa.ForeignKey("samples.id"),
                  nullable=True),
        sa.PrimaryKeyConstraint("id"))
Exemple #56
0
def upgrade():
    op.bulk_insert(countries, [
        {
            "name": "Afghanistan",
            "code": "AFG"
        },
        {
            "name": "Åland Islands",
            "code": "ALA"
        },
        {
            "name": "Albania",
            "code": "ALB"
        },
        {
            "name": "Algeria",
            "code": "DZA"
        },
        {
            "name": "American Samoa",
            "code": "ASM"
        },
        {
            "name": "Andorra",
            "code": "AND"
        },
        {
            "name": "Angola",
            "code": "AGO"
        },
        {
            "name": "Anguilla",
            "code": "AIA"
        },
        {
            "name": "Antarctica",
            "code": "ATA"
        },
        {
            "name": "Antigua and Barbuda",
            "code": "ATG"
        },
        {
            "name": "Argentina",
            "code": "ARG"
        },
        {
            "name": "Armenia",
            "code": "ARM"
        },
        {
            "name": "Aruba",
            "code": "ABW"
        },
        {
            "name": "Australia",
            "code": "AUS"
        },
        {
            "name": "Austria",
            "code": "AUT"
        },
        {
            "name": "Azerbaijan",
            "code": "AZE"
        },
        {
            "name": "Bahamas",
            "code": "BHS"
        },
        {
            "name": "Bahrain",
            "code": "BHR"
        },
        {
            "name": "Bangladesh",
            "code": "BGD"
        },
        {
            "name": "Barbados",
            "code": "BRB"
        },
        {
            "name": "Belarus",
            "code": "BLR"
        },
        {
            "name": "Belgium",
            "code": "BEL"
        },
        {
            "name": "Belize",
            "code": "BLZ"
        },
        {
            "name": "Benin",
            "code": "BEN"
        },
        {
            "name": "Bermuda",
            "code": "BMU"
        },
        {
            "name": "Bhutan",
            "code": "BTN"
        },
        {
            "name": "Bolivia (Plurinational State of)",
            "code": "BOL"
        },
        {
            "name": "Bonaire, Sint Eustatius and Saba",
            "code": "BES"
        },
        {
            "name": "Bosnia and Herzegovina",
            "code": "BIH"
        },
        {
            "name": "Botswana",
            "code": "BWA"
        },
        {
            "name": "Bouvet Island",
            "code": "BVT"
        },
        {
            "name": "Brazil",
            "code": "BRA"
        },
        {
            "name": "British Indian Ocean Territory",
            "code": "IOT"
        },
        {
            "name": "Brunei Darussalam",
            "code": "BRN"
        },
        {
            "name": "Bulgaria",
            "code": "BGR"
        },
        {
            "name": "Burkina Faso",
            "code": "BFA"
        },
        {
            "name": "Burundi",
            "code": "BDI"
        },
        {
            "name": "Cambodia",
            "code": "KHM"
        },
        {
            "name": "Cameroon",
            "code": "CMR"
        },
        {
            "name": "Canada",
            "code": "CAN"
        },
        {
            "name": "Cabo Verde",
            "code": "CPV"
        },
        {
            "name": "Cayman Islands",
            "code": "CYM"
        },
        {
            "name": "Central African Republic",
            "code": "CAF"
        },
        {
            "name": "Chad",
            "code": "TCD"
        },
        {
            "name": "Chile",
            "code": "CHL"
        },
        {
            "name": "China",
            "code": "CHN"
        },
        {
            "name": "Christmas Island",
            "code": "CXR"
        },
        {
            "name": "Cocos (Keeling) Islands",
            "code": "CCK"
        },
        {
            "name": "Colombia",
            "code": "COL"
        },
        {
            "name": "Comoros",
            "code": "COM"
        },
        {
            "name": "Congo",
            "code": "COG"
        },
        {
            "name": "Congo (Democratic Republic of the)",
            "code": "COD"
        },
        {
            "name": "Cook Islands",
            "code": "COK"
        },
        {
            "name": "Costa Rica",
            "code": "CRI"
        },
        {
            "name": "Côte d'Ivoire",
            "code": "CIV"
        },
        {
            "name": "Croatia",
            "code": "HRV"
        },
        {
            "name": "Cuba",
            "code": "CUB"
        },
        {
            "name": "Curaçao",
            "code": "CUW"
        },
        {
            "name": "Cyprus",
            "code": "CYP"
        },
        {
            "name": "Czech Republic",
            "code": "CZE"
        },
        {
            "name": "Denmark",
            "code": "DNK"
        },
        {
            "name": "Djibouti",
            "code": "DJI"
        },
        {
            "name": "Dominica",
            "code": "DMA"
        },
        {
            "name": "Dominican Republic",
            "code": "DOM"
        },
        {
            "name": "Ecuador",
            "code": "ECU"
        },
        {
            "name": "Egypt",
            "code": "EGY"
        },
        {
            "name": "El Salvador",
            "code": "SLV"
        },
        {
            "name": "Equatorial Guinea",
            "code": "GNQ"
        },
        {
            "name": "Eritrea",
            "code": "ERI"
        },
        {
            "name": "Estonia",
            "code": "EST"
        },
        {
            "name": "Ethiopia",
            "code": "ETH"
        },
        {
            "name": "Falkland Islands (Malvinas)",
            "code": "FLK"
        },
        {
            "name": "Faroe Islands",
            "code": "FRO"
        },
        {
            "name": "Fiji",
            "code": "FJI"
        },
        {
            "name": "Finland",
            "code": "FIN"
        },
        {
            "name": "France",
            "code": "FRA"
        },
        {
            "name": "French Guiana",
            "code": "GUF"
        },
        {
            "name": "French Polynesia",
            "code": "PYF"
        },
        {
            "name": "French Southern Territories",
            "code": "ATF"
        },
        {
            "name": "Gabon",
            "code": "GAB"
        },
        {
            "name": "Gambia",
            "code": "GMB"
        },
        {
            "name": "Georgia",
            "code": "GEO"
        },
        {
            "name": "Germany",
            "code": "DEU"
        },
        {
            "name": "Ghana",
            "code": "GHA"
        },
        {
            "name": "Gibraltar",
            "code": "GIB"
        },
        {
            "name": "Greece",
            "code": "GRC"
        },
        {
            "name": "Greenland",
            "code": "GRL"
        },
        {
            "name": "Grenada",
            "code": "GRD"
        },
        {
            "name": "Guadeloupe",
            "code": "GLP"
        },
        {
            "name": "Guam",
            "code": "GUM"
        },
        {
            "name": "Guatemala",
            "code": "GTM"
        },
        {
            "name": "Guernsey",
            "code": "GGY"
        },
        {
            "name": "Guinea",
            "code": "GIN"
        },
        {
            "name": "Guinea-Bissau",
            "code": "GNB"
        },
        {
            "name": "Guyana",
            "code": "GUY"
        },
        {
            "name": "Haiti",
            "code": "HTI"
        },
        {
            "name": "Heard Island and McDonald Islands",
            "code": "HMD"
        },
        {
            "name": "Holy See",
            "code": "VAT"
        },
        {
            "name": "Honduras",
            "code": "HND"
        },
        {
            "name": "Hong Kong",
            "code": "HKG"
        },
        {
            "name": "Hungary",
            "code": "HUN"
        },
        {
            "name": "Iceland",
            "code": "ISL"
        },
        {
            "name": "India",
            "code": "IND"
        },
        {
            "name": "Indonesia",
            "code": "IDN"
        },
        {
            "name": "Iran (Islamic Republic of)",
            "code": "IRN"
        },
        {
            "name": "Iraq",
            "code": "IRQ"
        },
        {
            "name": "Ireland",
            "code": "IRL"
        },
        {
            "name": "Isle of Man",
            "code": "IMN"
        },
        {
            "name": "Israel",
            "code": "ISR"
        },
        {
            "name": "Italy",
            "code": "ITA"
        },
        {
            "name": "Jamaica",
            "code": "JAM"
        },
        {
            "name": "Japan",
            "code": "JPN"
        },
        {
            "name": "Jersey",
            "code": "JEY"
        },
        {
            "name": "Jordan",
            "code": "JOR"
        },
        {
            "name": "Kazakhstan",
            "code": "KAZ"
        },
        {
            "name": "Kenya",
            "code": "KEN"
        },
        {
            "name": "Kiribati",
            "code": "KIR"
        },
        {
            "name": "Korea (Democratic People's Republic of)",
            "code": "PRK"
        },
        {
            "name": "Korea (Republic of)",
            "code": "KOR"
        },
        {
            "name": "Kuwait",
            "code": "KWT"
        },
        {
            "name": "Kyrgyzstan",
            "code": "KGZ"
        },
        {
            "name": "Lao People's Democratic Republic",
            "code": "LAO"
        },
        {
            "name": "Latvia",
            "code": "LVA"
        },
        {
            "name": "Lebanon",
            "code": "LBN"
        },
        {
            "name": "Lesotho",
            "code": "LSO"
        },
        {
            "name": "Liberia",
            "code": "LBR"
        },
        {
            "name": "Libya",
            "code": "LBY"
        },
        {
            "name": "Liechtenstein",
            "code": "LIE"
        },
        {
            "name": "Lithuania",
            "code": "LTU"
        },
        {
            "name": "Luxembourg",
            "code": "LUX"
        },
        {
            "name": "Macao",
            "code": "MAC"
        },
        {
            "name": "Macedonia (the former Yugoslav Republic of)",
            "code": "MKD"
        },
        {
            "name": "Madagascar",
            "code": "MDG"
        },
        {
            "name": "Malawi",
            "code": "MWI"
        },
        {
            "name": "Malaysia",
            "code": "MYS"
        },
        {
            "name": "Maldives",
            "code": "MDV"
        },
        {
            "name": "Mali",
            "code": "MLI"
        },
        {
            "name": "Malta",
            "code": "MLT"
        },
        {
            "name": "Marshall Islands",
            "code": "MHL"
        },
        {
            "name": "Martinique",
            "code": "MTQ"
        },
        {
            "name": "Mauritania",
            "code": "MRT"
        },
        {
            "name": "Mauritius",
            "code": "MUS"
        },
        {
            "name": "Mayotte",
            "code": "MYT"
        },
        {
            "name": "Mexico",
            "code": "MEX"
        },
        {
            "name": "Micronesia (Federated States of)",
            "code": "FSM"
        },
        {
            "name": "Moldova (Republic of)",
            "code": "MDA"
        },
        {
            "name": "Monaco",
            "code": "MCO"
        },
        {
            "name": "Mongolia",
            "code": "MNG"
        },
        {
            "name": "Montenegro",
            "code": "MNE"
        },
        {
            "name": "Montserrat",
            "code": "MSR"
        },
        {
            "name": "Morocco",
            "code": "MAR"
        },
        {
            "name": "Mozambique",
            "code": "MOZ"
        },
        {
            "name": "Myanmar",
            "code": "MMR"
        },
        {
            "name": "Namibia",
            "code": "NAM"
        },
        {
            "name": "Nauru",
            "code": "NRU"
        },
        {
            "name": "Nepal",
            "code": "NPL"
        },
        {
            "name": "Netherlands",
            "code": "NLD"
        },
        {
            "name": "New Caledonia",
            "code": "NCL"
        },
        {
            "name": "New Zealand",
            "code": "NZL"
        },
        {
            "name": "Nicaragua",
            "code": "NIC"
        },
        {
            "name": "Niger",
            "code": "NER"
        },
        {
            "name": "Nigeria",
            "code": "NGA"
        },
        {
            "name": "Niue",
            "code": "NIU"
        },
        {
            "name": "Norfolk Island",
            "code": "NFK"
        },
        {
            "name": "Northern Mariana Islands",
            "code": "MNP"
        },
        {
            "name": "Norway",
            "code": "NOR"
        },
        {
            "name": "Oman",
            "code": "OMN"
        },
        {
            "name": "Pakistan",
            "code": "PAK"
        },
        {
            "name": "Palau",
            "code": "PLW"
        },
        {
            "name": "Palestine, State of",
            "code": "PSE"
        },
        {
            "name": "Panama",
            "code": "PAN"
        },
        {
            "name": "Papua New Guinea",
            "code": "PNG"
        },
        {
            "name": "Paraguay",
            "code": "PRY"
        },
        {
            "name": "Peru",
            "code": "PER"
        },
        {
            "name": "Philippines",
            "code": "PHL"
        },
        {
            "name": "Pitcairn",
            "code": "PCN"
        },
        {
            "name": "Poland",
            "code": "POL"
        },
        {
            "name": "Portugal",
            "code": "PRT"
        },
        {
            "name": "Puerto Rico",
            "code": "PRI"
        },
        {
            "name": "Qatar",
            "code": "QAT"
        },
        {
            "name": "Réunion",
            "code": "REU"
        },
        {
            "name": "Romania",
            "code": "ROU"
        },
        {
            "name": "Russian Federation",
            "code": "RUS"
        },
        {
            "name": "Rwanda",
            "code": "RWA"
        },
        {
            "name": "Saint Barthélemy",
            "code": "BLM"
        },
        {
            "name": "Saint Helena, Ascension and Tristan da Cunha",
            "code": "SHN"
        },
        {
            "name": "Saint Kitts and Nevis",
            "code": "KNA"
        },
        {
            "name": "Saint Lucia",
            "code": "LCA"
        },
        {
            "name": "Saint Martin (French part)",
            "code": "MAF"
        },
        {
            "name": "Saint Pierre and Miquelon",
            "code": "SPM"
        },
        {
            "name": "Saint Vincent and the Grenadines",
            "code": "VCT"
        },
        {
            "name": "Samoa",
            "code": "WSM"
        },
        {
            "name": "San Marino",
            "code": "SMR"
        },
        {
            "name": "Sao Tome and Principe",
            "code": "STP"
        },
        {
            "name": "Saudi Arabia",
            "code": "SAU"
        },
        {
            "name": "Senegal",
            "code": "SEN"
        },
        {
            "name": "Serbia",
            "code": "SRB"
        },
        {
            "name": "Seychelles",
            "code": "SYC"
        },
        {
            "name": "Sierra Leone",
            "code": "SLE"
        },
        {
            "name": "Singapore",
            "code": "SGP"
        },
        {
            "name": "Sint Maarten (Dutch part)",
            "code": "SXM"
        },
        {
            "name": "Slovakia",
            "code": "SVK"
        },
        {
            "name": "Slovenia",
            "code": "SVN"
        },
        {
            "name": "Solomon Islands",
            "code": "SLB"
        },
        {
            "name": "Somalia",
            "code": "SOM"
        },
        {
            "name": "South Africa",
            "code": "ZAF"
        },
        {
            "name": "South Georgia and the South Sandwich Islands",
            "code": "SGS"
        },
        {
            "name": "South Sudan",
            "code": "SSD"
        },
        {
            "name": "Spain",
            "code": "ESP"
        },
        {
            "name": "Sri Lanka",
            "code": "LKA"
        },
        {
            "name": "Sudan",
            "code": "SDN"
        },
        {
            "name": "Suriname",
            "code": "SUR"
        },
        {
            "name": "Svalbard and Jan Mayen",
            "code": "SJM"
        },
        {
            "name": "Swaziland",
            "code": "SWZ"
        },
        {
            "name": "Sweden",
            "code": "SWE"
        },
        {
            "name": "Switzerland",
            "code": "CHE"
        },
        {
            "name": "Syrian Arab Republic",
            "code": "SYR"
        },
        {
            "name": "Taiwan, Province of China",
            "code": "TWN"
        },
        {
            "name": "Tajikistan",
            "code": "TJK"
        },
        {
            "name": "Tanzania, United Republic of",
            "code": "TZA"
        },
        {
            "name": "Thailand",
            "code": "THA"
        },
        {
            "name": "Timor-Leste",
            "code": "TLS"
        },
        {
            "name": "Togo",
            "code": "TGO"
        },
        {
            "name": "Tokelau",
            "code": "TKL"
        },
        {
            "name": "Tonga",
            "code": "TON"
        },
        {
            "name": "Trinidad and Tobago",
            "code": "TTO"
        },
        {
            "name": "Tunisia",
            "code": "TUN"
        },
        {
            "name": "Turkey",
            "code": "TUR"
        },
        {
            "name": "Turkmenistan",
            "code": "TKM"
        },
        {
            "name": "Turks and Caicos Islands",
            "code": "TCA"
        },
        {
            "name": "Tuvalu",
            "code": "TUV"
        },
        {
            "name": "Uganda",
            "code": "UGA"
        },
        {
            "name": "Ukraine",
            "code": "UKR"
        },
        {
            "name": "United Arab Emirates",
            "code": "ARE"
        },
        {
            "name": "United Kingdom of Great Britain and Northern Ireland",
            "code": "GBR"
        },
        {
            "name": "United States of America",
            "code": "USA"
        },
        {
            "name": "United States Minor Outlying Islands",
            "code": "UMI"
        },
        {
            "name": "Uruguay",
            "code": "URY"
        },
        {
            "name": "Uzbekistan",
            "code": "UZB"
        },
        {
            "name": "Vanuatu",
            "code": "VUT"
        },
        {
            "name": "Venezuela (Bolivarian Republic of)",
            "code": "VEN"
        },
        {
            "name": "Viet Nam",
            "code": "VNM"
        },
        {
            "name": "Virgin Islands (British)",
            "code": "VGB"
        },
        {
            "name": "Virgin Islands (U.S.)",
            "code": "VIR"
        },
        {
            "name": "Wallis and Futuna",
            "code": "WLF"
        },
        {
            "name": "Western Sahara",
            "code": "ESH"
        },
        {
            "name": "Yemen",
            "code": "YEM"
        },
        {
            "name": "Zambia",
            "code": "ZMB"
        },
        {
            "name": "Zimbabwe",
            "code": "ZWE"
        },
    ])
def upgrade():
    """Seed the default gGRC roles: Reader, ObjectEditor and ProgramCreator.

    Inserts one row per role into ``roles_table``, with the permitted
    object-type names serialized into the ``permissions_json`` column.
    """
    # Object types an ObjectEditor may create/read/update/delete.
    # (The original's extra indentation grouped subtypes under Directive
    # and SystemOrProcess; the grouping is cosmetic only.)
    basic_objects_editable = [
        'Categorization',
        'Category',
        'Control',
        'ControlControl',
        'ControlSection',
        'Cycle',
        'DataAsset',
        'Directive',
        'Contract',
        'Policy',
        'Regulation',
        'DirectiveControl',
        'Document',
        'Facility',
        'Help',
        'Market',
        'Objective',
        # BUG FIX: a missing comma after 'ObjectControl' previously fused it
        # with the next entry into the bogus 'ObjectControlObjectDocument'
        # via implicit string concatenation, dropping both real permissions.
        'ObjectControl',
        'ObjectDocument',
        'ObjectObjective',
        'ObjectPerson',
        'ObjectSection',
        'Option',
        'OrgGroup',
        'PopulationSample',
        'Product',
        'Project',
        'Relationship',
        'RelationshipType',
        'Section',
        'SystemOrProcess',
        'System',
        'Process',
        'SystemControl',
        # NOTE(review): 'SystemSysetm' looks like a typo for 'SystemSystem';
        # kept as-is because these strings must match the model names the
        # permission checker uses -- verify against the model registry.
        'SystemSysetm',
        ]
    # Readers additionally see people, programs and roles (read-only extras).
    # ('Person' appeared twice in the original extension list; deduplicated.)
    basic_objects_readable = list(basic_objects_editable)
    basic_objects_readable.extend([
        'Person',
        'Program',
        'ProgramControl',
        'ProgramDirective',
        'Role',
        #'UserRole', ?? why?
        ])

    # One timestamp for all rows so created_at/updated_at agree.
    current_datetime = datetime.now()
    op.bulk_insert(roles_table,
        [
          { 'name': 'Reader',
            'description': 'This role grants a user basic, read-only, access '\
                'permission to a gGRC instance.',
            'permissions_json': json.dumps({
              'read': basic_objects_readable,
              }),
            'created_at': current_datetime,
            'updated_at': current_datetime,
          },
          { 'name': 'ObjectEditor',
            'description': 'This role grants a user basic object creation and '\
                'editing permission.',
            'permissions_json': json.dumps({
              'create': basic_objects_editable,
              'read':   basic_objects_readable,
              'update': basic_objects_editable,
              'delete': basic_objects_editable,
              }),
            'created_at': current_datetime,
            'updated_at': current_datetime,
          },
          { 'name': 'ProgramCreator',
            'description': 'This role grants a user the permission to create '\
                'public and private programs.',
            'permissions_json': json.dumps({
              'create': ['Program',],
              }),
            'created_at': current_datetime,
            'updated_at': current_datetime,
          },
        ])
def upgrade():
    """Create ``network_audit.site_parameter_baseline`` and register the
    built-in 'Site Parameter Discrepancies' rule under the existing
    'Network Baseline' audit category.
    """
    op.create_table('site_parameter_baseline',
                    sa.Column('pk', sa.Integer, primary_key=True),
                    sa.Column('node', sa.String(100), nullable=False),
                    sa.Column('site', sa.String(200), nullable=False),
                    sa.Column('mo', sa.String(100), nullable=False, default=0),
                    sa.Column('parameter', sa.String(100), nullable=False,
                              default=0),
                    sa.Column('bvalue', sa.String(200), nullable=False,
                              default=0),
                    sa.Column('nvalue', sa.String(100), nullable=False,
                              default=0),
                    sa.Column('vendor', sa.String(100), nullable=False,
                              default=0),
                    sa.Column('technology', sa.String(100), nullable=False,
                              default=0),
                    sa.Column('age', sa.Integer, nullable=False, default=0),
                    sa.Column('modified_by', sa.Integer),
                    sa.Column('added_by', sa.Integer),
                    sa.Column('date_added', sa.TIMESTAMP,
                              default=sa.func.now(),
                              onupdate=sa.func.now()),
                    sa.Column('date_modified', sa.TIMESTAMP,
                              default=sa.func.now()),
                    schema=u'network_audit')
    # Rename the auto-generated pk sequence to the project's seq_* convention.
    op.execute(
        'ALTER SEQUENCE  network_audit.site_parameter_baseline_pk_seq RENAME TO seq_site_parameter_baseline_pk'
    )

    # Ad-hoc table constructs so we can query/insert the existing audit
    # metadata tables without importing the application models.
    audit_categories = sa.sql.table(
        'audit_categories',
        sa.Column('pk', sa.Integer, sa.Sequence('seq_audit_categories_pk', ),
                  primary_key=True, nullable=False),
        sa.Column('name', sa.String(255), nullable=False),
        sa.Column('notes', sa.Text, nullable=False),
        sa.Column('parent_pk', sa.Integer, nullable=False, default=0),
        sa.Column('in_built', sa.Boolean, default=False),
        sa.Column('modified_by', sa.Integer, default=0),
        sa.Column('added_by', sa.Integer, default=0),
        sa.Column('date_added', sa.TIMESTAMP, default=sa.func.now(),
                  onupdate=sa.func.now()),
        sa.Column('date_modified', sa.TIMESTAMP, default=sa.func.now()),
    )

    audit_rules = sa.sql.table(
        'audit_rules',
        # NOTE(review): this reuses 'seq_audit_categories_pk' -- possibly a
        # copy-paste of the categories table; confirm the intended sequence.
        sa.Column('pk', sa.Integer, sa.Sequence('seq_audit_categories_pk', ),
                  primary_key=True, nullable=False),
        sa.Column('name', sa.String(255), nullable=False),
        sa.Column('notes', sa.Text, nullable=False),
        sa.Column('category_pk', sa.Integer, nullable=False, default=0),
        sa.Column('in_built', sa.Boolean, default=False),
        sa.Column('table_name', sa.String(255), nullable=False),
        sa.Column('sql', sa.Text, nullable=False),
        sa.Column('modified_by', sa.Integer),
        sa.Column('added_by', sa.Integer),
        sa.Column('date_added', sa.TIMESTAMP, default=sa.func.now(),
                  onupdate=sa.func.now()),
        sa.Column('date_modified', sa.TIMESTAMP, default=sa.func.now()),
    )

    # Look up the pk of the 'Network Baseline' category; falls back to 0
    # when no such row exists (last matching row wins, as before).
    bind = op.get_bind()
    matches = bind.execute(audit_categories.select().where(
        audit_categories.c.name == 'Network Baseline'))

    category_pk = 0
    for match in matches:
        category_pk = match['pk']

    op.bulk_insert(audit_rules, [
        {
            'name': 'Site Parameter Discrepancies',
            'category_pk': category_pk,
            'in_built': True,
            'table_name': 'baseline_site_parameters',
            'sql': 'SELECT * FROM network_audit.baseline_site_parameters',
            'notes': 'Network Baseline Discrepancies for Site parameters'
        },
    ])
# Exemple #59
# 0
def upgrade():
    """Create the Mintos transaction-type lookup table and seed it with
    every transaction type that can appear in a Mintos account statement.
    """
    transaction_types = op.create_table(
        'p2p_lending_platforms_mintos_transaction_types',
        sa.Column('id', sa.INTEGER, primary_key=True, nullable=False),
        sa.Column('name', sa.String, unique=True, nullable=False))

    # NOTE(review): some names carry a double space (e.g. before
    # '- Invest & Access'); reproduced verbatim, presumably to match the
    # strings Mintos exports -- confirm before "fixing".
    type_names = (
        'Deposits',
        'Withdrawals',
        'Withdrawal cancelled',
        'Cashback bonus',
        'Refer a friend bonus',
        'Outgoing currency exchange transaction',
        'Incoming currency exchange transaction',
        'FX commission',
        'Investment in loan',
        'Investment in loan  - Invest & Access',
        'Loan payment: principal received',
        'Loan payment: interest received',
        'Loan agreement amended: principal received',
        'Loan agreement amended: interest received',
        'Loan early repayment: principal received',
        'Loan early repayment: interest received',
        'Loan agreement terminated: principal received',
        'Loan agreement terminated: interest received',
        'Loan buyback: principal received',
        'Loan buyback: interest received',
        'Loan buyback: late payment interest received',
        'Loan agreement extended: principal received',
        'Loan agreement extended: interest received',
        'Loan late fees received',
        'Other: principal received',
        'Other: interest received',
        'Other: late payment interest received',
        'Secondary market transaction',
        'Secondary market transaction  - Invest & Access',
        'Discount for secondary market transaction',
        'Premium for secondary market transaction',
        'Cumulative repurchases of loan parts',
    )
    op.bulk_insert(transaction_types,
                   [{'name': name} for name in type_names])
def upgrade():
    """Add the Ocun brand (EUR sizing) and seed its climbing-shoe models."""
    # Ad-hoc table constructs so bulk_insert can target the existing tables
    # without importing the application models.
    # brand 14 ocun
    brand_table = table('brand', column('name', String),
                        column('sizing', String))
    op.bulk_insert(brand_table, [{'name': 'ocun', 'sizing': 'EUR'}])

    item_table = table('item', column('model', String),
                       column('brand_id', Integer),
                       column('gender_id', Integer))

    # Assumes the brand row inserted above receives pk 14, matching earlier
    # migrations' numbering -- TODO confirm against the brand table.
    OCUN_BRAND_ID = 14
    # (model, gender_id) pairs; gender ids presumably follow the project's
    # gender table (2/3/4) -- verify against that seed data.
    ocun_models = [
        ('diamond', 3),
        ('ozone', 3),
        ('ozone plus', 3),
        ('ozone lady', 2),
        ('oxi', 3),
        ('rebel qc', 3),
        ('rebel lu', 3),
        ('pearl lu', 3),
        ('strike qc', 3),
        ('strike lu', 3),
        ('crest qc', 3),
        ('crest lu', 3),
        ('rental qc', 3),
        ('hero qc', 4),
    ]
    # BUG FIX: brand_id/gender_id were string literals ('14', '3') bound to
    # Integer columns; pass real ints instead of relying on implicit driver
    # coercion.
    op.bulk_insert(item_table, [{
        'model': model,
        'brand_id': OCUN_BRAND_ID,
        'gender_id': gender_id,
    } for model, gender_id in ocun_models])