class QuoteByDaySession(TokyoCEHistoricalDBBaseModel):
    """ORM model for "Quotes for All Products by Session (Day Session)".

    One row per (update timestamp, trade date, product, contract month);
    prices are stored as FLOAT(15) with two decimal places.
    """

    __tablename__ = 'quotes_by_day_session'
    _db_configuration = TokyoCEHistoricalDbConnectionConfig()

    # --- composite primary key ---
    update_date = orm.Column(orm.Date, primary_key=True)
    update_time = orm.Column(orm.Time, primary_key=True)
    trade_date = orm.Column(orm.Date, primary_key=True)
    product_code = orm.Column(orm.Text, primary_key=True)
    contract_month = orm.Column(orm.VARCHAR(20), primary_key=True)

    # --- descriptive attributes ---
    institutions_code = orm.Column(orm.VARCHAR(10))
    trade_type = orm.Column(orm.VARCHAR(255))
    strike_price = orm.Column(orm.FLOAT(precision=15, decimal_return_scale=2))

    # --- session status flags ---
    at_the_money_flag = orm.Column(orm.VARCHAR(10))
    volume_fix_flag = orm.Column(orm.VARCHAR(10))
    settlement_flag = orm.Column(orm.VARCHAR(10))
    session_end_flag = orm.Column(orm.VARCHAR(10))

    # --- quote values ---
    start_price = orm.Column(orm.FLOAT(precision=15, decimal_return_scale=2))
    high_price = orm.Column(orm.FLOAT(precision=15, decimal_return_scale=2))
    low_price = orm.Column(orm.FLOAT(precision=15, decimal_return_scale=2))
    current_price = orm.Column(orm.FLOAT(precision=15, decimal_return_scale=2))
    last_settlement_price = orm.Column(
        orm.FLOAT(precision=15, decimal_return_scale=2))
    offset_from_previous_day = orm.Column(
        orm.FLOAT(precision=15, decimal_return_scale=2))
    irrelevant_column = orm.Column(orm.Text)
    settlement_price = orm.Column(
        orm.FLOAT(precision=15, decimal_return_scale=2))
    volume = orm.Column(orm.FLOAT(precision=15, decimal_return_scale=2))
    volume_total_by_products = orm.Column(
        orm.FLOAT(precision=15, decimal_return_scale=2))
def downgrade():
    """Recreate the 'object_house' table removed by the upgrade."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'object_house',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('developer_id', sa.INTEGER(), nullable=True),
        sa.Column('housing_complex_id', sa.INTEGER(), nullable=True),
        sa.Column('house_id', sa.INTEGER(), nullable=True),
        sa.Column('room', sa.INTEGER(), nullable=True),
        sa.Column('square', sa.FLOAT(), nullable=True),
        sa.Column('price', sa.FLOAT(), nullable=True),
        sa.Column('price_meter', sa.FLOAT(), nullable=True),
        sa.Column('floor', sa.INTEGER(), nullable=True),
        sa.Column('floor_number', sa.INTEGER(), nullable=True),
        sa.Column('house_number', sa.INTEGER(), nullable=True),
        sa.Column('section_number', sa.INTEGER(), nullable=True),
        sa.Column('type_studio', sa.VARCHAR(length=16), nullable=True),
        sa.Column('type', sa.VARCHAR(length=32), nullable=True),
        sa.Column('decoration', sa.VARCHAR(length=16), nullable=True),
        sa.Column('price_discont', sa.FLOAT(), nullable=True),
        sa.Column('source', sa.VARCHAR(length=128), nullable=True),
        sa.Column('house_name', sa.VARCHAR(length=128), nullable=True),
        # Re-establish the three lookup relationships.
        sa.ForeignKeyConstraint(['developer_id'], ['developer.id'], ),
        sa.ForeignKeyConstraint(['house_id'], ['house.id'], ),
        sa.ForeignKeyConstraint(['housing_complex_id'], ['housing_complex.id'], ),
        sa.PrimaryKeyConstraint('id'),
    )
def upgrade():
    """Create the agc_guide_offset table and add versioning/algorithm columns
    to the agc_exposure and mcs_exposure tables.

    FIX: the DB comment for 'guide_delta_el' contained a raw line break inside
    a single-quoted string literal, which is a SyntaxError in Python; the
    string is rejoined onto one line.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('agc_guide_offset',
    sa.Column('agc_exposure_id', sa.Integer(), autoincrement=False, nullable=False, comment='AGC exposure number identifier'),
    sa.Column('guide_ra', sa.FLOAT(), nullable=True, comment='The designed FoV R.A. center [deg.]'),
    sa.Column('guide_dec', sa.FLOAT(), nullable=True, comment='The designed FoV Dec. center [deg.]'),
    sa.Column('guide_pa', sa.REAL(), nullable=True, comment='The designed FoV PA [deg.]'),
    sa.Column('guide_delta_ra', sa.REAL(), nullable=True, comment='The calculated FoV R.A. offset [arcsec.]'),
    sa.Column('guide_delta_dec', sa.REAL(), nullable=True, comment='The calculated FoV Dec. offset [arcsec.]'),
    sa.Column('guide_delta_insrot', sa.REAL(), nullable=True, comment='The calculated InsRot offset [arcsec.]'),
    sa.Column('guide_delta_az', sa.REAL(), nullable=True, comment='The calculated Az offset [arcsec.] (optional)'),
    sa.Column('guide_delta_el', sa.REAL(), nullable=True, comment='The calculated El offset [arcsec.] (optional)'),
    sa.Column('guide_delta_z', sa.REAL(), nullable=True, comment='The calculated focus offset [mm]'),
    sa.Column('guide_delta_z1', sa.REAL(), nullable=True, comment='The calculated focus offset for AGC1 [mm]'),
    sa.Column('guide_delta_z2', sa.REAL(), nullable=True, comment='The calculated focus offset for AGC2 [mm]'),
    sa.Column('guide_delta_z3', sa.REAL(), nullable=True, comment='The calculated focus offset for AGC3 [mm]'),
    sa.Column('guide_delta_z4', sa.REAL(), nullable=True, comment='The calculated focus offset for AGC4 [mm]'),
    sa.Column('guide_delta_z5', sa.REAL(), nullable=True, comment='The calculated focus offset for AGC5 [mm]'),
    sa.Column('guide_delta_z6', sa.REAL(), nullable=True, comment='The calculated focus offset for AGC6 [mm]'),
    sa.ForeignKeyConstraint(['agc_exposure_id'], ['agc_exposure.agc_exposure_id'], ),
    sa.PrimaryKeyConstraint('agc_exposure_id')
    )
    # Clear the existing column comment on agc_data.image_moment_00_pix.
    op.alter_column('agc_data', 'image_moment_00_pix',
                    existing_type=sa.REAL(),
                    comment='',
                    existing_nullable=True)
    # New provenance columns, mirrored on both exposure tables.
    op.add_column('agc_exposure', sa.Column('measurement_algorithm', sa.String(), nullable=True, comment='Spot measurement algorithm (windowed/sep)'))
    op.add_column('agc_exposure', sa.Column('version_actor', sa.String(), nullable=True, comment='Version of the actor'))
    op.add_column('agc_exposure', sa.Column('version_instdata', sa.String(), nullable=True, comment='Version of the pfs_instdata'))
    op.add_column('mcs_exposure', sa.Column('measurement_algorithm', sa.String(), nullable=True, comment='Spot measurement algorithm (windowed/sep)'))
    op.add_column('mcs_exposure', sa.Column('version_actor', sa.String(), nullable=True, comment='Version of the actor'))
    op.add_column('mcs_exposure', sa.Column('version_instdata', sa.String(), nullable=True, comment='Version of the pfs_instdata'))
def downgrade():
    """Revert daily_reports to per-report columns and restore latest_report."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('daily_reports', schema=None) as batch_op:
        # Restore the original per-day columns...
        batch_op.add_column(sa.Column('deaths', sa.INTEGER(), nullable=True))
        batch_op.add_column(
            sa.Column('province', sa.VARCHAR(length=100), nullable=True))
        batch_op.add_column(sa.Column('confirmed', sa.INTEGER(), nullable=True))
        batch_op.add_column(sa.Column('recovered', sa.INTEGER(), nullable=True))
        # ...and drop the aggregate/rate columns added by the upgrade.
        batch_op.drop_column('total_recovered')
        batch_op.drop_column('total_deaths')
        batch_op.drop_column('total_confirmed')
        batch_op.drop_column('total_active')
        batch_op.drop_column('new_recovered')
        batch_op.drop_column('new_deaths')
        batch_op.drop_column('new_confirmed')
        batch_op.drop_column('increase_rate')
        batch_op.drop_column('death_rate')

    op.create_table(
        'latest_report',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('country', sa.VARCHAR(length=100), nullable=True),
        sa.Column('report_date', sa.DATE(), nullable=True),
        sa.Column('total_confirmed', sa.INTEGER(), nullable=True),
        sa.Column('total_deaths', sa.INTEGER(), nullable=True),
        sa.Column('total_recovered', sa.INTEGER(), nullable=True),
        sa.Column('active', sa.INTEGER(), nullable=True),
        sa.Column('new_confirmed', sa.INTEGER(), nullable=True),
        sa.Column('new_deaths', sa.INTEGER(), nullable=True),
        sa.Column('new_recovered', sa.INTEGER(), nullable=True),
        sa.Column('death_rate', sa.FLOAT(), nullable=True),
        sa.Column('increase_rate', sa.FLOAT(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
def downgrade():
    """Recreate the singular 'minor_planet' table and drop 'minor_planets'."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'minor_planet',
        sa.Column('id', sa.INTEGER(), nullable=False),
        # Photometric parameters.
        sa.Column('magnitude_H', sa.FLOAT(), nullable=True),
        sa.Column('magnitude_G', sa.FLOAT(), nullable=True),
        sa.Column('epoch', sa.VARCHAR(length=6), nullable=True),
        # Orbital elements.
        sa.Column('mean_anomaly_degrees', sa.FLOAT(), nullable=True),
        sa.Column('argument_of_perihelion_degrees', sa.FLOAT(), nullable=True),
        sa.Column('longitude_of_ascending_node_degrees', sa.FLOAT(), nullable=True),
        sa.Column('inclination_degrees', sa.FLOAT(), nullable=True),
        sa.Column('eccentricity', sa.FLOAT(), nullable=True),
        sa.Column('mean_daily_motion_degrees', sa.FLOAT(), nullable=True),
        sa.Column('semimajor_axis_au', sa.FLOAT(), nullable=True),
        # Observation / provenance metadata.
        sa.Column('uncertainty', sa.VARCHAR(length=6), nullable=True),
        sa.Column('reference', sa.VARCHAR(length=10), nullable=True),
        sa.Column('observations', sa.INTEGER(), nullable=True),
        sa.Column('oppositions', sa.INTEGER(), nullable=True),
        sa.Column('observation_period', sa.VARCHAR(length=9), nullable=True),
        sa.Column('rms_residual_arcseconds', sa.FLOAT(), nullable=True),
        sa.Column('coarse_perturbers', sa.VARCHAR(length=4), nullable=True),
        sa.Column('precise_perturbers', sa.VARCHAR(length=4), nullable=True),
        sa.Column('computer_name', sa.VARCHAR(length=11), nullable=True),
        sa.Column('hex_flags', sa.VARCHAR(length=5), nullable=True),
        sa.Column('designation', sa.VARCHAR(length=30), nullable=True),
        sa.Column('last_observation_date', sa.VARCHAR(length=9), nullable=True),
        sa.PrimaryKeyConstraint('id', name='pk_minor_planet'),
    )
    op.drop_table('minor_planets')
def downgrade():
    """Detach user from run and restore the lap/timezone/day/race tables."""
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): the FK constraint name is None — this relies on the
    # backend resolving the unnamed constraint; confirm on non-SQLite targets.
    op.drop_constraint(None, 'run', type_='foreignkey')
    op.drop_column('run', 'user_id')

    op.create_table(
        'lap',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('avg_speed', sa.NUMERIC(), nullable=True),
        sa.Column('max_speed', sa.NUMERIC(), nullable=True),
        sa.Column('start_position_lat', sa.FLOAT(), nullable=True),
        sa.Column('start_position_long', sa.FLOAT(), nullable=True),
        sa.Column('end_position_lat', sa.FLOAT(), nullable=True),
        sa.Column('end_position_long', sa.FLOAT(), nullable=True),
        sa.Column('total_ascent', sa.NUMERIC(), nullable=True),
        sa.Column('total_descent', sa.NUMERIC(), nullable=True),
        sa.Column('total_distance', sa.NUMERIC(), nullable=True),
        sa.Column('start_time', sa.DATETIME(), nullable=True),
        sa.Column('total_timer_time', sa.NUMERIC(), nullable=True),
        sa.Column('total_elapsed_time', sa.NUMERIC(), nullable=True),
        sa.Column('run_id', sa.INTEGER(), nullable=True),
        sa.ForeignKeyConstraint(['run_id'], ['run.id'], ),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'timezone',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('name', sa.VARCHAR(), nullable=True),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'day',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('sunrise', sa.DATETIME(), nullable=True),
        sa.Column('sunset', sa.DATETIME(), nullable=True),
        sa.Column('date', sa.DATE(), nullable=True),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'race',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('name', sa.VARCHAR(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name'))
def downgrade():
    """Make the optics measurement columns NOT NULL again."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('lenses', schema=None) as batch_op:
        batch_op.alter_column('diameter_inch',
                              existing_type=sa.FLOAT(),
                              nullable=False)
        batch_op.alter_column('magnification',
                              existing_type=sa.FLOAT(),
                              nullable=False)

    with op.batch_alter_table('filters', schema=None) as batch_op:
        batch_op.alter_column('diameter_inch',
                              existing_type=sa.FLOAT(),
                              nullable=False)

    with op.batch_alter_table('eyepieces', schema=None) as batch_op:
        batch_op.alter_column('diameter_inch',
                              existing_type=sa.FLOAT(),
                              nullable=False)
        batch_op.alter_column('fov_deg',
                              existing_type=sa.INTEGER(),
                              nullable=False)
        batch_op.alter_column('focal_length_mm',
                              existing_type=sa.FLOAT(),
                              nullable=False)
class LocationModel(Base):
    """ORM model for a geocoded address ('Localizacoes' table).

    Each location belongs to a neighborhood (Bairros) via `bairro_id`.

    FIX: `search_by_lat_long` used `cls` but was missing the `@classmethod`
    decorator its sibling finders have, so calling it on the class would have
    bound `lat` to `cls` and raised/misbehaved.
    """

    __tablename__ = 'Localizacoes'

    id = db.Column(db.Integer, primary_key=True)
    lat = db.Column(db.FLOAT(precision=32, decimal_return_scale=None))
    long = db.Column(db.FLOAT(precision=32, decimal_return_scale=None))
    setor_cens = db.Column(db.Integer(), nullable=False)   # census sector
    area_pond = db.Column(db.Integer(), nullable=False)    # weighting area
    endereco = db.Column(db.String(300), nullable=False)   # street address
    numero = db.Column(db.String(10), nullable=False)      # street number
    referencia = db.Column(db.String(300))                 # optional landmark
    bairro_id = db.Column(db.Integer, db.ForeignKey('Bairros.id'),
                          nullable=False)

    def __init__(self, lat, long, setor_cens, area_pond, endereco, numero,
                 referencia, bairro_id):
        self.lat = lat
        self.long = long
        self.setor_cens = setor_cens
        self.area_pond = area_pond
        self.endereco = endereco
        self.numero = numero
        self.referencia = referencia
        self.bairro_id = bairro_id

    def json_children(self):
        """Serialize this location, embedding its parent neighborhood."""
        ParentBairro = BairroModel.search_bairro_id(
            self.bairro_id).json_children()
        return {
            'id': self.id,
            'latitude': self.lat,
            'long': self.long,
            'setor_censitario': self.setor_cens,
            'area_ponderada': self.area_pond,
            'endereco': self.endereco,
            'numero': self.numero,
            'referencia': self.referencia,
            'bairro': ParentBairro
        }

    @classmethod
    def search_by_name(cls, name):
        """Return the first location whose address matches `name`, or None."""
        return cls.query.filter_by(endereco=name).first()

    @classmethod  # FIX: decorator was missing in the original
    def search_by_lat_long(cls, lat, long):
        """Return the first location at the exact (lat, long), or None."""
        return cls.query.filter_by(lat=lat, long=long).first()

    @classmethod
    def search_location_by_id(cls, id):
        """Return the location with the given primary key, or None."""
        return cls.query.filter_by(id=id).first()

    def save_to_db(self):
        """Persist this instance."""
        Base.session.add(self)
        Base.session.commit()

    def delete_from_db(self):
        """Delete this instance."""
        Base.session.delete(self)
        Base.session.commit()
def upgrade():
    """Tighten init_data: NOT NULL columns, unique sku_name, FK to project."""
    # ### commands auto generated by Alembic - please adjust! ###
    # op.drop_table('hbu_algeria')
    # op.drop_table('zero_price_level')
    # op.drop_table('hbu_saudi')
    with op.batch_alter_table('init_data', schema=None) as batch_op:
        # Promote every data column to NOT NULL.
        batch_op.alter_column('base_price',
                              existing_type=sa.FLOAT(),
                              nullable=False)
        batch_op.alter_column('group_brand',
                              existing_type=sa.TEXT(),
                              nullable=False)
        batch_op.alter_column('group_manufacturer',
                              existing_type=sa.TEXT(),
                              nullable=False)
        batch_op.alter_column('group_segments',
                              existing_type=sa.TEXT(),
                              nullable=False)
        batch_op.alter_column('is_active',
                              existing_type=sa.BIGINT(),
                              nullable=False)
        batch_op.alter_column('new_price',
                              existing_type=sa.FLOAT(),
                              nullable=False)
        batch_op.alter_column('project_id',
                              existing_type=sa.BIGINT(),
                              nullable=False)
        batch_op.alter_column('sku_name',
                              existing_type=sa.TEXT(),
                              nullable=False)
        # Enforce one row per SKU and link each row to its project.
        batch_op.create_unique_constraint(batch_op.f('uq_init_data_sku_name'),
                                          ['sku_name'])
        batch_op.create_foreign_key(
            batch_op.f('fk_init_data_project_id_project'),
            'project', ['project_id'], ['id'])
def downgrade():
    """Restore NOT NULL on the return/last_update columns of both tables."""
    # ### commands auto generated by Alembic - please adjust! ###
    # The same trio of columns is tightened on 'position' and 'portfolio'.
    for table in ('position', 'portfolio'):
        op.alter_column(table, 'total_return_pct',
                        existing_type=sa.FLOAT(),
                        nullable=False)
        op.alter_column(table, 'today_return_pct',
                        existing_type=sa.FLOAT(),
                        nullable=False)
        op.alter_column(table, 'last_update',
                        existing_type=sa.DATETIME(),
                        nullable=False)
def upgrade():
    """Make project.msu and project.vsu NOT NULL."""
    # ### commands auto generated by Alembic - please adjust! ###
    # op.drop_table('hbu_saudi')
    # op.drop_table('zero_price_level')
    with op.batch_alter_table('project', schema=None) as batch_op:
        batch_op.alter_column('msu',
                              existing_type=sa.FLOAT(),
                              nullable=False)
        batch_op.alter_column('vsu',
                              existing_type=sa.FLOAT(),
                              nullable=False)
def upgrade():
    """Create the jira_projects, jira_velocity and jira_work_stat tables."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'jira_projects',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('project_key', sa.VARCHAR(length=256), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('project_key'))

    # NOTE(review): 'id' is nullable and not part of the PK here (sprint_id
    # is the PK) — confirm that is intended and not an autogen artifact.
    op.create_table(
        'jira_velocity',
        sa.Column('id', sa.Integer(), nullable=True),
        sa.Column('project_key', sa.Integer(), nullable=False),
        sa.Column('sprint_id', sa.INTEGER(), nullable=False),
        sa.Column('name', sa.VARCHAR(length=256), nullable=False),
        sa.Column('commitment', sa.FLOAT(), nullable=False),
        sa.Column('completed', sa.FLOAT(), nullable=False),
        sa.Column('start_at', sa.TIMESTAMP(), nullable=False),
        sa.Column('end_at', sa.TIMESTAMP(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['project_key'], ['jira_projects.id'], ),
        sa.PrimaryKeyConstraint('sprint_id'),
        sa.UniqueConstraint('sprint_id'))

    op.create_table(
        'jira_work_stat',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('project_id', sa.Integer(), nullable=False),
        sa.Column('sprint_id', sa.Integer(), nullable=False),
        sa.Column('worklog', sa.INTEGER(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['project_id'], ['jira_projects.id'], ),
        sa.ForeignKeyConstraint(['sprint_id'], ['jira_velocity.sprint_id'], ),
        sa.PrimaryKeyConstraint('id'))
def downgrade():
    """Swap shift_report back from RMS columns to emittance columns."""
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('shift_report',
                  sa.Column('y_emittance_li20', sa.FLOAT(), nullable=True))
    op.add_column('shift_report',
                  sa.Column('x_emittance_li20', sa.FLOAT(), nullable=True))
    op.drop_column('shift_report', 'y_rms_li20')
    op.drop_column('shift_report', 'x_rms_li20')
def downgrade():
    """Recreate solar_nodes keyed on (longitude, latitude)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'solar_nodes',
        sa.Column('longitude', sa.FLOAT(), nullable=False),
        sa.Column('latitude', sa.FLOAT(), nullable=False),
        # SQLite-specific: silently skip rows that collide on the PK.
        sa.PrimaryKeyConstraint('longitude', 'latitude',
                                sqlite_on_conflict='IGNORE'))
def downgrade():
    """Restore the subscriber-percentage columns on list_stats."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('list_stats', schema=None) as batch_op:
        batch_op.add_column(
            sa.Column('cur_yr_sub_pct', sa.FLOAT(), nullable=True))
        batch_op.add_column(
            sa.Column('cur_yr_sub_open_rt', sa.FLOAT(), nullable=True))
        batch_op.drop_column('cur_yr_inactive_pct')
def downgrade():
    """Recreate the 'search' table and drop 'location'."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'search',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('search_query', sa.VARCHAR(length=512), nullable=True),
        sa.Column('formatted_query', sa.VARCHAR(length=256), nullable=True),
        sa.Column('latitude', sa.FLOAT(), nullable=True),
        sa.Column('longitude', sa.FLOAT(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('search_query'))
    op.drop_table('location')
def downgrade():
    """Re-add 'price' to the three user tables; drop user_table.closed."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('user_table_archive', schema=None) as batch_op:
        batch_op.add_column(sa.Column('price', sa.FLOAT(), nullable=True))

    with op.batch_alter_table('user_table', schema=None) as batch_op:
        batch_op.add_column(sa.Column('price', sa.FLOAT(), nullable=True))
        batch_op.drop_column('closed')

    with op.batch_alter_table('user_product', schema=None) as batch_op:
        batch_op.add_column(sa.Column('price', sa.FLOAT(), nullable=True))
def upgrade():
    """Create the courses/ecosystems/pages/responses schema with its
    ecosystem-matrix storage and lookup indexes."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'courses',
        sa.Column('uuid', postgresql.UUID(), nullable=False),
        sa.Column('metadata_sequence_number', sa.INTEGER(), nullable=False),
        sa.Column('sequence_number', sa.INTEGER(), nullable=False),
        sa.PrimaryKeyConstraint('uuid'))
    op.create_index(op.f('ix_courses_metadata_sequence_number'), 'courses',
                    ['metadata_sequence_number'], unique=True)

    # Sparse-matrix storage: parallel data/row/col arrays per matrix.
    op.create_table(
        'ecosystem_matrices',
        sa.Column('uuid', postgresql.UUID(), nullable=False),
        sa.Column('Q_ids', postgresql.ARRAY(postgresql.UUID()), nullable=False),
        sa.Column('C_ids', postgresql.ARRAY(postgresql.UUID()), nullable=False),
        sa.Column('d_data', postgresql.ARRAY(sa.FLOAT()), nullable=False),
        sa.Column('w_data', postgresql.ARRAY(sa.FLOAT()), nullable=False),
        sa.Column('w_row', postgresql.ARRAY(sa.INTEGER()), nullable=False),
        sa.Column('w_col', postgresql.ARRAY(sa.INTEGER()), nullable=False),
        sa.Column('h_mask_data', postgresql.ARRAY(sa.BOOLEAN()), nullable=False),
        sa.Column('h_mask_row', postgresql.ARRAY(sa.INTEGER()), nullable=False),
        sa.Column('h_mask_col', postgresql.ARRAY(sa.INTEGER()), nullable=False),
        sa.PrimaryKeyConstraint('uuid'))

    op.create_table(
        'ecosystems',
        sa.Column('uuid', postgresql.UUID(), nullable=False),
        sa.Column('metadata_sequence_number', sa.INTEGER(), nullable=False),
        sa.Column('sequence_number', sa.INTEGER(), nullable=False),
        sa.PrimaryKeyConstraint('uuid'))
    op.create_index(op.f('ix_ecosystems_metadata_sequence_number'),
                    'ecosystems', ['metadata_sequence_number'], unique=True)

    op.create_table(
        'pages',
        sa.Column('uuid', postgresql.UUID(), nullable=False),
        sa.Column('ecosystem_uuid', postgresql.UUID(), nullable=False),
        sa.Column('exercise_uuids', postgresql.ARRAY(postgresql.UUID()),
                  nullable=False),
        sa.PrimaryKeyConstraint('uuid'))
    op.create_index(op.f('ix_pages_ecosystem_uuid'), 'pages',
                    ['ecosystem_uuid'], unique=False)

    op.create_table(
        'responses',
        sa.Column('uuid', postgresql.UUID(), nullable=False),
        sa.Column('course_uuid', postgresql.UUID(), nullable=False),
        sa.Column('ecosystem_uuid', postgresql.UUID(), nullable=False),
        sa.Column('trial_uuid', postgresql.UUID(), nullable=False),
        sa.Column('student_uuid', postgresql.UUID(), nullable=False),
        sa.Column('exercise_uuid', postgresql.UUID(), nullable=False),
        sa.Column('is_correct', sa.BOOLEAN(), nullable=False),
        sa.Column('is_real_response', sa.BOOLEAN(), nullable=False),
        sa.Column('responded_at', postgresql.TIMESTAMP(), nullable=False),
        sa.PrimaryKeyConstraint('uuid'))
    # Lookup indexes; only trial_uuid is unique.
    op.create_index(op.f('ix_responses_course_uuid'), 'responses',
                    ['course_uuid'], unique=False)
    op.create_index(op.f('ix_responses_ecosystem_uuid'), 'responses',
                    ['ecosystem_uuid'], unique=False)
    op.create_index(op.f('ix_responses_exercise_uuid'), 'responses',
                    ['exercise_uuid'], unique=False)
    op.create_index(op.f('ix_responses_student_uuid'), 'responses',
                    ['student_uuid'], unique=False)
    op.create_index(op.f('ix_responses_trial_uuid'), 'responses',
                    ['trial_uuid'], unique=True)
def upgrade():
    """Create the artifact-tracking schema: sets, set membership, runs and
    the artifacts table itself."""
    op.create_table(
        'artifact_set_members',
        sa.Column('set_id', sa.VARCHAR(length=40), nullable=False),
        sa.Column('artifact_id', sa.VARCHAR(length=40), nullable=False),
        sa.PrimaryKeyConstraint('set_id', 'artifact_id'),
    )
    op.create_table(
        'artifact_sets',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('set_id', sa.VARCHAR(length=40), nullable=True),
        sa.Column('name', sa.VARCHAR(length=1000), nullable=True),
        sa.Column('created_at', pg.TIMESTAMP(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table(
        'runs',
        sa.Column('id', sa.VARCHAR(length=40), nullable=False),
        sa.Column('hostname', sa.VARCHAR(length=256), nullable=True),
        sa.Column('info', pg.JSONB(), nullable=True),
        sa.Column('created_at', pg.TIMESTAMP(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table(
        'artifacts',
        sa.Column('id', sa.VARCHAR(length=40), nullable=False),
        sa.Column('value_id', sa.VARCHAR(length=50), nullable=True),
        sa.Column('run_id', sa.VARCHAR(length=40), nullable=True),
        sa.Column('name', sa.VARCHAR(length=1000), nullable=True),
        sa.Column('version', sa.INTEGER(), nullable=True),
        # Producing function identity.
        sa.Column('fn_module', sa.VARCHAR(length=100), nullable=True),
        sa.Column('fn_name', sa.VARCHAR(length=100), nullable=True),
        sa.Column('composite', sa.BOOLEAN(), nullable=True),
        # Timing breakdown (seconds).
        sa.Column('value_id_duration', sa.FLOAT(), nullable=True),
        sa.Column('compute_duration', sa.FLOAT(), nullable=True),
        sa.Column('hash_duration', sa.FLOAT(), nullable=True),
        sa.Column('computed_at', pg.TIMESTAMP(), nullable=True),
        sa.Column('added_at', pg.TIMESTAMP(), nullable=True),
        # Lineage / serialization metadata.
        sa.Column('input_artifact_ids', pg.ARRAY(pg.VARCHAR(length=40)),
                  nullable=True),
        sa.Column('inputs_json', pg.JSONB(), nullable=True),
        sa.Column('serializer', sa.VARCHAR(length=128), nullable=True),
        sa.Column('load_kwargs', pg.JSONB(), nullable=True),
        sa.Column('dump_kwargs', pg.JSONB(), nullable=True),
        sa.Column('custom_fields', pg.JSONB(), nullable=True),
        sa.ForeignKeyConstraint(['run_id'], ['runs.id'], ),
        sa.PrimaryKeyConstraint('id'),
    )
def downgrade():
    """Convert the net-worth columns back from NUMERIC(scale=2) to FLOAT."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('user', schema=None) as batch_op:
        batch_op.alter_column('networth',
                              existing_type=sa.Numeric(scale=2),
                              type_=sa.FLOAT(),
                              existing_nullable=True)

    with op.batch_alter_table('account', schema=None) as batch_op:
        batch_op.alter_column('account_networth',
                              existing_type=sa.Numeric(scale=2),
                              type_=sa.FLOAT(),
                              existing_nullable=True)
def upgrade():
    """Create the sprint-velocity table."""
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): the hyphen in 'jira-velocity' forces quoting in raw SQL;
    # confirm this name (vs. jira_velocity) is intentional.
    op.create_table(
        'jira-velocity',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('project_key', sa.VARCHAR(length=256), nullable=False),
        sa.Column('sprint_id', sa.INTEGER(), nullable=False),
        sa.Column('name', sa.VARCHAR(length=256), nullable=False),
        sa.Column('commitment', sa.FLOAT(), nullable=False),
        sa.Column('completed', sa.FLOAT(), nullable=False),
        sa.Column('start_at', sa.TIMESTAMP(), nullable=False),
        sa.Column('end_at', sa.TIMESTAMP(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name'),
        sa.UniqueConstraint('sprint_id'))
def downgrade():
    """Collapse per-age-group price columns back into two flat price columns."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('product',
                  sa.Column('selling_price', sa.FLOAT(), nullable=True))
    op.add_column('product',
                  sa.Column('stock_price', sa.FLOAT(), nullable=True))
    op.drop_column('product', 'stock_price_child')
    op.drop_column('product', 'stock_price_baby')
    op.drop_column('product', 'stock_price_adult')
    op.drop_column('product', 'selling_price_child')
    op.drop_column('product', 'selling_price_baby')
    op.drop_column('product', 'selling_price_adult')
def downgrade():
    """Recreate the params/results/steps tables and remove the users table.

    FIX: 'steps' declares a foreign key to 'results.id' but was created
    BEFORE 'results'; on backends that enforce FK targets at DDL time this
    fails. 'results' is now created first.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'params',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('behavioral_risk_reduction', sa.FLOAT(), nullable=True),
        sa.Column('potential_isolation_effectiveness', sa.FLOAT(),
                  nullable=True),
        sa.Column('run_id', sa.VARCHAR(), nullable=True),
        sa.ForeignKeyConstraint(['run_id'], ['runs.id'], ),
        sa.PrimaryKeyConstraint('id'))
    # Created before 'steps' so its FK target exists.
    op.create_table(
        'results',
        sa.Column('id', sa.VARCHAR(), nullable=False),
        sa.Column('json_data', sa.VARCHAR(), nullable=True),
        sa.Column('run_id', sa.VARCHAR(), nullable=True),
        sa.ForeignKeyConstraint(['run_id'], ['runs.id'], ),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'steps',
        sa.Column('id', sa.VARCHAR(), nullable=False),
        sa.Column('time', sa.FLOAT(), nullable=True),
        sa.Column('susceptible', sa.FLOAT(), nullable=True),
        sa.Column('exposed', sa.FLOAT(), nullable=True),
        sa.Column('infected', sa.FLOAT(), nullable=True),
        sa.Column('recovered', sa.FLOAT(), nullable=True),
        sa.Column('deaths', sa.FLOAT(), nullable=True),
        sa.Column('results_id', sa.VARCHAR(), nullable=True),
        sa.ForeignKeyConstraint(['results_id'], ['results.id'], ),
        sa.PrimaryKeyConstraint('id'))
    # Drop the users table and its indexes added by the upgrade.
    op.drop_index(op.f('ix_users_username'), table_name='users')
    op.drop_index(op.f('ix_users_email'), table_name='users')
    op.drop_table('users')
def downgrade():
    """Recreate transaction_history and drop the old 'transations' table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'transaction_history',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('user_id', sa.INTEGER(), nullable=True),
        sa.Column('symbol', sa.VARCHAR(length=20), nullable=True),
        sa.Column('shares', sa.INTEGER(), nullable=True),
        sa.Column('price', sa.FLOAT(), nullable=True),
        sa.Column('cost', sa.FLOAT(), nullable=True),
        sa.Column('date_time', sa.DATETIME(), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
        sa.PrimaryKeyConstraint('id'))
    # Table name is misspelled in the original schema; kept byte-for-byte.
    op.drop_table('transations')
def downgrade():
    """Restore NOT NULL on the core transaction_history columns."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('transaction_history', 'shares',
                    existing_type=sa.INTEGER(),
                    nullable=False)
    op.alter_column('transaction_history', 'price',
                    existing_type=sa.FLOAT(),
                    nullable=False)
    op.alter_column('transaction_history', 'date_time',
                    existing_type=sa.DATETIME(),
                    nullable=False)
    op.alter_column('transaction_history', 'cost',
                    existing_type=sa.FLOAT(),
                    nullable=False)
def downgrade():
    """Recreate the stock_transaction table dropped by the upgrade."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'stock_transaction',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('date', sa.DATE(), nullable=True),
        sa.Column('symbol', sa.VARCHAR(length=60), nullable=True),
        sa.Column('quantity', sa.FLOAT(), nullable=True),
        sa.Column('price_per_share', sa.FLOAT(), nullable=True),
        sa.Column('transaction_fee', sa.FLOAT(), nullable=True),
        sa.Column('transaction_type', sa.VARCHAR(length=60), nullable=True),
        sa.Column('user_id', sa.INTEGER(), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
        sa.PrimaryKeyConstraint('id'))
def upgrade():
    """Create the OHLCV minute-bar table and the perpetual-funding table.

    FIX: 'perpfunding' declared the "ts" column twice (once plain, once
    NOT NULL); the duplicate is removed, keeping the stricter NOT NULL
    definition. The redundant trailing `pass` is also dropped.
    """
    op.create_table(
        "bars_1_min",
        sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
        sa.Column("ts", sa.Integer),            # bar timestamp
        sa.Column("exchange", sa.CHAR(8)),
        sa.Column("symbol", sa.CHAR(12)),
        sa.Column("open", sa.FLOAT(8)),
        sa.Column("high", sa.FLOAT(8)),
        sa.Column("low", sa.FLOAT(8)),
        sa.Column("close", sa.FLOAT(8)),
        sa.Column("volume", sa.FLOAT(8)),
        sa.Column("optional1", sa.FLOAT(8))
    )
    op.create_table(
        "perpfunding",
        sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
        sa.Column("exchange", sa.CHAR(8), nullable=False),
        sa.Column("symbol", sa.CHAR(12), nullable=False),
        sa.Column("ts", sa.Integer, nullable=False),  # funding timestamp
        sa.Column("value", sa.FLOAT(8), nullable=False),
        sa.Column("optional1", sa.FLOAT(8))
    )
def upgrade():
    """Create invoices_item and backfill it from Invoices.invoice_data JSON."""
    # ### commands auto generated by Alembic - please adjust! ###
    invoice_items = op.create_table(
        'invoices_item',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('description', sa.VARCHAR(), nullable=True),
        sa.Column('count', sa.FLOAT(), nullable=True),
        sa.Column('cost', sa.FLOAT(), nullable=True),
        sa.Column('parent_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['parent_id'], ['invoices.id'], ),
        sa.PrimaryKeyConstraint('id'),
        mysql_engine='InnoDB')

    # Migrate existing invoice data into the new invoice-item table:
    # read every legacy row, then flatten each invoice's serialized
    # ITEMS mapping into one row per item.
    conn = op.get_bind()
    legacy_rows = conn.execute(
        "select id, invoice_data from Invoices").fetchall()

    new_rows = []
    for row in legacy_rows:
        invoice_id = row[0]
        items = json.loads(row[1])['ITEMS']
        for item_key in items:
            entry = items[item_key]
            new_rows.append({
                'description': entry['comment'],
                'count': entry['count'],
                'cost': entry['price'],
                'parent_id': int(invoice_id),
            })

    # Bulk-insert the flattened rows into invoices_item.
    op.bulk_insert(invoice_items, new_rows)
def ToAlc(clazz) -> type:
    """Convert a table class defined with the nn metadata system into an
    SQLAlchemy (alc) declarative model class.

    Returns the generated class, or None when `clazz` carries no table
    info. Results are memoized in `_alc_cache` keyed by a sanitized
    module-qualified name, so each source class is converted only once.
    """
    ti = GetTableInfo(clazz)
    if not ti:
        return None
    # Cache key: module + class name, dots replaced so it is a valid
    # identifier usable as the generated class name.
    fullnm = '_arc_' + clazz.__module__ + '_' + clazz.__name__
    fullnm = fullnm.replace('.', '_')
    if fullnm in _alc_cache:
        return _alc_cache[fullnm]
    # Build the attribute dict for the new declarative class.
    defs = {'__tablename__': ti.table}
    # Add one Column per declared field, mapping nn field flags to
    # SQLAlchemy column types.
    fps = GetFieldInfos(clazz)
    for k in fps:
        fp: FieldOption = fps[k]
        cols = []
        kwcols = {}
        dc = None
        if fp.string:
            # VARCHAR when a length is given, otherwise unbounded TEXT.
            if fp.len:
                dc = alc.VARCHAR(fp.len)
            else:
                dc = alc.TEXT()
        elif fp.integer:
            dc = alc.INT()
        elif fp.double or fp.number:
            dc = alc.FLOAT()
        elif fp.boolean:
            # Booleans are stored as MySQL TINYINT(1).
            dc = mysqltypes.TINYINT(1)
        elif fp.json or fp.array or fp.map:
            # Structured values are serialized into a JSON column.
            dc = alc.JSON()
        elif fp.intfloat:
            dc = alc.FLOAT()
        # NOTE(review): if none of the flags above match, dc stays None and
        # alc.Column(None, ...) is constructed — confirm this is intended.
        cols.append(dc)
        if fp.primary:
            kwcols['primary_key'] = True
        if fp.notnull:
            kwcols['nullable'] = False
        if fp.autoinc:
            kwcols['autoincrement'] = True
        defs[k] = alc.Column(*cols, **kwcols)
    # NOTE(review): declarative_base() is called per generated class, so each
    # model gets its own metadata/registry — confirm that is intended.
    clz = type(fullnm, (declarative_base(), ), defs)
    _alc_cache[fullnm] = clz
    return clz
def downgrade():
    """Recreate the warehouse node table and its self-referential edge list."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'warehouse',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('x', sa.FLOAT(), nullable=True),
        sa.Column('y', sa.FLOAT(), nullable=True),
        sa.PrimaryKeyConstraint('id'))
    # Edge endpoints both reference warehouse nodes; no PK on this table.
    op.create_table(
        'edges',
        sa.Column('node_1', sa.INTEGER(), nullable=True),
        sa.Column('node_2', sa.INTEGER(), nullable=True),
        sa.ForeignKeyConstraint(['node_1'], ['warehouse.id'], ),
        sa.ForeignKeyConstraint(['node_2'], ['warehouse.id'], ))