def get_columns(self, connection, table_name, schema=None, **kw):
    """Return SQLAlchemy column descriptions for *table_name*.

    Extends the set of types supported by PrestoDialect as defined in
    PyHive. Unrecognized Presto types fall back to ``NullType`` with a
    warning instead of failing reflection.
    """
    type_map = {
        'bigint': sql_types.BigInteger,
        'integer': sql_types.Integer,
        'boolean': sql_types.Boolean,
        'double': sql_types.Float,
        'varchar': sql_types.String,
        'timestamp': sql_types.TIMESTAMP,
        'date': sql_types.DATE,
        'array<bigint>': sql_types.ARRAY(sql_types.Integer),
        'array<varchar>': sql_types.ARRAY(sql_types.String)
    }

    rows = self._get_table_columns(connection, table_name, schema)
    result = []
    for row in rows:
        try:
            coltype = type_map[row.Type]
        except KeyError:
            # Fix: Logger.warn is a deprecated alias of warning(); also use
            # lazy %-style args so formatting is skipped when not emitted.
            logger.warning("Did not recognize type '%s' of column '%s'",
                           row.Type, row.Column)
            coltype = sql_types.NullType
        result.append({
            'name': row.Column,
            'type': coltype,
            # newer Presto no longer includes this column
            'nullable': getattr(row, 'Null', True),
            'default': None,
        })
    return result
def test_generic_array_type(self):
    """Generic ``sa.ARRAY`` renders without the postgresql dialect import;
    a dialect-specific element type (BYTEA) pulls the import in."""
    render_type = autogenerate.render._repr_type
    pg_import = "from sqlalchemy.dialects import postgresql"

    eq_ignore_whitespace(
        render_type(types.ARRAY(Integer), self.autogen_context),
        "sa.ARRAY(sa.Integer())",
    )
    eq_ignore_whitespace(
        render_type(
            types.ARRAY(DateTime(timezone=True)), self.autogen_context
        ),
        "sa.ARRAY(sa.DateTime(timezone=True))",
    )
    # Generic element types must not have registered the dialect import.
    assert pg_import not in self.autogen_context.imports

    eq_ignore_whitespace(
        render_type(
            types.ARRAY(BYTEA, as_tuple=True, dimensions=2),
            self.autogen_context,
        ),
        "sa.ARRAY(postgresql.BYTEA(), as_tuple=True, dimensions=2)",
    )
    # The BYTEA element type requires the postgresql dialect import.
    assert pg_import in self.autogen_context.imports
class SQLTable(base):
    # Declarative model for the "items" table; column names mirror the keys
    # of the source item JSON (presumably Riot-style static item data --
    # verify against the loader that populates this table).
    __tablename__ = "items"

    # Surrogate primary key for the row.
    data_id = Column(types.INTEGER, primary_key=True)
    item_version = Column(types.VARCHAR)
    item_item = Column(types.INTEGER)
    item_name = Column(types.VARCHAR)
    item_description = Column(types.VARCHAR)
    item_colloq = Column(types.VARCHAR)
    item_plaintext = Column(types.VARCHAR)
    # Item IDs this item builds into / is built from.
    item_into = Column(types.ARRAY(types.INTEGER))
    item_image = Column(types.JSON)
    item_gold = Column(types.JSON)
    item_tags = Column(types.ARRAY(types.VARCHAR))
    item_maps = Column(types.JSON)
    item_stats = Column(types.JSON)
    item_from = Column(types.ARRAY(types.INTEGER))
    item_depth = Column(types.INTEGER)
    item_effect = Column(types.JSON)
    item_hideFromAll = Column(types.BOOLEAN)
    item_stacks = Column(types.DECIMAL)
    item_consumed = Column(types.BOOLEAN)
    item_inStore = Column(types.BOOLEAN)
    item_consumeOnFull = Column(types.BOOLEAN)
    item_specialRecipe = Column(types.DECIMAL)
    item_requiredChampion = Column(types.VARCHAR)
    item_requiredAlly = Column(types.VARCHAR)
    item_group = Column(types.VARCHAR)
    item_altimages = Column(types.JSON)
class Tunnel(db.Model):  # type: ignore
    """An SSH tunnel bound to a subdomain, with its allocated ports."""
    id = db.Column(db.Integer, primary_key=True)
    port = db.Column(types.ARRAY(types.String()))
    allocated_tcp_ports = db.Column(types.ARRAY(types.Integer()))
    subdomain_id = db.Column(db.Integer, db.ForeignKey("subdomain.id"))
    ssh_port = db.Column(db.Integer)
    job_id = db.Column(db.String(64))
    ip_address = db.Column(db.String(32))

    # Eager-load the subdomain with the tunnel in a single joined query.
    subdomain = db.relationship("Subdomain", backref="tunnel", lazy="joined")
    # Convenience shortcut: tunnel.user -> tunnel.subdomain.user
    user = association_proxy("subdomain", "user")

    def __repr__(self):
        return f"<Tunnel {self.subdomain} {self.job_id}>"
def test_querying_table(metadata):
    """ Create an object for test table. """
    # When using pytest-xdist, we don't want concurrent table creations
    # across test processes so we assign a unique name for table based on
    # the current worker id.
    worker_id = os.environ.get('PYTEST_XDIST_WORKER', 'master')
    table_name = 'test_querying_table_' + worker_id
    columns = [
        Column('id', types.Integer, autoincrement=True, primary_key=True),
        Column('t_string', types.String(60), onupdate='updated'),
        Column('t_list', types.ARRAY(types.String(60))),
        Column('t_enum', types.Enum(MyEnum)),
        Column('t_int_enum', types.Enum(MyIntEnum)),
        Column('t_datetime', types.DateTime()),
        Column('t_date', types.DateTime()),
        Column('t_interval', types.Interval()),
        Column('uniq_uuid', PG_UUID, nullable=False, unique=True, default=uuid4),
    ]
    return Table(table_name, metadata, *columns)
def get_columns(self, connection, table_name, schema=None, **kw):
    """Reflect the columns of a BigQuery table as SQLAlchemy column dicts.

    Nested fields are flattened first; REPEATED fields become ARRAY types
    and unrecognized types degrade to NullType with a warning.
    """
    table = self._get_table(connection, table_name, schema)
    flattened = self._get_columns_helper(table.schema, [])
    descriptions = []
    for field in flattened:
        try:
            coltype = _type_map[field.field_type]
        except KeyError:
            util.warn("Did not recognize type '%s' of column '%s'"
                      % (field.field_type, field.name))
            coltype = types.NullType
        # REPEATED mode means the column holds an array of the base type.
        if field.mode == "REPEATED":
            coltype = types.ARRAY(coltype)
        descriptions.append({
            "name": field.name,
            "type": coltype,
            "nullable": field.mode in ("NULLABLE", "REPEATED"),
            "comment": field.description,
            "default": None,
        })
    return descriptions
class ServiceLine(DBModel):
    """One service line of a claim (table ``gentem.service_line``)."""
    __tablename__ = 'service_line'
    __table_args__ = {"schema": 'gentem'}

    # Bug fix: `default=datetime.utcnow()` was evaluated once at import time,
    # stamping every row with the process start time. Passing the callable
    # makes SQLAlchemy evaluate it per insert.
    updated_at = Column(types.DateTime, default=datetime.utcnow)
    procedure_code = Column(types.Integer)
    charges = Column(types.Float)
    units = Column(types.Integer)
    modifiers = Column(types.ARRAY(types.String(255)))
    diagnosis_codes = Column(types.ARRAY(types.String(255)))
    claim_id = Column(types.String(36), ForeignKey('gentem.claim.id'))
    service_date_from = Column(types.Date)
    service_date_to = Column(types.Date)
    place_of_service = Column(types.String(255))
    emergency_indicator = Column(types.Boolean)
    id_qualifier = Column(types.String(255))
    rendering_provider_id = Column(types.String(255))
    epsdt_family_plan = Column(types.String(255))
class Item(Base):
    # Kitchen-sink model exercising one column per common SQLAlchemy type
    # (presumably a test/demo fixture -- verify usage).
    __tablename__ = 'item'

    id = Column(types.Integer, primary_key=True)
    # NOTE(review): attribute name shadows the builtin `str`; harmless as a
    # mapped column name but avoid referencing it as a type in this scope.
    str = Column(types.String)
    date = Column(types.Date)
    time = Column(types.Time)
    datetime = Column(types.DateTime)
    data = Column(types.LargeBinary)
    boolean = Column(types.Boolean)
    array = Column(types.ARRAY(types.Interval))
def write_asic_details(driver, engine, linked_id, company_name):
    """Scrape ASIC details for one company and append them to ``asic.asx``.

    New columns found in the scraped frame are added to the table first
    (DATE for date-ish names, TEXT[] for former_names, TEXT otherwise).

    Returns True on success, False on any failure (best-effort contract).
    """
    try:
        df = extract_asic_details(driver, linked_id, company_name)
        inspector = inspect(engine)
        # Renamed from `types` to avoid shadowing a same-named import.
        col_types = {'linked_id': st.Integer(), 'company_name': st.Text()}
        table_exists = 'asx' in inspector.get_table_names(schema="asic")
        current_cols_sql = """SELECT column_name FROM information_schema.columns WHERE table_schema = 'asic' AND table_name = 'asx' """
        current_cols = pd.read_sql(current_cols_sql, engine)['column_name'].tolist()
        for col in df.columns:
            # Decide the SQLAlchemy dtype and the DDL type together so the
            # ALTER TABLE logic is written once instead of three times.
            if re.search('(^date_|_date$|_date_)', col):
                col_types[col] = st.Date()
                ddl_type = "DATE"
            elif col == 'former_names':
                col_types[col] = st.ARRAY(st.Text(), dimensions=1)
                ddl_type = "TEXT[]"
            else:
                col_types[col] = st.Text()
                ddl_type = "TEXT"
            if col not in current_cols and table_exists:
                # NOTE(review): column names originate from scraped data and
                # are interpolated into DDL (cannot be parameterized) --
                # ensure they are sanitized/whitelisted upstream.
                engine.execute(
                    "ALTER TABLE asic.asx ADD COLUMN " + col + " " + ddl_type)
        df.to_sql('asx', engine, schema="asic", if_exists="append",
                  index=False, dtype=col_types)
        return True
    except Exception:
        # Best-effort: report failure instead of raising. The bare `except:`
        # is narrowed to Exception so SystemExit/KeyboardInterrupt propagate.
        return False
def make_table(df):
    """Replace the ``data`` table with *df* and set its primary key."""
    # NOTE(review): credentials are masked placeholders; inject via env/config.
    engine = create_engine(
        'postgresql://*****:*****@localhost:5432/test')
    table = df.to_sql('data', engine, if_exists='replace', index=False,
                      dtype={
                          'Country': types.String(),
                          'Confirmed': types.ARRAY(types.Integer),
                          'StartDate': types.String(),
                          'EndDate': types.String(),
                          'CurrConfirmed': types.Integer(),
                          'Death': types.ARRAY(types.Integer),
                          'CurrDeath': types.Integer(),
                          'Recovery': types.ARRAY(types.Integer),
                          # NOTE(review): 'CurrEecovery' looks like a typo for
                          # 'CurrRecovery' -- confirm against df's columns
                          # before renaming (the DB column name would change).
                          'CurrEecovery': types.Integer(),
                          'Active': types.ARRAY(types.Integer),
                          'CurrActive': types.Integer(),
                          'Rank': types.Integer()
                      })
    # NOTE(review): `connection` is not defined in this function -- presumably
    # a module-level DB-API connection; verify it targets the same database
    # as `engine` above.
    with connection.cursor() as cursor:
        cursor.execute('ALTER TABLE data ADD PRIMARY KEY ("Country");')
def stdtype_to_sqltype(stdtype):
    """Map a ``stdtypes`` type instance to an SQLAlchemy type instance.

    Raises:
        TypeError: if *stdtype* is not one of the supported std types
            (previously the function silently fell through returning None,
            which surfaced as confusing downstream errors).
    """
    import sqlalchemy.types as sqltypes
    if isinstance(stdtype, stdtypes.StringType):
        # Bounded strings become VARCHAR; unbounded/huge ones become TEXT.
        if 0 < stdtype.max_len < 65536:
            return sqltypes.VARCHAR(length=stdtype.max_len)
        return sqltypes.TEXT()
    if isinstance(stdtype, stdtypes.BoolType):
        return sqltypes.BOOLEAN()
    if isinstance(stdtype, stdtypes.DateType):
        return sqltypes.DATE() if stdtype.only_date else sqltypes.TIMESTAMP()
    if isinstance(stdtype, stdtypes.IntegerType):
        return sqltypes.BIGINT() if stdtype.length > 11 else sqltypes.INTEGER()
    if isinstance(stdtype, stdtypes.DecimalType):
        return sqltypes.DECIMAL()
    if isinstance(stdtype, stdtypes.ArrayType):
        # Bug fix: the element type must itself be converted -- passing the
        # raw stdtype would hand SQLAlchemy a non-TypeEngine item_type.
        return sqltypes.ARRAY(item_type=stdtype_to_sqltype(stdtype.item_type))
    raise TypeError("Unsupported stdtype: %r" % (stdtype,))
class FaceEncoding(db.Model):
    # One face-recognition encoding vector per row, linked to a Player by
    # their slack_id.
    id = db.Column(db.Integer, primary_key=True)
    player_id = db.Column('player_id', db.String(16), db.ForeignKey('player.slack_id'))
    player = db.relationship('Player', back_populates='face_encodings', lazy='select')
    # Float vector produced by the face-encoding pipeline; length is not
    # enforced here (presumably 128-d -- verify against the producer).
    encoding = db.Column(types.ARRAY(db.Float))

    def serialize(self):
        """Return a JSON-serializable dict view of this encoding."""
        return {
            "id": self.id,
            "player_name": self.player.username,
            "face_encoding": self.encoding
        }
def get_columns(self, connection, table_name, schema=None, **kw):
    """Reflect the columns of a BigQuery table as SQLAlchemy column dicts.

    REPEATED fields become ARRAY types; unrecognized field types degrade
    to NullType with a warning.
    """
    table = self._get_table(connection, table_name, schema)
    columns = table.schema
    result = []
    for col in columns:
        try:
            coltype = _type_map[col.field_type]
        except KeyError:
            util.warn("Did not recognize type '%s' of column '%s'"
                      % (col.field_type, col.name))
            # Bug fix: without this fallback, `coltype` was undefined on the
            # first unrecognized column (NameError) or silently reused the
            # previous column's type on later iterations.
            coltype = types.NullType
        result.append({
            'name': col.name,
            'type': types.ARRAY(coltype) if col.mode == 'REPEATED' else coltype,
            'nullable': col.mode == 'NULLABLE' or col.mode == 'REPEATED',
            'default': None,
        })
    return result
class BizCircle(AlchemyMixin, Base):
    """A business circle (commercial district) crawled from listing data."""
    __tablename__ = 'biz_circles'

    id = Column(types.Integer, primary_key=True)
    # A business circle may sit near multiple administrative districts,
    # e.g. "Andingmen" appears under both Xicheng and Dongcheng districts --
    # hence district_id is an array, not a scalar FK.
    city_id = Column(types.Integer, ForeignKey(City.id), nullable=False)
    district_id = Column(types.ARRAY(types.Integer, dimensions=1), nullable=False)
    name = Column(types.String(32), nullable=False)
    quan_pin = Column(types.String(100), nullable=False)
    communities_count = Column(types.Integer, nullable=False, default=0)
    updated_at = Column(types.DateTime, nullable=False, default=datetime.now)
    communities_updated_at = Column(types.DateTime)

    def __init__(self, city_id, district_id, info):
        """Build from a crawled ``info`` dict; the single *district_id* is
        wrapped in a list to seed the array column."""
        self.id = int(info['bizcircle_id'])
        self.city_id = city_id
        self.district_id = [district_id]
        self.name = info['bizcircle_name']
        self.quan_pin = info['bizcircle_quanpin']
def get_columns(self, connection, table_name, schema=None, **kw):
    """Reflect a BigQuery table's columns, including precision/scale and
    max_length details for parameterized types.

    Nested fields are flattened first; REPEATED fields become ARRAY types
    and unrecognized types degrade to NullType with a warning.
    """
    table = self._get_table(connection, table_name, schema)
    flattened = self._get_columns_helper(table.schema, [])
    specs = []
    for field in flattened:
        try:
            coltype = _type_map[field.field_type]
        except KeyError:
            util.warn("Did not recognize type '%s' of column '%s'"
                      % (field.field_type, field.name))
            coltype = types.NullType
        # Parameterized types carry their sizing info into the instance.
        if field.field_type.endswith("NUMERIC"):
            coltype = coltype(precision=field.precision, scale=field.scale)
        elif field.field_type in ("STRING", "BYTES"):
            coltype = coltype(field.max_length)
        specs.append({
            "name": field.name,
            "type": types.ARRAY(coltype) if field.mode == "REPEATED" else coltype,
            "nullable": field.mode in ("NULLABLE", "REPEATED"),
            "comment": field.description,
            "default": None,
            "precision": field.precision,
            "scale": field.scale,
            "max_length": field.max_length,
        })
    return specs
}, { "name": "nested_record.record.name", "type": types.String(), "nullable": True, "default": None, }, { "name": "nested_record.record.age", "type": types.Integer(), "nullable": True, "default": None, }, { "name": "array", "type": types.ARRAY(types.Integer()), "nullable": True, "default": None, }, ] @pytest.fixture(scope="session") def engine(): engine = create_engine("bigquery://", echo=True) return engine @pytest.fixture(scope="session") def dialect(): return BigQueryDialect()
class Broup(db.Model):
    """
    A Bro group: membership, admin list, chat metadata and per-bro
    read/mute/leave state.
    """
    __tablename__ = 'Broup'
    id = db.Column(db.Integer, primary_key=True)
    broup_id = db.Column(db.Integer, unique=False)
    bro_id = db.Column(db.Integer, db.ForeignKey('Bro.id'))
    # ARRAY columns are always reassigned (never mutated in place) below so
    # SQLAlchemy's change tracking notices the update.
    bro_ids = db.Column(types.ARRAY(db.Integer))
    bro_admin_ids = db.Column(types.ARRAY(db.Integer))
    broup_name = db.Column(db.String)
    alias = db.Column(db.String)
    broup_description = db.Column(db.String)
    broup_colour = db.Column(db.String)
    room_name = db.Column(db.String)
    last_message_read_time_bro = db.Column(db.DateTime, index=True, default=datetime.utcnow)
    last_time_activity = db.Column(db.DateTime, index=True, default=datetime.utcnow)
    unread_messages = db.Column(db.Integer)
    mute = db.Column(db.Boolean, default=False)
    mute_timestamp = db.Column(db.DateTime)
    removed = db.Column(db.Boolean, default=False)
    is_left = db.Column(db.Boolean, default=False)

    def update_last_message_read_time_bro(self):
        self.last_message_read_time_bro = datetime.utcnow()

    def get_last_message_read_time_bro(self):
        return self.last_message_read_time_bro

    def update_unread_messages(self):
        self.unread_messages += 1

    def read_messages(self):
        self.unread_messages = 0

    def get_admins(self):
        return self.bro_admin_ids

    def get_alias(self):
        return self.alias

    def set_admins(self, bro_admin_ids):
        self.bro_admin_ids = bro_admin_ids

    def get_bro_id(self):
        return self.bro_id

    def get_broup_name(self):
        return self.broup_name

    # Fix: set_broup_name was defined twice with identical bodies; the
    # duplicate definition has been removed.
    def set_broup_name(self, broup_name):
        self.broup_name = broup_name

    def get_broup_colour(self):
        return self.broup_colour

    def add_participant(self, bro_id):
        """Append *bro_id*, reassigning a fresh list for change tracking."""
        current = self.bro_ids if self.bro_ids is not None else []
        self.bro_ids = list(current) + [bro_id]

    def get_participants(self):
        return self.bro_ids

    def set_participants(self, bro_ids):
        self.bro_ids = bro_ids

    def add_admin(self, bro_id):
        """Grant admin to *bro_id*, reassigning a fresh list."""
        current = self.bro_admin_ids if self.bro_admin_ids is not None else []
        self.bro_admin_ids = list(current) + [bro_id]

    def dismiss_admin(self, bro_id):
        """Revoke admin from *bro_id* (no-op if not an admin)."""
        current = self.bro_admin_ids if self.bro_admin_ids is not None else []
        self.bro_admin_ids = [admin for admin in current if admin != bro_id]

    def remove_bro(self, bro_id):
        """Remove *bro_id* from the group, revoking admin first if held."""
        # Fix: guard against bro_admin_ids being None, which previously made
        # the `in` test raise TypeError.
        if self.bro_admin_ids is not None and bro_id in self.bro_admin_ids:
            self.dismiss_admin(bro_id)
        current = self.bro_ids if self.bro_ids is not None else []
        self.bro_ids = [member for member in current if member != bro_id]

    def update_last_activity(self):
        self.last_time_activity = datetime.utcnow()

    def update_description(self, description):
        self.broup_description = description

    def get_broup_description(self):
        return self.broup_description

    def update_alias(self, alias):
        self.alias = alias

    def update_colour(self, colour):
        self.broup_colour = colour

    def mute_broup(self, mute):
        self.mute = mute

    def is_muted(self):
        return self.mute

    def check_mute(self):
        """Auto-unmute when the mute deadline has passed; True if unmuted."""
        # Fix: `datetime.now().utcnow()` built a throwaway instance just to
        # call the classmethod; datetime.utcnow() is the intended call.
        if self.mute_timestamp is not None and self.mute_timestamp < datetime.utcnow():
            self.set_mute_timestamp(None)
            self.mute_broup(False)
            return True
        return False

    def broup_removed(self):
        self.removed = True

    def is_removed(self):
        return self.removed

    def leave_broup(self):
        self.is_left = True
        self.unread_messages = 0

    def has_left(self):
        return self.is_left

    def rejoin(self):
        self.is_left = False
        self.removed = False

    def get_mute_timestamp(self):
        return self.mute_timestamp

    def set_mute_timestamp(self, mute_timestamp):
        self.mute_timestamp = mute_timestamp

    @property
    def serialize(self):
        """JSON-serializable dict view of this group."""
        return {
            'id': self.broup_id,
            'bro_id': self.bro_id,
            'bro_ids': self.bro_ids,
            'bro_admin_ids': self.bro_admin_ids,
            'broup_name': self.broup_name,
            'alias': self.alias,
            'broup_description': self.broup_description,
            'broup_colour': self.broup_colour,
            'unread_messages': self.unread_messages,
            'last_time_activity':
                self.last_time_activity.strftime('%Y-%m-%dT%H:%M:%S.%f'),
            'room_name': self.room_name,
            'left': self.is_left,
            'mute': self.mute
        }
name_default = 'default' t_list_default = ['foo', 'bar'] t_enum_default = MyEnum.ITEM_2 t_int_enum_default = MyIntEnum.ITEM_1 t_datetime_default = datetime(2017, 1, 1) t_date_default = date(2017, 1, 1) t_date_2_default = lambda: date(2017, 2, 1) t_interval_default = timedelta(seconds=60) t_boolean_default = True users = Table( 'users', metadata, Column('id', PG_UUID, unique=True, default=uuid4), Column('serial', types.Integer, Sequence("serial_seq")), Column('name', types.String(60), nullable=False, default=name_default), Column('t_list', types.ARRAY(types.String(60)), nullable=False, default=t_list_default), Column('t_enum', types.Enum(MyEnum), nullable=False, default=t_enum_default), Column('t_int_enum', types.Enum(MyIntEnum), nullable=False, default=t_int_enum_default), Column('t_datetime', types.DateTime(), nullable=False, default=t_datetime_default), Column('t_date', types.DateTime(), nullable=False, default=t_date_default),
# Explicit SQLAlchemy column types for the full-replace write of the scraped
# ASIC data; former_names is stored as a 1-dimensional TEXT[] array.
# NOTE(review): the name `types` shadows any module of that name imported
# earlier in this file -- verify nothing below relies on it.
types = {'linked_id': st.Integer(),
         'company_name': st.Text(),
         'company_name_asic': st.Text(),
         'acn': st.Text(),
         'abn': st.Text(),
         'previous_state_number': st.Text(),
         'previous_state_of_registration': st.Text(),
         'registration_date': st.Date(),
         'next_review_date': st.Date(),
         'status': st.Text(),
         'type': st.Text(),
         'locality_of_registered_office': st.Text(),
         'regulator': st.Text(),
         'former_names': st.ARRAY(st.Text(), dimensions = 1),
         'date_deregistered': st.Date(),
         'arbn': st.Text()
         }
# Drop and recreate asic.asx with the typed columns above.
asic_information.to_sql('asx', engine, schema="asic", if_exists="replace", index=False, dtype = types)
}, { 'name': 'nested_record.record.name', 'type': types.String(), 'nullable': True, 'default': None }, { 'name': 'nested_record.record.age', 'type': types.Integer(), 'nullable': True, 'default': None }, { 'name': 'array', 'type': types.ARRAY(types.Integer()), 'nullable': True, 'default': None }, ] @pytest.fixture(scope='session') def engine(): engine = create_engine('bigquery://', echo=True) return engine @pytest.fixture(scope='session') def engine_using_test_dataset(): engine = create_engine('bigquery:///test_pybigquery', echo=True)
{'name': 'string', 'type': types.String(), 'nullable': True, 'default': None}, {'name': 'float', 'type': types.Float(), 'nullable': True, 'default': None}, {'name': 'numeric', 'type': types.DECIMAL(), 'nullable': True, 'default': None}, {'name': 'boolean', 'type': types.Boolean(), 'nullable': True, 'default': None}, {'name': 'date', 'type': types.DATE(), 'nullable': True, 'default': None}, {'name': 'datetime', 'type': types.DATETIME(), 'nullable': True, 'default': None}, {'name': 'time', 'type': types.TIME(), 'nullable': True, 'default': None}, {'name': 'bytes', 'type': types.BINARY(), 'nullable': True, 'default': None}, {'name': 'record', 'type': types.JSON(), 'nullable': True, 'default': None}, {'name': 'record.name', 'type': types.String(), 'nullable': True, 'default': None}, {'name': 'record.age', 'type': types.Integer(), 'nullable': True, 'default': None}, {'name': 'nested_record', 'type': types.JSON(), 'nullable': True, 'default': None}, {'name': 'nested_record.record', 'type': types.JSON(), 'nullable': True, 'default': None}, {'name': 'nested_record.record.name', 'type': types.String(), 'nullable': True, 'default': None}, {'name': 'nested_record.record.age', 'type': types.Integer(), 'nullable': True, 'default': None}, {'name': 'array', 'type': types.ARRAY(types.Integer()), 'nullable': True, 'default': None}, ] @pytest.fixture(scope='session') def engine(): engine = create_engine('bigquery://', echo=True) return engine @pytest.fixture(scope='session') def engine_using_test_dataset(): engine = create_engine('bigquery:///test_pybigquery', echo=True) return engine
def test_should_array_convert():
    """An SQLAlchemy ARRAY column converts to a graphene List of Int."""
    converted = get_field(types.ARRAY(types.Integer))
    assert isinstance(converted.type, graphene.List)
    assert converted.type.of_type == graphene.Int
class BroBros(db.Model):
    """
    A connection between one bro and another bro.
    Here we store which bros are connected and also the last time both
    bros read the chat.
    """
    __tablename__ = 'BroBros'
    id = db.Column(db.Integer, primary_key=True)
    bro_id = db.Column(db.Integer, db.ForeignKey('Bro.id'))
    bros_bro_id = db.Column(db.Integer, db.ForeignKey('Bro.id'))
    chat_name = db.Column(db.String)
    alias = db.Column(db.String)
    chat_description = db.Column(db.String)
    chat_colour = db.Column(db.String)
    room_name = db.Column(db.String)
    last_message_read_time_bro = db.Column(db.DateTime, index=True, default=datetime.utcnow)
    last_time_activity = db.Column(db.DateTime, index=True, default=datetime.utcnow)
    unread_messages = db.Column(db.Integer)
    blocked = db.Column(db.Boolean, default=False)
    mute = db.Column(db.Boolean, default=False)
    removed = db.Column(db.Boolean, default=False)
    mute_timestamp = db.Column(db.DateTime)
    # History of every time this chat was blocked; reassigned (never mutated
    # in place) so SQLAlchemy's change tracking notices updates.
    blocked_timestamps = db.Column(types.ARRAY(db.DateTime))

    def update_unread_messages(self):
        self.unread_messages += 1

    def read_messages(self):
        self.unread_messages = 0

    def update_last_activity(self):
        self.last_time_activity = datetime.utcnow()

    def update_last_message_read_time_bro(self):
        self.last_message_read_time_bro = datetime.utcnow()

    def get_last_message_read_time_bro(self):
        return self.last_message_read_time_bro

    def update_description(self, description):
        self.chat_description = description

    def update_alias(self, alias):
        self.alias = alias

    def update_colour(self, colour):
        self.chat_colour = colour

    def block_chat(self, blocked):
        self.blocked = blocked

    def is_blocked(self):
        return self.blocked

    def mute_chat(self, mute):
        self.mute = mute

    def is_muted(self):
        return self.mute

    def check_mute(self):
        """Auto-unmute when the mute deadline has passed; True if unmuted."""
        # Fix: `datetime.now().utcnow()` built a throwaway instance just to
        # call the classmethod; datetime.utcnow() is the intended call.
        if self.mute_timestamp is not None and self.mute_timestamp < datetime.utcnow():
            self.set_mute_timestamp(None)
            self.mute_chat(False)
            return True
        return False

    def bro_removed(self):
        self.removed = True

    def is_removed(self):
        return self.removed

    def re_join(self):
        self.removed = False

    def add_blocked_timestamp(self):
        """Record a block event now, reassigning a fresh list."""
        current = self.blocked_timestamps if self.blocked_timestamps is not None else []
        self.blocked_timestamps = list(current) + [datetime.utcnow()]

    def get_blocked_timestamps(self):
        return self.blocked_timestamps

    def has_been_blocked(self):
        return self.blocked_timestamps is not None and len(
            self.blocked_timestamps) >= 1

    def get_mute_timestamp(self):
        return self.mute_timestamp

    def set_mute_timestamp(self, mute_timestamp):
        self.mute_timestamp = mute_timestamp

    def get_bros_bro_name_or_alias(self):
        """Prefer the user-set alias; fall back to the chat name."""
        if self.alias:
            return self.alias
        else:
            return self.chat_name

    @property
    def serialize(self):
        """JSON-serializable dict view of this chat connection."""
        return {
            'id': self.id,
            'bro_id': self.bro_id,
            'bros_bro_id': self.bros_bro_id,
            'chat_name': self.chat_name,
            'alias': self.alias,
            'chat_description': self.chat_description,
            'chat_colour': self.chat_colour,
            'unread_messages': self.unread_messages,
            'last_time_activity':
                self.last_time_activity.strftime('%Y-%m-%dT%H:%M:%S.%f'),
            'room_name': self.room_name,
            'blocked': self.blocked,
            'mute': self.mute
        }