class Update(Base):
    """A user-submitted status update for a store."""

    __tablename__ = "StoreUpdates"

    id = Column(Integer(), primary_key=True)
    storeId = Column(Integer(), ForeignKey(u'StoreInfos.id'))
    ip = Column(String(15))
    userId = Column(String(50))
    availabilityInfo = Column(Text())
    safetyInfo = Column(Text())
    openingTime = Column(DateTime)
    closingTime = Column(DateTime)
    createdAt = Column(DateTime(timezone=True))
    updatedAt = Column(DateTime(timezone=True))
    flag = Column(String(80), default=None)
    deleted = Column(Boolean(), default=False)
    reviewed = Column(Boolean(), default=False)

    # Joined eagerly so to_dict() can read store fields without extra queries.
    Store = relationship('Store', foreign_keys=[storeId], lazy='joined')

    def to_dict(self):
        """Serialize this update plus its store's name/location to a dict."""
        store = self.Store
        return {
            "id": self.id,
            "name": store.name,
            "latitude": store.latitude,
            "longitude": store.longitude,
            "address": store.address,
            "ip": self.ip,
            "availabilityInfo": self.availabilityInfo,
            "safetyInfo": self.safetyInfo,
            "openingTime": self.openingTime,
            "closingTime": self.closingTime,
            "flag": self.flag,
            "deleted": self.deleted,
        }
class Tweet(Base):
    """A stored tweet together with author stats captured at collection time."""

    __tablename__ = 'tweet'

    id = Column(BigInteger(), primary_key=True)
    created_at = Column(DateTime(), nullable=False, index=True)
    text = Column(Text(), nullable=False)
    source = Column(Text(), nullable=False)
    lang = Column(Text())

    # Author ("user_") fields.
    user_id = Column(BigInteger(), nullable=False, index=True)
    user_description = Column(Text())
    user_verified = Column(Boolean)
    user_followers_count = Column(BigInteger())
    user_friends_count = Column(BigInteger())
    user_listed_count = Column(BigInteger())
    user_statuses_count = Column(BigInteger())
    user_favorites_count = Column(BigInteger())
    user_created_at = Column(DateTime(), nullable=False)

    # References to related tweets/users (replies, quotes, retweets).
    in_reply_to_tweet_id = Column(BigInteger(), index=True)
    in_reply_to_user_id = Column(BigInteger(), index=True)
    quoted_tweet_id = Column(BigInteger(), index=True)
    rt_tweet_id = Column(BigInteger(), index=True)

    # Engagement counters.
    updated_at = Column(DateTime(), nullable=False, index=True)
    quote_count = Column(BigInteger())
    reply_count = Column(BigInteger())
    retweet_count = Column(BigInteger())
    favorite_count = Column(BigInteger())
def get_string_type(col_type, params):
    """Create a string type column.

    Args:
        col_type (str): Type of the column; one of 'char', 'json',
            'long_text', 'medium_text', 'string', 'text'.
        params (dict): Additional parameters; 'length' is read for the
            'char' and 'string' types.

    Returns:
        sqlalchemy.types.TypeEngine: String type like char or text.

    Raises:
        ValueError: If *col_type* is not one of the supported names.
            (Previously an unknown type silently fell through and
            returned None, deferring the failure to table creation.)
    """
    if col_type == 'char':
        return CHAR(params.get('length'))
    if col_type == 'json':
        # Native JSONB on PostgreSQL, plain Text on SQLite, generic JSON
        # everywhere else.
        return (
            JSON(none_as_null=True)
            .with_variant(JSONB(none_as_null=True), 'postgresql')
            .with_variant(Text(), 'sqlite')
        )
    if col_type == 'long_text':
        return LONGTEXT().with_variant(Text(), 'sqlite')
    if col_type == 'medium_text':
        return MEDIUMTEXT().with_variant(Text(), 'sqlite')
    if col_type == 'string':
        return String(length=params.get('length'))
    if col_type == 'text':
        return Text()
    raise ValueError("unsupported string column type: %r" % (col_type,))
class HuntQR(Base):
    """Associates a QR code with a hunt, plus its display metadata."""

    __tablename__ = "hunt_qr"

    id = Column(Integer, primary_key=True)
    hunt_id = Column(Integer, ForeignKey('hunt.id'))
    qr_id = Column(Integer, ForeignKey('qr.id'))
    image = Column(Text())
    name = Column(Text())
    description = Column(Text())
    secret = Column(Text())
    location = Column(Text())
    order = Column(Integer)  # ordering value within the hunt

    def __init__(self, hunt_id, qr_id, name, image='', description='',
                 secret='', location='', order=0):
        self.hunt_id = hunt_id
        self.qr_id = qr_id
        self.name = name
        self.image = image
        self.description = description
        self.secret = secret
        self.location = location
        self.order = order

    def __repr__(self):
        # Bug fix: the original format string was missing the closing '>'.
        return "<HuntQR('%s: %d, %d')>" % (self.name, self.hunt_id, self.qr_id)
class CataWikiAuction(BaseAuction,
                      metaclass=BaseAuctionRelationshipMeta,
                      profile_table='CataWikiProfile',
                      profile_table_name='catawiki_profiles'):
    """The database model for an auction on catawiki.com."""

    __tablename__ = 'catawiki_auctions'

    subtitle = Column(Text())
    lot_details = Column(Text())
    # Expert price-estimate range.
    expert_estimate_max = Column(Integer)
    expert_estimate_min = Column(Integer)
    reserve_price_met = Column(Boolean)
    closed = Column(Boolean)
    sold = Column(Boolean)
    themed = Column(Boolean)
    auction_type_family_id = Column(Integer)
    auction_type_family_name = Column(String(64))
    auction_id = Column(Integer)
    auction_name = Column(String(64))
    # Category hierarchy levels L0..L2.
    category_L2_id = Column(Integer)
    category_L2_name = Column(String(32))
    category_L1_id = Column(Integer)
    category_L1_name = Column(String(32))
    category_L0_id = Column(Integer)
    category_L0_name = Column(String(32))
    likes = Column(Integer)
class BaseProfile(TimestampBase):
    """Abstract base class for site-specific profile tables."""

    __abstract__ = True
    __tablename__ = 'base_profile'

    id = Column(Text(), primary_key=True)
    name = Column(Text())
    description = Column(Text())
    uri = Column(Text())
def test_divide_columns_into_type_of_filters():
    """Columns are split into selector filters vs. numerical filters
    based on their SQL type and the number of unique entries."""
    over = MAX_ENTRIES_FOR_FILTER_SELECTOR + 1
    under = MAX_ENTRIES_FOR_FILTER_SELECTOR - 1

    column_types_dict = {
        "col_1": Integer(),
        "col_2": Text(),
        "col_3": Float(),
        "col_4": DateTime(),
        "col_5": ARRAY("string"),
        "col_6": Boolean(),
        "col_7": Integer(),
        "col_8": Float(),
        "col_9": DateTime(),
        "col_10": Text(),
        "col_11": Boolean(),
        "col_12": ARRAY("string"),
    }
    unique_entries = {
        "col_1": [1 for i in range(over)],
        "col_2": ["Dream" for i in range(under)],
        "col_3": [2.5 for i in range(over)],
        "col_4": ["11/10/2013" for i in range(over)],
        "col_5": [[1, 4, 5] for i in range(under)],
        "col_6": [True for i in range(under)],
        "col_7": [1 for i in range(under)],
        "col_8": [3.5 for i in range(under)],
        "col_9": ["11/10/2014" for i in range(under)],
        "col_10": ["Dream" for i in range(over)],
        "col_11": [True for i in range(over)],
        "col_12": [[1, 4, 5] for i in range(over)],
    }

    (filter_column_names,
     numerical_filter_column_names,
     unique_entries_dict) = divide_columns_into_type_of_filters(
        unique_entries, column_types_dict)

    # Numeric-typed columns become numerical filters regardless of count.
    assert set(numerical_filter_column_names) == {
        "col_1", "col_3", "col_4", "col_7", "col_8", "col_9",
    }

    expected_filters = {"col_2", "col_5", "col_6", "col_7", "col_8", "col_9"}
    assert set(filter_column_names) == expected_filters
    assert set(unique_entries_dict.keys()) == expected_filters

    # Selector filters keep their unique-entry lists verbatim.
    for col in ("col_2", "col_5", "col_6", "col_7", "col_8", "col_9"):
        assert unique_entries_dict[col] == unique_entries[col]
class LiveAuctioneersAuction(BaseAuction,
                             metaclass=BaseAuctionRelationshipMeta,
                             profile_table='LiveAuctioneersProfile',
                             profile_table_name='liveauctioneers_profiles'):
    """The database model for a liveauctioneers.com auction."""

    __tablename__ = 'liveauctioneers_auctions'

    location = Column(Text())
    lot_number = Column(Integer)
    condition = Column(Text())
    high_bid_estimate = Column(String(16))
    low_bid_estimate = Column(String(16))
def upgrade():
    """Create the ``results`` table (auto generated by Alembic)."""
    op.create_table(
        'results',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('url', sa.String(), nullable=True),
        sa.Column('result_all',
                  postgresql.JSON(astext_type=Text()), nullable=True),
        sa.Column('result_no_stop_words',
                  postgresql.JSON(astext_type=Text()), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
class AptRepo(Base):
    """An APT repository record with its GPG key material."""

    __tablename__ = 'aptrepo'

    id = Column(Integer, primary_key=True)
    name = Column(String(length=32), unique=True, nullable=False)
    gpgkey = Column(Text(), nullable=True)
    gpgkeyprint = Column(Text(), nullable=True)
    gpgpubkey = Column(Text(), nullable=True)

    # One repository exposes many distributions.
    dists = relationship("AptDist")
class EbayAuction(BaseAuction,
                  metaclass=BaseAuctionRelationshipMeta,
                  profile_table='EbayProfile',
                  profile_table_name='ebay_profiles'):
    """The database model for an eBay auction."""

    __tablename__ = 'ebay_auctions'

    buy_now_price = Column(String(16))
    location = Column(Text())
    locale = Column(Text())
    quantity = Column(Integer())
    video_url = Column(Text())
    vat_included = Column(Boolean)
    domain = Column(Text())
def upgrade():
    """Add JSONB ``social_profiles`` and ``tags`` columns to organization
    (commands auto generated by Alembic)."""
    op.add_column(
        'organization',
        sa.Column('social_profiles',
                  postgresql.JSONB(astext_type=Text()),
                  nullable=True))
    op.add_column(
        'organization',
        sa.Column('tags',
                  postgresql.JSONB(astext_type=Text()),
                  nullable=True))
def __new__(cls, clsname, bases, namespace,
            profile_table=None, profile_table_name=None):
    """Inject seller/winner foreign keys and relationships into the
    auction class being created, wired to the given profile table."""
    fk_target = profile_table_name + '.id'
    namespace.update(
        seller_id=Column(Text(), ForeignKey(fk_target)),
        winner_id=Column(Text(), ForeignKey(fk_target)),
        seller=relationship(profile_table, backref='auctions_sold',
                            foreign_keys=clsname + '.seller_id'),
        winner=relationship(profile_table, backref='auctions_won',
                            foreign_keys=clsname + '.winner_id'),
    )
    return super(BaseAuctionRelationshipMeta, cls).__new__(
        cls, clsname, bases, namespace)
def __init__(self, length=None, collation=None, **kwargs):
    """
    Construct a TEXT.

    :param collation: Optional, a column-level collation for this string
      value.
    """
    convert_unicode = kwargs.get('convert_unicode', False)
    assert_unicode = kwargs.get('assert_unicode', None)
    Text.__init__(self, length, convert_unicode, assert_unicode)
    _CollationMixin.__init__(self, collation, **kwargs)
def write_info(self, table_name, info):
    """Write the DataFrame *info* into *table_name*, replacing existing rows."""
    # Column -> sqlalchemy.types mapping passed through to to_sql.
    column_types = {
        'Code': Integer(),
        'StockName': Text(),
        'Date': Date(),
        'Open': Text(),
    }
    info.to_sql(table_name, self.engine, if_exists='replace',
                dtype=column_types)
class WinXinAcount(Base):
    """WeChat account management record."""

    __tablename__ = 'wx_account_manage'

    id = Column(Integer(), nullable=False, primary_key=True,
                autoincrement=True)
    # Soft-delete / active flag: only rows with data_status=True are listed.
    data_status = Column(Boolean(), index=True, nullable=False, default=True)
    use_type = Column(Text(), nullable=False)
    name = Column(Text(), nullable=False)
    type = Column(Text(), nullable=False)
    mail = Column(String(64), nullable=False)
    secret = Column(String(64), nullable=False)
    use_name = Column(String(64), nullable=False)
    phone = Column(String(64), nullable=False)
    data_create_time = Column(DateTime(timezone=True), nullable=False,
                              index=True,
                              server_default=func.current_timestamp())
    data_update_time = Column(DateTime(timezone=True), nullable=False,
                              index=True,
                              server_default=func.current_timestamp())

    @property
    def get_accunt(self):
        """Return all active accounts ordered by id, reformatting their
        timestamp attributes in place as '%Y-%m-%d %H:%M:%S' strings."""
        accounts = (DBsession.query(WinXinAcount)
                    .filter(WinXinAcount.data_status == True)
                    .order_by(WinXinAcount.id)
                    .all())
        for account in accounts:
            account.data_create_time = account.data_create_time.strftime(
                "%Y-%m-%d %H:%M:%S")
            account.data_update_time = account.data_update_time.strftime(
                "%Y-%m-%d %H:%M:%S")
        return accounts
def __init__(self, mysql_as_long=False, mysql_as_medium=False):
    """Select the MySQL storage variant for JSON-encoded text.

    :param mysql_as_long: store as LONGTEXT on MySQL
    :param mysql_as_medium: store as MEDIUMTEXT on MySQL
    :raises TypeError: if both flags are set
    """
    super(JsonEncodedType, self).__init__()
    if mysql_as_long and mysql_as_medium:
        raise TypeError(
            "mysql_as_long and mysql_as_medium are mutually exclusive")
    if mysql_as_long:
        self.impl = Text().with_variant(mysql.LONGTEXT(), 'mysql')
    elif mysql_as_medium:
        self.impl = Text().with_variant(mysql.MEDIUMTEXT(), 'mysql')
def load_dialect_impl(self, dialect):
    """Pick the concrete column implementation per dialect.

    - sqlite stores the value as Text
    - postgresql and mysql (5.7.8+) use the native JSON type
    - every other dialect falls back to Text
    """
    if dialect.name in ('postgresql', 'mysql'):
        impl = JSON()
    else:
        # sqlite and any other dialect: plain text storage.
        impl = Text()
    return dialect.type_descriptor(impl)
def load_grex_from_dwh(table_name):
    """Copy GREX plan rows from the stadsdelen Oracle DWH into *table_name*.

    Reads DMDATA.GREX_GV_PLANNEN_V2, parses the WKT geometry column, fully
    replaces *table_name* in the target database, then normalises the
    geometry column to valid MultiPolygons in SRID 28992.
    """
    db_engine = get_engine()
    dwh_ora_engine = get_ora_engine("oracle_dwh_stadsdelen")
    with dwh_ora_engine.connect() as connection:
        df = pd.read_sql(
            """
            SELECT PLANNR as ID
                 , PLANNAAM
                 , STARTDATUM
                 , PLANSTATUS
                 , OPPERVLAKTE
                 , GEOMETRIE_WKT AS GEOMETRY
            FROM DMDATA.GREX_GV_PLANNEN_V2
            """,
            connection,
            index_col="id",
            coerce_float=True,
            params=None,
            parse_dates=["startdatum"],
            columns=None,
            chunksize=None,
        )
    # WKT text -> geometry objects; wkt_loads_wrapped is an opaque helper —
    # presumably it tolerates malformed WKT (TODO confirm).
    df["geometry"] = df["geometry"].apply(wkt_loads_wrapped)
    # Column types for to_sql; geometry lands initially as generic SRID 4326.
    grex_rapportage_dtype = {
        "id": Integer(),
        "plannaam": Text(),
        "startdatum": Date(),
        "planstatus": Text(),
        "oppervlakte": Float(),
        "geometry": Geometry(geometry_type="GEOMETRY", srid=4326),
    }
    df.to_sql(table_name, db_engine, if_exists="replace",
              dtype=grex_rapportage_dtype)
    with db_engine.connect() as connection:
        connection.execute(
            f"ALTER TABLE {table_name} ADD PRIMARY KEY (id)")
        # Repair invalid geometries and keep only the polygonal parts (3).
        connection.execute(f"""
            UPDATE {table_name}
            SET geometry = ST_CollectionExtract(ST_Makevalid(geometry), 3)
            WHERE ST_IsValid(geometry) = False
               OR ST_GeometryType(geometry) != 'ST_MultiPolygon';
            COMMIT;
        """)
        # Re-type and re-project the column to MultiPolygon / RD New (28992).
        connection.execute(f"""
            ALTER TABLE {table_name}
            ALTER COLUMN geometry TYPE geometry(MultiPolygon,28992)
            USING ST_Transform(geometry,28992);
        """)
        connection.execute(
            f"DELETE FROM {table_name} WHERE geometry is NULL")
class Story(Base):
    """A story with text content, an optional image, and coordinates."""

    __tablename__ = 'stories'

    title = db.Column(Text(), nullable=False)
    author = db.Column(Text())
    display_image = db.Column(
        UploadedFileField(upload_type=UploadedImageWithThumb))
    description = db.Column(Text(), nullable=False)
    text = db.Column(Text(), nullable=False)
    # Coordinates stored with seven decimal places.
    latitude = db.Column(db.Numeric(10, 7))
    longitude = db.Column(db.Numeric(10, 7))

    def __repr__(self):
        return f'<Story "{self.title}" {self.id}>'
class Hunt(Base):
    """A scavenger hunt with a name and description."""

    __tablename__ = "hunt"

    id = Column(Integer, primary_key=True)
    name = Column(Text())
    description = Column(Text())

    def __init__(self, name, description):
        self.name = name
        self.description = description

    def __repr__(self):
        # Bug fix: the original format string was missing the closing '>'.
        return "<Hunt('%s')>" % self.name
class MT940Error(IntegerIdModel):
    """A failed MT940 import, stored together with the raised exception."""

    mt940 = Column(Text(), nullable=False)
    exception = Column(Text(), nullable=False)
    author = relationship("User")
    author_id = Column(Integer, ForeignKey("user.id"), nullable=False)
    imported_at = Column(DateTimeTz, nullable=False,
                         server_default=func.current_timestamp(),
                         onupdate=func.current_timestamp())
    bank_account = relationship(BankAccount,
                                backref=backref("mt940_errors"))
    bank_account_id = Column(Integer, ForeignKey(BankAccount.id),
                             nullable=False)
class CataWikiAuction(BaseAuction,
                      metaclass=BaseAuctionRelationshipMeta,
                      profile_table='CataWikiProfile',
                      profile_table_name='catawiki_profiles'):
    """The database model for an auction on catawiki.com."""

    __tablename__ = 'catawiki_auctions'

    subtitle = Column(Text())
    lot_details = Column(Text())
    # Expert price-estimate range.
    expert_estimate_max = Column(Integer)
    expert_estimate_min = Column(Integer)
    reserve_price_met = Column(Boolean)
    closed = Column(Boolean)
    sold = Column(Boolean)
def transform_column_type(self, column_type, dialect):
    """Map MySQL-flavoured column types onto generic equivalents.

    tinyint(1) -> Boolean, datetime -> DateTime, and any of
    longtext/mediumtext/varchar(...) -> Text; everything else is
    delegated to the parent implementation.
    """
    ctype = column_type.compile().lower()
    if ctype == "tinyint(1)":
        replacement = Boolean()
    elif ctype == "datetime":
        replacement = DateTime()
    elif ctype in ("longtext", "mediumtext") or "varchar" in ctype:
        replacement = Text()
    else:
        return super().transform_column_type(column_type, dialect)
    return replacement.compile(dialect=dialect)
class HuntQR(Base):
    """Credentials record (username/password/email).

    NOTE(review): the class and table names clash with the hunt/QR link
    model elsewhere in the project — confirm whether this was meant to be
    a User model.
    """

    __tablename__ = "hunt_qr"

    id = Column(Integer, primary_key=True)
    username = Column(Text())
    password = Column(Text())
    email = Column(Text())

    def __init__(self, username, password, email):
        self.username = username
        self.password = password
        self.email = email

    def __repr__(self):
        # Bug fix: the old repr referenced self.name / self.hunt_id /
        # self.qr_id, none of which exist on this class, so calling
        # repr() raised AttributeError. Show the username instead.
        return "<HuntQR('%s')>" % self.username
class User(db.Model, UserMixin):
    """Application user with a hashed password and comma-separated teams."""

    id = Column(Integer, primary_key=True, unique=True, nullable=False)
    username = Column(String(64), index=True, unique=True, nullable=False)
    password_hash = Column(String(128), nullable=False)
    # TODO: Remove teamname column, hotfix
    teamname = Column(Text())
    teams = Column(Text(), nullable=True)  # comma-separated list, may be NULL

    def __repr__(self):
        parts = ["User: {}\n".format(self.username)]
        # Bug fix: teams is nullable — the old code called
        # self.teams.split(',') unconditionally and crashed on None.
        if self.teams:
            for team in self.teams.split(','):
                parts.append("> {}\n".format(team))
        return "".join(parts)
class DbLog(Base):
    """A persisted log record mirroring the standard logging fields."""

    __tablename__ = "db_dblog"

    id = Column(Integer, primary_key=True)
    time = Column(DateTime(timezone=True), default=timezone.now)
    loggername = Column(String(255), index=True)
    levelname = Column(String(255), index=True)
    objname = Column(String(255), index=True)
    objpk = Column(Integer, index=True, nullable=True)
    message = Column(Text(), nullable=True)
    # SQL column name is "metadata"; the Python attribute is underscored
    # because "metadata" is reserved on SQLAlchemy declarative classes.
    _metadata = Column('metadata', JSONB)

    def __init__(self, loggername="", levelname="", objname="",
                 objpk=None, message=None, metadata=None):
        # Both a logger name and a level are required for a useful record.
        if not loggername or not levelname:
            raise ValidationError(
                "The loggername and levelname can't be empty")
        self.loggername = loggername
        self.levelname = levelname
        self.objname = objname
        self.objpk = objpk
        self.message = message
        self._metadata = metadata or {}

    def __str__(self):
        # NOTE(review): this references self.dbnode and self.ctime, neither
        # of which is defined on DbLog — the text "DbComment" suggests it
        # was copied from another model and will raise AttributeError if
        # ever called; confirm intent before relying on it.
        return "DbComment for [{} {}] on {}".format(
            self.dbnode.get_simple_name(), self.dbnode.id,
            timezone.localtime(self.ctime).strftime("%Y-%m-%d"))

    @classmethod
    def add_from_logrecord(cls, record):
        """
        Add a new entry from a LogRecord (from the standard python logging
        facility). No exceptions are managed here.
        """
        objpk = record.__dict__.get('objpk', None)
        objname = record.__dict__.get('objname', None)

        # Filter: Do not store in DB if no objpk and objname is given
        if objpk is None or objname is None:
            return

        new_entry = cls(loggername=record.name,
                        levelname=record.levelname,
                        objname=objname,
                        objpk=objpk,
                        message=record.getMessage(),
                        metadata=record.__dict__)
        new_entry.save()
class Transaction(IntegerIdModel):
    """A bookkeeping transaction composed of splits across accounts."""

    description = Column(Text(), nullable=False)
    author_id = Column(Integer,
                       ForeignKey("user.id", ondelete='SET NULL',
                                  onupdate='CASCADE'),
                       nullable=True)
    author = relationship("User")
    posted_at = Column(DateTimeTz, nullable=False,
                       server_default=func.current_timestamp(),
                       onupdate=func.current_timestamp())
    valid_on = Column(Date, nullable=False,
                      server_default=func.current_timestamp(), index=True)
    accounts = relationship(Account, secondary="split",
                            backref="transactions")
    confirmed = Column(Boolean(), nullable=False, default=True)

    @property
    def amount(self):
        """Sum of the positive split amounts."""
        return sum(max(s.amount, 0) for s in self.splits)

    @property
    def is_balanced(self):
        """True when the split amounts cancel out to zero."""
        return sum(s.amount for s in self.splits) == 0

    @property
    def is_simple(self):
        """True for a plain two-split transaction."""
        return len(self.splits) == 2
def table_from_fields(engine, metadata, fprefix, filename, record_type,
                      table_fields, pks=None):
    """Build a SQLAlchemy Table for a record layout.

    Args:
        engine: Unused here; kept for interface compatibility.
        metadata: MetaData the table is registered on.
        fprefix, filename, record_type: Joined (skipping empty parts)
            and lowercased to form the table name.
        table_fields: Either a list of column names, or a list of
            (name, size) pairs.
        pks: Optional collection of column names to mark as primary key.
            (Was a mutable default ``[]``; now defaults to None.)

    Returns:
        sqlalchemy.Table: The constructed table.
    """
    if pks is None:
        pks = ()
    table_name = '_'.join(
        filter(None, [fprefix, filename, record_type])).lower()
    # Robustness fix: guard table_fields[0] so an empty field list yields
    # an empty table instead of raising IndexError.
    if (table_fields and not isinstance(table_fields[0], str)
            and len(table_fields[0]) == 2):
        column_names = [f[0] for f in table_fields]
        column_sizes = [f[1] for f in table_fields]
    else:
        column_names = table_fields
        column_sizes = [None] * len(table_fields)
    columns = []
    for column_size, column_name in zip(column_sizes, column_names):
        # These markers are parsing artefacts, not data columns.
        if column_name in ['RECORD_TYPE', 'UPDATE_MARKER']:
            continue
        if column_name in DATE_FIELDS:
            type_ = Date()
        elif column_name.endswith('_TIME'):
            type_ = Time()
        elif column_name in INT_FIELDS:
            type_ = Integer()
        elif column_size is None:
            # No size information available: fall back to unbounded text.
            type_ = Text()
        else:
            type_ = String(column_size)
        columns.append(
            Column(column_name.lower(), type_,
                   primary_key=column_name in pks))
    table = Table(table_name, metadata, *columns)
    return table
def test_get_numpy_dtype_from_sql_alchemy_type(self):
    """Check the SQLAlchemy-type -> numpy-dtype mapping for each configured
    database, both for type instances and dialect-resolved types."""
    for db, server, storage in self.dbs:
        try:
            # Integer maps to a signed int dtype.
            expected_numpy_type = dtype('i')
            actual_numpy_type = storage._get_numpy_dtype_from_sql_alchemy_type(
                Integer())
            self.assertEqual(expected_numpy_type, actual_numpy_type)
            actual_numpy_type = storage._get_numpy_dtype_from_sql_alchemy_type(
                server.engine.dialect.type_descriptor(Integer))
            self.assertEqual(expected_numpy_type, actual_numpy_type)

            # Float maps to a float dtype.
            expected_numpy_type = dtype('f')
            actual_numpy_type = storage._get_numpy_dtype_from_sql_alchemy_type(
                Float())
            self.assertEqual(expected_numpy_type, actual_numpy_type)
            actual_numpy_type = storage._get_numpy_dtype_from_sql_alchemy_type(
                server.engine.dialect.type_descriptor(Float))
            self.assertEqual(expected_numpy_type, actual_numpy_type)

            # Text maps to a bytes dtype.
            expected_numpy_type = dtype('S')
            actual_numpy_type = storage._get_numpy_dtype_from_sql_alchemy_type(
                Text())
            self.assertEqual(expected_numpy_type, actual_numpy_type)
            actual_numpy_type = storage._get_numpy_dtype_from_sql_alchemy_type(
                server.engine.dialect.type_descriptor(Text))
            self.assertEqual(expected_numpy_type, actual_numpy_type)
        except:
            # Deliberate bare except: report the failing protocol, then
            # always re-raise. Bug fix: the Python-2 `print` statement is a
            # syntax error on Python 3; this call form works on both.
            print('ERROR: protocol %s' % server.config.protocol)
            raise