class InvoiceEntry(meta.BaseObject):
    """A single line item on an invoice.

    Each entry carries its own currency; :attr:`total` converts the line
    amount via the currency's exchange rate.
    """

    __tablename__ = "invoice_entry"

    id = schema.Column(
        types.Integer(),
        schema.Sequence("invoice_entry_id_seq", optional=True),
        primary_key=True,
        autoincrement=True,
    )
    # Ordering of entries within an invoice; also drives the backref order.
    position = schema.Column(types.Integer(), default=0)
    invoice_id = schema.Column(
        types.Integer(),
        schema.ForeignKey(Invoice.id, onupdate="CASCADE", ondelete="CASCADE"),
        nullable=False,
    )
    invoice = orm.relationship(
        Invoice, backref=orm.backref("entries", order_by=position)
    )
    description = schema.Column(types.UnicodeText(), nullable=False)
    vat = schema.Column(types.Integer(), nullable=False)
    # BUG FIX: SQLAlchemy's ``types.Integer`` takes no length argument --
    # the original ``types.Integer(3)`` raises TypeError when the class
    # body is executed.
    currency_id = schema.Column(
        types.Integer(),
        schema.ForeignKey(Currency.id, onupdate="RESTRICT", ondelete="RESTRICT"),
        nullable=False,
    )
    currency = orm.relationship(Currency, lazy="joined")
    unit_price = schema.Column(types.Numeric(precision=7, scale=2), nullable=False)
    units = schema.Column(types.Numeric(4, 2), nullable=False, default=1)

    @property
    def total(self):
        """Line total: unit price times units, converted by the currency rate."""
        return self.unit_price * self.units * self.currency.rate
class TrackOld(Base):
    """Declarative mapping for the legacy ``track_old`` table."""

    __tablename__ = 'track_old'

    id = Column("id", Integer, primary_key=True, autoincrement=True)
    date = Column("date", types.TIMESTAMP(timezone=False))
    trkptnum = Column("trkptnum", Integer)
    distance = Column("distance", types.Numeric(11, 4))
    timespan = Column("timespan", types.Interval)
    gencpoly_pts = Column("gencpoly_pts", types.UnicodeText)
    gencpoly_levels = Column("gencpoly_levels", types.UnicodeText)
    color = Column("color", types.CHAR(6), default='FF0000')
    maxlat = Column("maxlat", types.Numeric(9, 7))
    maxlon = Column("maxlon", types.Numeric(10, 7))
    minlat = Column("minlat", types.Numeric(9, 7))
    minlon = Column("minlon", types.Numeric(10, 7))
    json_0002 = Column("json_0002", Text)

    def __init__(self, date, trkptnum, distance, timespan, gencpoly_pts,
                 gencpoly_levels, color, maxlat, maxlon, minlat, minlon,
                 json_0002):
        """Copy every constructor argument onto the column of the same name."""
        for attr, value in (
            ("date", date),
            ("trkptnum", trkptnum),
            ("distance", distance),
            ("timespan", timespan),
            ("gencpoly_pts", gencpoly_pts),
            ("gencpoly_levels", gencpoly_levels),
            ("color", color),
            ("maxlat", maxlat),
            ("maxlon", maxlon),
            ("minlat", minlat),
            ("minlon", minlon),
            ("json_0002", json_0002),
        ):
            setattr(self, attr, value)
class Unaligned(Model):
    """Per-sample, per-lane demultiplexing statistics."""

    unaligned_id = Column(types.Integer, primary_key=True)
    sample_id = Column(ForeignKey("sample.sample_id", ondelete="CASCADE"),
                       nullable=False)
    demux_id = Column(ForeignKey("demux.demux_id", ondelete="CASCADE"),
                      nullable=False)
    lane = Column(types.Integer)
    yield_mb = Column(types.Integer)
    passed_filter_pct = Column(types.Numeric(10, 5))
    readcounts = Column(types.Integer)
    raw_clusters_per_lane_pct = Column(types.Numeric(10, 5))
    perfect_indexreads_pct = Column(types.Numeric(10, 5))
    q30_bases_pct = Column(types.Numeric(10, 5))
    mean_quality_score = Column(types.Numeric(10, 5))
    time = Column(types.DateTime)

    @staticmethod
    def exists(sample_id: int, demux_id: int, lane: int) -> Optional[int]:
        """Checks if an Unaligned entry already exists"""
        # A single filter_by with all three keys is equivalent to the
        # chained filters: the conditions are ANDed either way.
        query = Unaligned.query.filter_by(
            sample_id=sample_id, demux_id=demux_id, lane=lane
        )
        try:
            return query.one().unaligned_id
        except NoResultFound:
            return None
class Trackpoint(Base):
    """A single GPS fix belonging to a track."""

    __tablename__ = 'trackpoint'

    id = Column('id', Integer, primary_key=True, autoincrement=True)
    track_id = Column('track_id', types.Integer, ForeignKey('track.id'))
    latitude = Column('latitude', types.Numeric(9, 7))
    longitude = Column('longitude', types.Numeric(10, 7))
    altitude = Column('altitude', types.Integer)
    velocity = Column('velocity', types.Integer)
    temperature = Column('temperature', types.Integer)
    direction = Column('direction', types.Integer)
    pressure = Column('pressure', types.Integer)
    timestamp = Column('timestamp', types.TIMESTAMP(timezone=False))
    uuid = Column('uuid', postgresql.UUID, unique=True)
    images = relationship('Image',
                          primaryjoin="Trackpoint.id==Image.trackpoint",
                          order_by='desc(Image.timestamp_original)')
    __table_args__ = (UniqueConstraint('latitude', 'longitude', 'timestamp',
                                       name='trackpoint_lat_lon_timestamp'),
                      {})

    def __init__(self, track, latitude, longitude, altitude, velocity,
                 temperature, direction, pressure, timestamp, uuid):
        """Build a trackpoint from a track object plus raw measurements."""
        # BUG FIX: the original assigned ``self.track``, which is not a
        # mapped attribute, so the foreign key was never persisted.  The
        # mapped column is ``track_id``.
        self.track_id = track.id
        self.latitude = latitude
        self.longitude = longitude
        self.altitude = altitude
        self.velocity = velocity
        self.temperature = temperature
        self.direction = direction
        self.pressure = pressure
        self.timestamp = timestamp
        self.uuid = uuid

    @classmethod
    def get_trackpoint_by_lat_lon_timestamp(cls, latitude, longitude,
                                            timestamp):
        """Return the unique trackpoint at the given position and time.

        Logs the error and returns None when the lookup fails.
        """
        try:
            return DBSession.query(Trackpoint).filter(
                and_(Trackpoint.latitude == latitude,
                     Trackpoint.longitude == longitude,
                     Trackpoint.timestamp == timestamp)).one()
        except Exception as e:
            print((
                'Error retrieving trackpoint by lat({0}), lon({1}), time({2}) :\n {3} '
                .format(latitude, longitude, timestamp, e)))
            return None

    @classmethod
    def get_trackpoint_by_uuid(cls, uuid):
        """Return the trackpoint carrying ``uuid``, or None on failure."""
        try:
            return DBSession.query(Trackpoint).filter(
                Trackpoint.uuid == uuid).one()
        except Exception as e:
            print(('Error retrieving trackpoint {0}: '.format(e)))
            return None
class Races(Base):
    """Declarative mapping for the ``races`` table."""

    __tablename__ = 'races'

    recordid = sqla.Column(sqltypes.Integer, primary_key=True)
    racedate = sqla.Column(sqltypes.TIMESTAMP)
    racevenue = sqla.Column(sqltypes.VARCHAR(30), nullable=False)
    racenumber = sqla.Column(sqltypes.Integer)
    horseid = sqla.Column(sqltypes.Integer)
    horsename = sqla.Column(sqltypes.VARCHAR(50), nullable=False)
    price = sqla.Column(sqltypes.Numeric(6, 2), nullable=False, default=0)
    dlr = sqla.Column(sqltypes.Integer, nullable=False)
    nr = sqla.Column(sqltypes.Numeric(6, 2), nullable=False)
    # Nullable by design; absent values stay NULL rather than defaulting.
    nr_fin = sqla.Column(sqltypes.Numeric(2, 1), nullable=True, default=None)
def test_basic_reflection(self):
    """Create two related tables, reflect them, and compare definitions."""
    meta = self.metadata

    users = Table(
        "engine_users",
        meta,
        Column("user_id", types.INT, primary_key=True),
        Column("user_name", types.VARCHAR(20), nullable=False),
        Column("test1", types.CHAR(5), nullable=False),
        Column("test2", types.Float(5), nullable=False),
        Column("test2.5", types.Float(), nullable=False),
        Column("test3", types.Text()),
        Column("test4", types.Numeric, nullable=False),
        Column("test4.5", types.Numeric(10, 2), nullable=False),
        Column("test5", types.DateTime),
        Column("parent_user_id", types.Integer,
               ForeignKey("engine_users.user_id")),
        Column("test6", types.DateTime, nullable=False),
        Column("test7", types.Text()),
        Column("test8", types.LargeBinary()),
        Column("test_passivedefault2", types.Integer, server_default="5"),
        Column("test9", types.BINARY(100)),
        Column("test_numeric", types.Numeric()),
    )
    addresses = Table(
        "engine_email_addresses",
        meta,
        Column("address_id", types.Integer, primary_key=True),
        Column("remote_user_id", types.Integer, ForeignKey(users.c.user_id)),
        Column("email_address", types.String(20)),
    )
    meta.create_all()

    # Reflect both tables into a fresh MetaData and compare.
    reflected_meta = MetaData()
    reflected_users = Table("engine_users", reflected_meta,
                            autoload=True, autoload_with=testing.db)
    reflected_addresses = Table("engine_email_addresses", reflected_meta,
                                autoload=True, autoload_with=testing.db)
    self.assert_tables_equal(users, reflected_users)
    self.assert_tables_equal(addresses, reflected_addresses)
class Cookie(Base):
    """Declarative mapping for the ``cookies`` table."""

    __tablename__ = 'cookies'

    cookie_id = Column(types.Integer(), primary_key=True)
    # Indexed to speed up lookups by name.
    cookie_name = Column(types.String(50), index=True)
    cookie_sku = Column(types.String(55))
    quantity = Column(types.Integer())
    unit_cost = Column(types.Numeric(12, 2))
class Vehicle(Base):
    """Declarative mapping for the ``vehicles`` table.

    The engine attributes are stored in individual columns but exposed to
    Python as a single ``Engine`` composite.
    """

    __tablename__ = 'vehicles'

    id = Column(types.Integer(), Sequence('seq_id'), primary_key=True,
                doc='The primary key')
    name = Column(types.String(), doc='The name of the vehicle')
    type = Column(types.Enum(VehicleType), nullable=False)
    created_at = Column(types.DateTime())
    paint = Column(types.Enum(*COLORS))
    is_used = Column(types.Boolean)

    @property
    def lower_name(self):
        """The vehicle name, lower-cased."""
        return self.name.lower()

    # Backing columns for the ``engine`` composite below.
    _engine_cylinders = Column('engine_cylinders', types.BigInteger())
    _engine_displacement = Column(
        'engine_displacement',
        types.Numeric(asdecimal=True, precision=10, scale=2))
    _engine_type = Column('engine_type', types.String(length=25))
    _engine_fuel_type = Column('engine_fuel_type', types.String(length=10))
    engine = orm.composite(Engine, _engine_cylinders, _engine_displacement,
                           _engine_type, _engine_fuel_type)

    _owner_id = Column('owner_id', types.Integer(), ForeignKey(Owner.id))
    owner = orm.relationship(Owner, backref='vehicles')
def test_numeric_reflection(self):
    """Numeric precision and scale must survive a reflection round trip."""
    for reflected in self._type_round_trip(sql_types.Numeric(18, 5)):
        assert isinstance(reflected, sql_types.Numeric)
        eq_(reflected.precision, 18)
        eq_(reflected.scale, 5)
class Vehicle(Base):
    """Declarative mapping for the ``vehicles`` table.

    Engine data lives in four backing columns combined into one ``Engine``
    composite; ``clean_name`` rejects the placeholder name "invalid".
    """

    __tablename__ = "vehicles"

    id = Column(types.Integer(), Sequence("seq_id"), primary_key=True,
                doc="The primary key")
    name = Column(types.String(length=50), doc="The name of the vehicle")
    type = Column(types.Enum(VehicleType, name="vehicle_type"),
                  nullable=False)
    created_at = Column(types.DateTime())
    paint = Column(types.Enum(*COLORS, name="colors"))
    is_used = Column(types.Boolean)

    @property
    def lower_name(self):
        """The vehicle name, lower-cased."""
        return self.name.lower()

    # Backing columns for the ``engine`` composite below.
    _engine_cylinders = Column("engine_cylinders", types.BigInteger())
    _engine_displacement = Column(
        "engine_displacement",
        types.Numeric(asdecimal=True, precision=10, scale=2))
    _engine_type = Column("engine_type", types.String(length=25))
    _engine_fuel_type = Column("engine_fuel_type", types.String(length=10))
    engine = orm.composite(Engine, _engine_cylinders, _engine_displacement,
                           _engine_type, _engine_fuel_type)

    _owner_id = Column("owner_id", types.Integer(), ForeignKey(Owner.id))
    owner = orm.relationship(Owner, backref="vehicles")

    def clean_name(self):
        """Raise ValidationError when the name is the placeholder value."""
        if self.name == "invalid":
            raise ValidationError("invalid vehicle name")
def _fixed_lookup_fixture(self):
    """Pairs of (input type, expected canonical/uppercase type)."""
    fixture = [
        # String family collapses to VARCHAR / TEXT / CHAR.
        (sqltypes.String(), sqltypes.VARCHAR()),
        (sqltypes.String(1), sqltypes.VARCHAR(1)),
        (sqltypes.String(3), sqltypes.VARCHAR(3)),
        (sqltypes.Text(), sqltypes.TEXT()),
        (sqltypes.Unicode(), sqltypes.VARCHAR()),
        (sqltypes.Unicode(1), sqltypes.VARCHAR(1)),
        (sqltypes.UnicodeText(), sqltypes.TEXT()),
        (sqltypes.CHAR(3), sqltypes.CHAR(3)),
        # Numeric family; bare classes map to default instances.
        (sqltypes.NUMERIC, sqltypes.NUMERIC()),
        (sqltypes.NUMERIC(10, 2), sqltypes.NUMERIC(10, 2)),
        (sqltypes.Numeric, sqltypes.NUMERIC()),
        (sqltypes.Numeric(10, 2), sqltypes.NUMERIC(10, 2)),
        (sqltypes.DECIMAL, sqltypes.DECIMAL()),
        (sqltypes.DECIMAL(10, 2), sqltypes.DECIMAL(10, 2)),
        (sqltypes.INTEGER, sqltypes.INTEGER()),
        (sqltypes.BIGINT, sqltypes.BIGINT()),
        (sqltypes.Float, sqltypes.FLOAT()),
        # Date/time family.
        (sqltypes.TIMESTAMP, sqltypes.TIMESTAMP()),
        (sqltypes.DATETIME, sqltypes.DATETIME()),
        (sqltypes.DateTime, sqltypes.DATETIME()),
        (sqltypes.DateTime(), sqltypes.DATETIME()),
        (sqltypes.DATE, sqltypes.DATE()),
        (sqltypes.Date, sqltypes.DATE()),
        (sqltypes.TIME, sqltypes.TIME()),
        (sqltypes.Time, sqltypes.TIME()),
        # Booleans.
        (sqltypes.BOOLEAN, sqltypes.BOOLEAN()),
        (sqltypes.Boolean, sqltypes.BOOLEAN()),
    ]
    return fixture
def get_columns(self, connection, table_name, schema=None, **kw):
    """Return reflected column metadata for ``table_name``.

    Queries the Informix system catalogs (``syscolumns`` / ``systables`` /
    ``sysdefaults``) and decodes each column's packed ``coltype`` into a
    SQLAlchemy type plus nullability, default, autoincrement and
    primary-key flags.
    """
    schema = schema or self.default_schema_name
    c = connection.execute(
        """select colname, coltype, collength, t3.default, t1.colno from syscolumns as t1 , systables as t2 , OUTER sysdefaults as t3 where t1.tabid = t2.tabid and t2.tabname=? and t2.owner=? and t3.tabid = t2.tabid and t3.colno = t1.colno order by t1.colno""",
        table_name, schema)
    # Primary-key columns are looked up once and matched by name below.
    pk_constraint = self.get_pk_constraint(connection, table_name, schema,
                                           **kw)
    primary_cols = pk_constraint['constrained_columns']
    columns = []
    rows = c.fetchall()
    for name, colattr, collength, default, colno in rows:
        name = name.lower()
        autoincrement = False
        primary_key = False
        if name in primary_cols:
            primary_key = True
        # in 7.31, coltype = 0x000
        #          ^^-- column type
        #         ^-- 1 not null, 0 null
        # divmod splits the packed byte: high part is the NOT NULL flag,
        # low part is the base type code.
        not_nullable, coltype = divmod(colattr, 256)
        if coltype not in (0, 13) and default:
            # Non-character defaults come back as "<keyword> <value>";
            # keep only the value token.
            default = default.split()[-1]
        if coltype == 6:
            # Serial, mark as autoincrement
            autoincrement = True
        if coltype == 0 or coltype == 13:
            # char, varchar: instantiate with the declared length and quote
            # the default so it renders as a string literal.
            coltype = ischema_names[coltype](collength)
            if default:
                default = "'%s'" % default
        elif coltype == 5:
            # decimal: collength packs precision in the high byte and scale
            # in the low byte; scale 255 means "no scale specified".
            precision, scale = (collength & 0xFF00) >> 8, collength & 0xFF
            if scale == 255:
                scale = 0
            coltype = sqltypes.Numeric(precision, scale)
        else:
            try:
                coltype = ischema_names[coltype]
            except KeyError:
                # Unknown type code: warn and fall back to NULLTYPE rather
                # than failing the whole reflection.
                util.warn("Did not recognize type '%s' of column '%s'" %
                          (coltype, name))
                coltype = sqltypes.NULLTYPE
        column_info = dict(name=name, type=coltype,
                           nullable=not not_nullable, default=default,
                           autoincrement=autoincrement,
                           primary_key=primary_key)
        columns.append(column_info)
    return columns
def load_dialect_impl(self, dialect):
    """Choose the backing column type for the active dialect."""
    if dialect.name != "sqlite":
        return dialect.type_descriptor(
            sql_types.Numeric(precision=self.precision, scale=self.scale))
    # SQLite stores the value as text; precision + 1 leaves room for the
    # decimal point.
    return dialect.type_descriptor(sql_types.String(self.precision + 1))
class Trackpoint(Base):
    """A single GPS fix belonging to a track."""

    __tablename__ = 'trackpoint'

    id = Column("id", Integer, primary_key=True, autoincrement=True)
    track_id = Column("track_id", types.Integer, ForeignKey('track.id'))
    latitude = Column("latitude", types.Numeric(9, 7))
    longitude = Column("longitude", types.Numeric(10, 7))
    altitude = Column("altitude", types.Integer)
    velocity = Column("velocity", types.Integer)
    temperature = Column("temperature", types.Integer)
    direction = Column("direction", types.Integer)
    pressure = Column("pressure", types.Integer)
    timestamp = Column("timestamp", types.TIMESTAMP(timezone=False))
    uuid = Column("uuid", postgresql.UUID, unique=True)
    trackpoint_log = relationship('Log', backref='trackpoint_log_ref')
    trackpoint_img = relationship('Image', backref='trackpoint_img_ref')

    def __init__(self, track_id, latitude, longitude, altitude, velocity,
                 temperature, direction, pressure, timestamp, uuid):
        self.track_id = track_id
        self.latitude = latitude
        self.longitude = longitude
        self.altitude = altitude
        self.velocity = velocity
        self.temperature = temperature
        self.direction = direction
        self.pressure = pressure
        self.timestamp = timestamp
        self.uuid = uuid

    @classmethod
    def get_trackpoint_by_lat_lon_time(cls, latitude, longitude, timestamp):
        """Return the unique trackpoint at the given position and time.

        Logs the error and returns None when the lookup fails.
        """
        try:
            return DBSession.query(Trackpoint).filter(
                and_(Trackpoint.latitude == latitude,
                     Trackpoint.longitude == longitude,
                     Trackpoint.timestamp == timestamp)).one()
        # BUG FIX: "except Exception, e" is Python-2-only syntax (a
        # SyntaxError on Python 3); likewise the original applied "%" to
        # the return value of print(), which raises TypeError on Python 3
        # whenever the handler fires.  The form below is valid on both.
        except Exception as e:
            print(
                "Error retrieving trackpoint by lat(%s), lon(%s), time(%s) :\n %s "
                % (latitude, longitude, timestamp, e))
            return None
def test_numeric_reflection(self) -> None:
    """Test the dialect's handling of numeric reflection."""
    # The built-in SQLAlchemy test uses a precision of 18, but the
    # Intersolv driver maxes out at a precision of 15 and doesn't appear
    # to care about or support scale.
    for reflected in self._type_round_trip(sql_types.Numeric(15, 5)):
        assert isinstance(reflected, sql_types.Numeric)
        assert reflected.precision == 15
        assert reflected.scale is None
class Course(Model):
    """Model exercising a broad mix of column types for form testing."""

    __tablename__ = "course"

    id = Column(sqla_types.Integer, primary_key=True)
    name = Column(sqla_types.String(255), nullable=False)

    # These are for better model form testing
    cost = Column(sqla_types.Numeric(5, 2), nullable=False)
    description = Column(sqla_types.Text, nullable=False)
    level = Column(sqla_types.Enum('Primary', 'Secondary'))
    has_prereqs = Column(sqla_types.Boolean, nullable=False)
    started = Column(sqla_types.DateTime, nullable=False)
    grade = Column(AnotherInteger, nullable=False)
def load_loan(conn, filename):
    """
    CREATE TABLE loan (
        loan_id number NOT NULL,
        account_id number NOT NULL,
        loan_date date NOT NULL,
        amount decimal(10, 2) NOT NULL,
        duration number,
        payments decimal(10, 2) NOT NULL,
        status char(1) NOT NULL,
        PRIMARY KEY (loan_id),
        FOREIGN KEY(account_id) REFERENCES account (account_id)
    );
    """
    df = pd.read_csv(filename, sep=';', low_memory=False, nrows=NROWS)
    print("== loan df size: {}".format(len(df)))

    # Align the CSV header with the table's column name.
    df.rename(columns={'date': 'loan_date'}, inplace=True)
    log.info("{} columns: {}".format(filename, df.columns))
    df['loan_date'] = pd.to_datetime(df['loan_date'], format="%y%m%d")

    # Explicit SQL types for the insert.
    dtype = {
        'loan_id': sqlt.Integer,
        'account_id': sqlt.Integer,
        'loan_date': sqlt.Date,
        'amount': sqlt.Numeric(10, 2),
        'duration': sqlt.Integer,
        'payments': sqlt.Numeric(10, 2),
        'status': sqlt.NCHAR(1),
    }

    log.info('Starting data import for: {} ({} rows)'.format(
        filename, len(df)))
    df.to_sql('loan', con=conn, if_exists='append', index=False, dtype=dtype)
    log.info('Finished data import for: {}'.format(filename))
class TrackpointOld(Base):
    """Legacy trackpoint record (pre-migration ``trackpoint_old`` table)."""

    __tablename__ = 'trackpoint_old'

    id = Column("id", Integer, primary_key=True, autoincrement=True)
    track_id = Column("track_id", types.Integer, ForeignKey('track.id'))
    timezone_id = Column("timezone_id", types.Integer,
                         ForeignKey('timezone.id'))
    country_id = Column("country_id", types.Integer,
                        ForeignKey('country.iso_numcode'))
    latitude = Column("latitude", types.Numeric(9, 7))
    longitude = Column("longitude", types.Numeric(10, 7))
    altitude = Column("altitude", types.Integer)
    velocity = Column("velocity", types.Integer)
    temperature = Column("temperature", types.Integer)
    direction = Column("direction", types.Integer)
    pressure = Column("pressure", types.Integer)
    timestamp = Column("timestamp", types.TIMESTAMP(timezone=False))
    infomarker = Column("infomarker", types.Boolean, default=False,
                        nullable=False)
    location = Column("location", types.VARCHAR(256))

    def __init__(self, track_id, timezone_id, country_id, latitude,
                 longitude, altitude, velocity, temperature, direction,
                 pressure, timestamp, infomarker, location):
        """Copy every constructor argument onto its mapped column."""
        self.track_id = track_id
        self.timezone_id = timezone_id
        self.country_id = country_id
        self.latitude = latitude
        self.longitude = longitude
        self.altitude = altitude
        self.velocity = velocity
        self.temperature = temperature
        self.direction = direction
        self.pressure = pressure
        self.timestamp = timestamp
        # BUG FIX: was ``self.informarker`` (typo), so the ``infomarker``
        # column silently kept its default instead of the passed value.
        self.infomarker = infomarker
        self.location = location
class PercentFraction(types.TypeDecorator):
    """ Highly accurate percent fraction. """
    # Backing type: NUMERIC(11, 10) -- one integer digit, ten fractional.
    impl = types.Numeric(11, 10)

    def load_dialect_impl(self, dialect):
        # MySQL gets an unsigned DECIMAL with the same precision/scale;
        # every other dialect uses the generic impl above.
        if _is_mysql(dialect):
            return mysql.DECIMAL(precision=11, scale=10, unsigned=True)
        return self.impl

    @property
    def python_type(self):
        # NOTE(review): ``int`` looks wrong for a fractional NUMERIC(11, 10)
        # -- ``decimal.Decimal`` (or ``float``) would match the stored
        # values; confirm against callers before changing.
        return int
class Ranking(Base):
    """Declarative mapping for the ``ranking`` table."""

    __tablename__ = 'ranking'

    id = Column(types.Integer(), primary_key=True)
    team_id = Column(types.Integer(), ForeignKey('teams.team_id'))
    league_id = Column(types.Integer())
    season_id = Column(types.Integer())
    standingsdate = Column(types.Date())
    conference = Column(types.Text())
    team = Column(types.Text())
    g_i = Column(types.SmallInteger())
    w = Column(types.SmallInteger())
    l = Column(types.SmallInteger())
    w_pct = Column(types.Numeric())
    home_record = Column(types.Text())
    road_record = Column(types.Text())
class Traffic(types.TypeDecorator):
    """ Amount of traffic in bytes. """

    impl = types.Numeric(16, 0)

    # Valid range: sixteen decimal digits, matching NUMERIC(16, 0).
    MIN_VALUE = 0
    MAX_VALUE = 9999999999999999

    def load_dialect_impl(self, dialect):
        """Use an unsigned DECIMAL on MySQL, the generic impl elsewhere."""
        if not _is_mysql(dialect):
            return self.impl
        return mysql.DECIMAL(precision=16, scale=0, unsigned=True)

    @property
    def python_type(self):
        """Byte counts are whole numbers."""
        return int
class IPv6Offset(types.TypeDecorator):
    """ IPv6 address offset. """

    impl = types.Numeric(39, 0)

    MIN_VALUE = 0
    # BUG FIX: the previous bound was 2**128, but the 128-bit IPv6 address
    # space spans 0 .. 2**128 - 1, so the largest representable offset is
    # 2**128 - 1 (the old value was off by one).
    MAX_VALUE = 340282366920938463463374607431768211455

    def load_dialect_impl(self, dialect):
        """Use an unsigned DECIMAL on MySQL, the generic impl elsewhere."""
        if _is_mysql(dialect):
            return mysql.DECIMAL(precision=39, scale=0, unsigned=True)
        return self.impl

    @property
    def python_type(self):
        """Offsets are whole numbers."""
        return int
class TattsRaces(Base):
    """Declarative mapping for the ``tattsraces`` table."""

    __tablename__ = 'tattsraces'

    recordid = sqla.Column(sqltypes.Integer, primary_key=True)
    # Stamped by the database at insert time.
    parsedate = sqla.Column(sqltypes.DateTime,
                            server_default=sqlfunc.current_timestamp(),
                            nullable=False)
    racevenue = sqla.Column(sqltypes.VARCHAR(30), nullable=False)
    racenumber = sqla.Column(sqltypes.Integer, nullable=False)
    horseid = sqla.Column(sqltypes.Integer, nullable=False)
    horsename = sqla.Column(sqltypes.VARCHAR(50), nullable=False)
    rating = sqla.Column(sqltypes.Integer, nullable=False)
    # Nullable by design; absent values stay NULL rather than defaulting.
    rating_fin = sqla.Column(sqltypes.Numeric(2, 1), nullable=True,
                             default=None)
def get_columns(self, connection, table_name, schema=None, **kw):
    """Return reflected column metadata for ``table_name``.

    BUG FIX: the original body referenced three undefined names and raised
    ``NameError`` on every call -- ``table`` (the parameter is
    ``table_name``), ``rows`` (the result set was never fetched from the
    cursor), and ``include_columns`` (now taken from ``**kw``).
    """
    c = connection.execute(
        """select colname, coltype, collength, t3.default, t1.colno from syscolumns as t1 , systables as t2 , OUTER sysdefaults as t3 where t1.tabid = t2.tabid and t2.tabname=? and t3.tabid = t2.tabid and t3.colno = t1.colno order by t1.colno""",
        table_name.lower())
    # Optional column filter, as passed by reflection callers.
    include_columns = kw.get("include_columns")
    columns = []
    rows = c.fetchall()
    for name, colattr, collength, default, colno in rows:
        name = name.lower()
        if include_columns and name not in include_columns:
            continue
        # in 7.31, coltype = 0x000
        #          ^^-- column type
        #         ^-- 1 not null, 0 null
        nullable, coltype = divmod(colattr, 256)
        if coltype not in (0, 13) and default:
            # Non-character defaults come back as "<keyword> <value>";
            # keep only the value token.
            default = default.split()[-1]
        if coltype == 0 or coltype == 13:
            # char, varchar: instantiate with the declared length and quote
            # the default so it renders as a string literal.
            coltype = ischema_names[coltype](collength)
            if default:
                default = "'%s'" % default
        elif coltype == 5:
            # decimal: collength packs precision (high byte) and scale
            # (low byte); scale 255 means "no scale specified".
            precision, scale = (collength & 0xFF00) >> 8, collength & 0xFF
            if scale == 255:
                scale = 0
            coltype = sqltypes.Numeric(precision, scale)
        else:
            try:
                coltype = ischema_names[coltype]
            except KeyError:
                util.warn("Did not recognize type '%s' of column '%s'" %
                          (coltype, name))
                coltype = sqltypes.NULLTYPE
        # TODO: nullability ?? (the flag decoded above is deliberately not
        # trusted yet; everything reflects as nullable)
        nullable = True
        column_info = dict(name=name, type=coltype, nullable=nullable,
                           default=default)
        columns.append(column_info)
    return columns
def test_numeric_scale(self):
    """Numeric(scale=2) maps to xsd:decimal restricted to 2 fraction digits."""
    from sqlalchemy import Column, types

    column = Column('_column', types.Numeric(scale=2))
    elements = self._get_elements((('column', column),))
    self.assertEqual(len(elements), 1)
    self.assertEqual(
        elements[0].attrib,
        {'minOccurs': '0', 'name': 'column', 'nillable': 'true'})

    restrictions = elements[0].findall(
        self._make_xpath('. simpleType restriction'))
    self.assertEqual(len(restrictions), 1)
    self.assertEqual(restrictions[0].attrib, {'base': 'xsd:decimal'})

    fraction_digits = restrictions[0].findall(
        self._make_xpath('. fractionDigits'))
    self.assertEqual(len(fraction_digits), 1)
    self.assertEqual(fraction_digits[0].attrib, {'value': '2'})
def test_basic_reflection(self):
    """Create two related tables, reflect them, and compare definitions."""
    meta = self.metadata

    users = Table(
        'engine_users',
        meta,
        Column('user_id', types.INT, primary_key=True),
        Column('user_name', types.VARCHAR(20), nullable=False),
        Column('test1', types.CHAR(5), nullable=False),
        Column('test2', types.Float(5), nullable=False),
        Column('test3', types.Text()),
        Column('test4', types.Numeric, nullable=False),
        Column('test5', types.DateTime),
        Column('parent_user_id', types.Integer,
               ForeignKey('engine_users.user_id')),
        Column('test6', types.DateTime, nullable=False),
        Column('test7', types.Text()),
        Column('test8', types.LargeBinary()),
        Column('test_passivedefault2', types.Integer, server_default='5'),
        Column('test9', types.BINARY(100)),
        Column('test_numeric', types.Numeric()),
    )
    addresses = Table(
        'engine_email_addresses',
        meta,
        Column('address_id', types.Integer, primary_key=True),
        Column('remote_user_id', types.Integer, ForeignKey(users.c.user_id)),
        Column('email_address', types.String(20)),
    )
    meta.create_all()

    # Reflect both tables into a fresh MetaData and compare.
    reflected_meta = MetaData()
    reflected_users = Table('engine_users', reflected_meta,
                            autoload=True, autoload_with=testing.db)
    reflected_addresses = Table('engine_email_addresses', reflected_meta,
                                autoload=True, autoload_with=testing.db)
    self.assert_tables_equal(users, reflected_users)
    self.assert_tables_equal(addresses, reflected_addresses)
class Currency(BaseObject):
    """A currency

    Currencies are identified by their ISO 4217 three letter currency code.
    """

    __tablename__ = "currency"

    id = schema.Column(types.Integer(),
                       schema.Sequence("currency_id_seq", optional=True),
                       primary_key=True, autoincrement=True)
    # ISO 4217 three-letter code.
    code = schema.Column(types.String(3), nullable=False)
    rate = schema.Column(types.Numeric(precision=6, scale=2), nullable=False)
    until = schema.Column(types.Date())

    def __repr__(self):
        return "<Currency id=%s, code=%s rate=%.2f>" % (
            self.id, self.code, self.rate)
def create_table(self, name, *columns, **kwargs):
    """
    Set up the columns and add a unique key.

    The Database class can be used without creating a database first, but
    then pandas will take care of it on first access, and no primary key
    will be created. It is therefore encouraged to explicitly create the
    table with this method in the Database subclass constructor.

    .. note:: A primary key column will be automatically added. By default
        it is named as in the ID_LBL constant. Change by specifying
        primary_key_col=

    Args:
        name: The table to create
        *columns: The list of :class:`sqlalchemy.types.Column` objects to
            add to the table
        primary_key_col (str, optional): The column to use as primary key
            (must be sa.Integer type)

    Raises:
        Exception: if any keyword argument other than ``primary_key_col``
            is supplied.
    """
    # FIX: the original validated kwargs inside a bare ``try/except`` whose
    # own intentional ``raise`` was swallowed by the except clause (and
    # which also caught KeyboardInterrupt/SystemExit).  ``pop`` with a
    # default expresses the same contract directly: the outcome is
    # identical for every combination of arguments.
    primary_key_col = kwargs.pop('primary_key_col', ID_LBL)
    if kwargs:
        raise Exception("Invalid arguments to create_table")
    Table(
        name,
        self._metadata,
        Column(primary_key_col, sa_t.Integer(), primary_key=True),
        # Dates are stored as julian dates (days since epoch); the extra
        # scale allows microseconds to be stored.
        Column(TSTAMP_LBL, sa_t.Numeric(20, 12)),
        *columns,
        keep_existing=True)
    self._metadata.create_all()
class DutchCompany(BaseObject):
    """Information about a Dutch company."""

    __tablename__ = "dutch_company"

    id = schema.Column(types.Integer(), primary_key=True, autoincrement=True)
    session_id = schema.Column(
        types.Integer(),
        schema.ForeignKey("session.id", onupdate="CASCADE",
                          ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    session = orm.relation(
        "SurveySession",
        cascade="all,delete-orphan",
        single_parent=True,
        backref=orm.backref("dutch_company", uselist=False, cascade="all"),
    )

    title = schema.Column(types.Unicode(128))

    # Visiting address.
    address_visit_address = schema.Column(types.UnicodeText())
    address_visit_postal = schema.Column(types.Unicode(16))
    address_visit_city = schema.Column(types.Unicode(64))
    # Postal address.
    address_postal_address = schema.Column(types.UnicodeText())
    address_postal_postal = schema.Column(types.Unicode(16))
    address_postal_city = schema.Column(types.Unicode(64))

    # Contact details.
    email = schema.Column(types.String(128))
    phone = schema.Column(types.String(32))
    activity = schema.Column(types.Unicode(64))

    # Submission metadata.
    submitter_name = schema.Column(types.Unicode(64))
    submitter_function = schema.Column(types.Unicode(64))
    department = schema.Column(types.Unicode(64))
    location = schema.Column(types.Unicode(64))
    submit_date = schema.Column(types.Date(), default=functions.now())

    employees = schema.Column(Enum([None, "40h", "max25", "over25"]))
    absentee_percentage = schema.Column(types.Numeric(precision=5, scale=2))
    accidents = schema.Column(types.Integer())
    incapacitated_workers = schema.Column(types.Integer())
    arbo_expert = schema.Column(types.Unicode(128))
    works_council_approval = schema.Column(types.Date())
def _fieldformat(self, specify_field, field):
    """Wrap ``field`` so its SQL value matches the Specify field's type."""
    if specify_field.type == "java.lang.Boolean":
        return field != 0
    if specify_field.type in ("java.lang.Integer", "java.lang.Short"):
        return field
    if (specify_field is CollectionObject_model.get_field('catalogNumber')
            and self.catalog_number_is_numeric()):
        # 65 is the mysql max precision
        return cast(field, types.Numeric(65))
    if specify_field is Agent_model.get_field('agentType'):
        agent_type_names = {
            0: 'Organization',
            1: 'Person',
            2: 'Other',
            3: 'Group',
        }
        return case(agent_type_names, field)
    return field