class MAC_INSPROPERTYY(Base):
    __tablename__ = "MAC_INSPROPERTYY"

    SGNYEAR = Column(String(8, u'utf8_bin'), primary_key=True, nullable=False)
    INSTITUTIONID = Column(Numeric(20, 0), primary_key=True, nullable=False)
    SHORTNAME = Column(String(40, u'utf8_bin'))
    FULLNAME = Column(String(200, u'utf8_bin'))
    INCOMES = Column(Numeric(22, 4))
    INCOMEENTERPRISE = Column(Numeric(22, 4))
    INCOMEFAMILY = Column(Numeric(22, 4))
    INCOMEMOTORVEHICLE = Column(Numeric(22, 4))
    INCOMELIABILITY = Column(Numeric(22, 4))
    INCOMEPROJECT = Column(Numeric(22, 4))
    INCOMEFREIGHT = Column(Numeric(22, 4))
    INCOMESHIP = Column(Numeric(22, 4))
    INCOMECREDIT = Column(Numeric(22, 4))
    INCOMEGUARANTEE = Column(Numeric(22, 4))
    INCOMECREDITGUARANTEE = Column(Numeric(22, 4))
    INCOMESPECIALRISKS = Column(Numeric(22, 4))
    INCOMEFARM = Column(Numeric(22, 4))
    INCOMESHORTTIMEHEALTH = Column(Numeric(22, 4))
    INCOMEACCIDENT = Column(Numeric(22, 4))
    INCOMEOTHER = Column(Numeric(22, 4))
    INSUREDSAVINGS = Column(Numeric(22, 4))
    INSUREDINVESTMENT = Column(Numeric(22, 4))
    PAYMENTS = Column(Numeric(22, 4))
    PAYMENTENTERPRISE = Column(Numeric(22, 4))
    PAYMENTFAMILY = Column(Numeric(22, 4))
    PAYMENTMOTORVEHICLE = Column(Numeric(22, 4))
    PAYMENTLIABILITY = Column(Numeric(22, 4))
    PAYMENTPROJECT = Column(Numeric(22, 4))
    PAYMENTFREIGHT = Column(Numeric(22, 4))
    PAYMENTSHIP = Column(Numeric(22, 4))
    PAYMENTCREDIT = Column(Numeric(22, 4))
    PAYMENTGUARANTEE = Column(Numeric(22, 4))
    PAYMENTCREDITGUARANTEE = Column(Numeric(22, 4))
    PAYMENTSPECIALRISKS = Column(Numeric(22, 4))
    PAYMENTFARM = Column(Numeric(22, 4))
    PAYMENTSHORTTIMEHEALTH = Column(Numeric(22, 4))
    PAYMENTACCIDENT = Column(Numeric(22, 4))
    PAYMENTOTHER = Column(Numeric(22, 4))
    CLAIM = Column(Numeric(22, 4))
    OUTSTANDING = Column(Numeric(22, 4))
def test_numeric_as_float(self):
    self._do_test(
        Numeric(precision=8, scale=4, asdecimal=False),
        [15.7563, decimal.Decimal("15.7563")],
        [15.7563],
    )
def test_numeric_null_as_float(self):
    self._do_test(
        Numeric(precision=8, scale=4, asdecimal=False),
        [None],
        [None],
    )
class Query(Model):
    """ORM model for SQL query"""

    __tablename__ = 'query'
    id = Column(Integer, primary_key=True)
    client_id = Column(String(11), unique=True, nullable=False)
    database_id = Column(Integer, ForeignKey('dbs.id'), nullable=False)

    # Store the tmp table into the DB only if the user asks for it.
    tmp_table_name = Column(String(256))
    user_id = Column(Integer, ForeignKey('ab_user.id'), nullable=True)
    status = Column(String(16), default=QueryStatus.PENDING)
    tab_name = Column(String(256))
    sql_editor_id = Column(String(256))
    schema = Column(String(256))
    sql = Column(Text)

    # Query to retrieve the results,
    # used only when select_as_cta_used is true.
    select_sql = Column(Text)
    executed_sql = Column(Text)

    # Could be configured in the superset config.
    limit = Column(Integer)
    limit_used = Column(Boolean, default=False)
    select_as_cta = Column(Boolean)
    select_as_cta_used = Column(Boolean, default=False)

    progress = Column(Integer, default=0)  # 1..100
    # # of rows in the result set or rows modified.
    rows = Column(Integer)
    error_message = Column(Text)
    # key used to store the results in the results backend
    results_key = Column(String(64), index=True)

    # Using Numeric in place of DateTime for sub-second precision
    # stored as seconds since epoch, allowing for milliseconds
    start_time = Column(Numeric(precision=20, scale=6))
    start_running_time = Column(Numeric(precision=20, scale=6))
    end_time = Column(Numeric(precision=20, scale=6))
    end_result_backend_time = Column(Numeric(precision=20, scale=6))
    tracking_url = Column(Text)

    changed_on = Column(
        DateTime,
        default=datetime.utcnow, onupdate=datetime.utcnow, nullable=True)

    database = relationship(
        'Database',
        foreign_keys=[database_id],
        backref=backref('queries', cascade='all, delete-orphan'))
    user = relationship(security_manager.user_model, foreign_keys=[user_id])

    __table_args__ = (
        sqla.Index('ti_user_id_changed_on', user_id, changed_on),
    )

    @property
    def limit_reached(self):
        return self.rows == self.limit if self.limit_used else False

    def to_dict(self):
        return {
            'changedOn': self.changed_on,
            'changed_on': self.changed_on.isoformat(),
            'dbId': self.database_id,
            'db': self.database.database_name,
            'endDttm': self.end_time,
            'errorMessage': self.error_message,
            'executedSql': self.executed_sql,
            'id': self.client_id,
            'limit': self.limit,
            'progress': self.progress,
            'rows': self.rows,
            'schema': self.schema,
            'ctas': self.select_as_cta,
            'serverId': self.id,
            'sql': self.sql,
            'sqlEditorId': self.sql_editor_id,
            'startDttm': self.start_time,
            'state': self.status.lower(),
            'tab': self.tab_name,
            'tempTable': self.tmp_table_name,
            'userId': self.user_id,
            'user': user_label(self.user),
            'limit_reached': self.limit_reached,
            'resultsKey': self.results_key,
            'trackingUrl': self.tracking_url,
        }

    @property
    def name(self):
        """Name property"""
        ts = datetime.now().isoformat()
        ts = ts.replace('-', '').replace(':', '').split('.')[0]
        tab = (self.tab_name.replace(' ', '_').lower()
               if self.tab_name else 'notab')
        tab = re.sub(r'\W+', '', tab)
        return 'sqllab_{tab}_{ts}'.format(**locals())
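# The comment on start_time above explains the design choice: Numeric(20, 6)
# epoch seconds keep sub-second precision portably across backends that
# truncate DATETIME values. A minimal sketch of the conversion such columns
# imply (helper names here are illustrative, not part of the Superset model):
import time
from datetime import datetime, timezone
from decimal import Decimal


def now_to_epoch_numeric():
    # scale=6 in the column matches microsecond resolution
    return Decimal(repr(time.time())).quantize(Decimal('0.000001'))


def epoch_numeric_to_datetime(value):
    return datetime.fromtimestamp(float(value), tz=timezone.utc)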
def test_render_literal_numeric_asfloat(self):
    self._literal_round_trip(
        Numeric(precision=8, scale=4, asdecimal=False),
        [15.7563, decimal.Decimal("15.7563")],
        [15.7563],
    )
def test_number_types(self):
    specs = [(Numeric(5, 2), NUMBER(5, 2)), (NUMBER, NUMBER())]
    self._run_test(specs, ["precision", "scale"])
class FrtInventory(db.Model, BaseMixin, StoreMixin, DataCopyMixin,
                   CarTypeBuilderMixin):
    __tablename__ = 'frt_inventory'

    brand_code = Column(String(100))
    brand_name = Column(String(200))
    class_code = Column(String(100))
    class_name = Column(String(200))
    cartype_code = Column(String(100))
    cartype = Column(String(200))
    subtype_code = Column(String(100))
    subtype_name = Column(String(200))
    color_name = Column(String(50))
    color_attribute = Column(String(50))
    warehouse_name = Column(String(200))
    location_name = Column(String(200))
    out_factory_date = Column(Date)
    vin = Column(String(50))
    inv_status = Column(String(50))
    _invday = Column('invday', Integer)
    stockage_cat = Column(String(50))
    in_price = Column(Numeric(precision=12, scale=2))
    mrsp = Column(Numeric(precision=12, scale=2))
    rebate_amt = Column(Numeric(precision=12, scale=2))
    sync_timestamp = Column(Integer)
    shared = Column(String(50))

    valid_lookup_type = ('store', 'group')
    # inventory status values: in stock, pending inbound, transfer in
    # transit, purchase return, purchase return request, transferred,
    # purchase in transit, transfer request, sales outbound
    valid_status = (u'在库', u'待入库', u'调拨在途', u'采购退货', u'采购退库申请',
                    u'已调拨', u'采购在途', u'调拨申请', u'销售出库')
    default_display_status = (u'在库', u'待入库', u'调拨在途', u'采购在途')
    # stock-age buckets in days: <=30, 30-60, 60-90, 90-120, 120-180, >180
    stockage_cat_lookups = {
        '1': u'30天内',
        '2': u'30天到60天',
        '3': u'60天到90天',
        '4': u'90天到120天',
        '5': u'120天到180天',
        '6': u'180天以上'
    }

    @property
    def wavehouse_name(self):
        # legacy alias (sic) for warehouse_name, kept for API compatibility
        return self.warehouse_name

    @wavehouse_name.setter
    def wavehouse_name(self, name):
        self.warehouse_name = name

    @property
    def invday(self):
        return self._invday

    @invday.setter
    def invday(self, invday):
        self._invday = invday
        self.stockage_cat = FrtInventory.get_cat_by_stockage(invday)

    @classmethod
    def find_latest_sync_timestamp(cls, store_id):
        return db.session.query(db.func.max(cls.sync_timestamp)).filter(
            cls.store_id == store_id).scalar()

    @classmethod
    def find_all_by_sync_timestamp(cls, store_id, sync_timestamp):
        return cls.query.filter(
            and_(cls.store_id == store_id,
                 cls.sync_timestamp == sync_timestamp)).all()

    @classmethod
    def find_all_store_inventories(cls, store_id, **kwargs):
        latest_sync_timestamp = cls.find_latest_sync_timestamp(store_id)
        query = cls.query.filter(cls.store_id == store_id).filter(
            cls.sync_timestamp == latest_sync_timestamp)
        if kwargs.get('cartype_code'):
            query = query.filter(
                cls.cartype_code == kwargs.get('cartype_code'))
        if kwargs.get('subtype_code'):
            query = query.filter(
                cls.subtype_code == kwargs.get('subtype_code'))
        if kwargs.get('color_name'):
            query = query.filter(cls.color_name == kwargs.get('color_name'))
        if kwargs.get('color_attribute'):
            query = query.filter(
                cls.color_attribute == kwargs.get('color_attribute'))

        # status filter
        if hasattr(g, 'user') and g.user and g.user.is_role_in_store_id(
                store_id, USER_ROLE_STORE_SALES):
            status_filters = cls.default_display_status
        else:
            status_filters = cls.valid_status
        query = query.filter(cls.inv_status.in_(status_filters))

        if kwargs.get('keywords'):
            raw_keywords = kwargs.get('keywords')
            keywords = '%' + raw_keywords + '%'
            if is_float(raw_keywords):
                # numeric keyword means a price search (in units of 10000
                # when the user types a value under 1000)
                price = float(raw_keywords)
                if price < 1000:
                    price *= 10000
                min_price = price - 1000
                max_price = price + 1000
                query = query.filter(
                    and_(cls.mrsp >= min_price, cls.mrsp <= max_price))
            else:
                query = query.filter(
                    or_(cls.brand_name.like(keywords),
                        cls.class_name.like(keywords),
                        cls.cartype.like(keywords),
                        cls.subtype_name.like(keywords),
                        cls.color_name.like(keywords),
                        cls.color_attribute.like(keywords),
                        cls.vin.like(keywords)))

        page_info = get_page_info_from_dict(kwargs)
        return query.paginate(page_info['page'], page_info['per_page'])

    @classmethod
    def find_all_brand_code(cls, store_id, sync_timestamp):
        if not sync_timestamp:
            sync_timestamp = cls.find_latest_sync_timestamp(store_id)
        return db.session.query(distinct(cls.brand_code)).filter(
            and_(cls.store_id == store_id,
                 cls.sync_timestamp == sync_timestamp)).all()

    @classmethod
    def build_color_code_info(cls, store_id, latest_sync_timestamp):
        colors = db.session.query(distinct(cls.color_name)).filter(
            and_(cls.store_id == store_id,
                 cls.sync_timestamp == latest_sync_timestamp)).order_by(
                     cls.color_name).all()
        return [{'code': color[0], 'name': color[0]}
                for color in colors if color[0]]

    @classmethod
    def build_color_attribute_info(cls, store_id, latest_sync_timestamp):
        colors = db.session.query(distinct(cls.color_attribute)).filter(
            and_(cls.store_id == store_id,
                 cls.sync_timestamp == latest_sync_timestamp)).order_by(
                     cls.color_attribute).all()
        return [{'code': color[0], 'name': color[0]}
                for color in colors if color[0]]

    @classmethod
    def get_lookups(cls, store_id, type):
        if type not in cls.valid_lookup_type:
            raise InvalidFrtInventoryLookupTypeException()
        store_id = int(store_id)
        if 'store' == type:
            return cls.get_store_lookups_from_cache(store_id)
        elif 'group' == type:
            return FrtSharedInventory.get_group_lookups_from_cache(store_id)

    @classmethod
    @cache.memoize()
    def get_store_lookups_from_cache(cls, store_id):
        latest_sync_timestamp = cls.find_latest_sync_timestamp(store_id)
        return {
            'cartypes': cls.build_car_code_info(store_id,
                                                latest_sync_timestamp),
            'colors': cls.build_color_code_info(store_id,
                                                latest_sync_timestamp),
            'color_attributes': cls.build_color_attribute_info(
                store_id, latest_sync_timestamp)
        }

    @classmethod
    def get_cartypes_count(cls, store_id, stockage=None):
        latest_sync_timestamp = cls.find_latest_sync_timestamp(store_id)
        query = db.session.query(
            cls.cartype_code, cls.cartype, func.count(cls.id)).filter(
                and_(cls.store_id == store_id,
                     cls.sync_timestamp == latest_sync_timestamp))
        if stockage:
            query = query.filter(cls.stockage_cat == stockage)
        result = query.group_by(cls.cartype_code).all()
        if result:
            result = [
                dict(zip(('cartype_code', 'cartype', 'count'), data))
                for data in result
            ]
        return result

    @classmethod
    def get_subtypes_count(cls, store_id, cartype_code=None):
        latest_sync_timestamp = cls.find_latest_sync_timestamp(store_id)
        query = db.session.query(
            cls.subtype_code, cls.subtype_name, func.count(cls.id)).filter(
                and_(cls.store_id == store_id,
                     cls.sync_timestamp == latest_sync_timestamp))
        if cartype_code:
            query = query.filter(cls.cartype_code == cartype_code)
        result = query.group_by(cls.subtype_code).all()
        if result:
            result = [
                dict(zip(('subtype_code', 'subtype_name', 'count'), data))
                for data in result
            ]
        return result

    @classmethod
    def get_stockages_count(cls, store_id):
        latest_sync_timestamp = cls.find_latest_sync_timestamp(store_id)
        result = db.session.query(
            cls.stockage_cat, func.count(cls.id)).filter(
                and_(cls.store_id == store_id,
                     cls.sync_timestamp == latest_sync_timestamp)).group_by(
                         cls.stockage_cat).all()
        if result:
            result = map(
                lambda d: (d[0], cls.stockage_cat_lookups.get(d[0], ''),
                           d[1]),
                result)
            result = [
                dict(zip(('stockage_code', 'stockage_name', 'count'), data))
                for data in result
            ]
        return result

    @staticmethod
    def get_cat_by_stockage(invday):
        """
        Hardcode the stockage cat calculation for now.
        Divide invday by 2 because demo data problem
        """
        if not invday:
            invday = 0
        else:
            try:
                invday = int(invday)
            except Exception:
                invday = 0
        if invday <= 30:
            return '1'
        elif 30 < invday <= 60:
            return '2'
        elif 60 < invday <= 90:
            return '3'
        elif 90 < invday <= 120:
            return '4'
        elif 120 < invday <= 180:
            return '5'
        else:
            return '6'
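# Usage sketch for the invday setter above: assigning the property also
# derives stockage_cat, so callers never set the category directly
# (illustrative values; assumes the model above is importable):
inv = FrtInventory()
inv.invday = 75
assert inv.stockage_cat == '3'  # 60 < 75 <= 90 days in stock
assert FrtInventory.stockage_cat_lookups[inv.stockage_cat] == u'60天到90天'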
def post(self):
    args = self.reqparse.parse_args()
    catid = args['catid']
    cityid = args['cityid']
    distid = args['distid']
    cbdid = args['cbdid']
    name = args['name']
    pageIndex = args['pageindex']
    pageSize = args['pagesize']
    sortName = args['sortname']
    sortOrder = args['sortorder']
    tuition = args['price']

    # price arrives as a "low~high" range string
    tuitionB, tuitionE = tuition.split('~')

    # a filter value of -1 means "no filter" for that field
    query = Training.query
    query = query.filter(or_(Training.catid == catid, -1 == catid))
    query = query.filter(or_(Training.cityid == cityid, -1 == cityid))
    query = query.filter(or_(Training.districtid == distid, -1 == distid))
    query = query.filter(or_(Training.cbdid == cbdid, -1 == cbdid))
    query = query.filter(cast(Training.price, Numeric(12, 2)) >= tuitionB)
    query = query.filter(cast(Training.price, Numeric(12, 2)) <= tuitionE)
    query = query.filter(
        or_(Training.name.like('%' + name + '%'), name == ''))

    if sortName == "price":
        if sortOrder == 'desc':
            query = query.order_by(Training.price.desc())
        else:
            query = query.order_by(Training.price.asc())
    else:
        if sortOrder == 'desc':
            query = query.order_by(Training.sortindex.desc())
        else:
            query = query.order_by(Training.sortindex.asc())

    data = query.limit(pageSize).offset((pageIndex - 1) * pageSize)
    data = [item.to_dict() for item in data]

    totalRow = db.session.query(func.count(Training.id)) \
        .filter(or_(Training.catid == catid, -1 == catid)) \
        .filter(or_(Training.cityid == cityid, -1 == cityid)) \
        .filter(or_(Training.districtid == distid, -1 == distid)) \
        .filter(or_(Training.cbdid == cbdid, -1 == cbdid)) \
        .filter(cast(Training.price, Numeric(12, 2)) >= tuitionB) \
        .filter(cast(Training.price, Numeric(12, 2)) <= tuitionE) \
        .filter(or_(Training.name.like('%' + name + '%'), name == '')) \
        .scalar()
    # ceiling division so an exactly-full last page does not add an empty one
    totalPage = (totalRow + pageSize - 1) // pageSize

    return jsonify({
        'msg': '',
        'code': 200,
        'data': {
            'page_number': pageIndex,
            'page_size': pageSize,
            'total_page': totalPage,
            'total_row': totalRow,
            'list': data
        }
    })
def test_decimal_notation(self, metadata, connection):
    numeric_table = Table(
        "numeric_table",
        metadata,
        Column(
            "id",
            Integer,
            Sequence("numeric_id_seq", optional=True),
            primary_key=True,
        ),
        Column("numericcol", Numeric(precision=38, scale=20, asdecimal=True)),
    )
    metadata.create_all(connection)

    test_items = [
        decimal.Decimal(d)
        for d in (
            "1500000.00000000000000000000",
            "-1500000.00000000000000000000",
            "1500000",
            "0.0000000000000000002",
            "0.2",
            "-0.0000000000000000002",
            "-2E-2",
            "156666.458923543",
            "-156666.458923543",
            "1",
            "-1",
            "-1234",
            "1234",
            "2E-12",
            "4E8",
            "3E-6",
            "3E-7",
            "4.1",
            "1E-1",
            "1E-2",
            "1E-3",
            "1E-4",
            "1E-5",
            "1E-6",
            "1E-7",
            "1E-1",
            "1E-8",
            "0.2732E2",
            "-0.2432E2",
            "4.35656E2",
            "-02452E-2",
            "45125E-2",
            "1234.58965E-2",
            "1.521E+15",
            # previously, these were at -1E-25, which were inserted
            # cleanly however we only got back 20 digits of accuracy.
            # pyodbc as of 4.0.22 now disallows the silent truncation.
            "-1E-20",
            "1E-20",
            "1254E-20",
            "-1203E-20",
            "0",
            "-0.00",
            "-0",
            "4585E12",
            "000000000000000000012",
            "000000000000.32E12",
            "00000000000000.1E+12",
            # these are no longer accepted by pyodbc 4.0.22 but it seems
            # they were not actually round-tripping correctly before that
            # in any case
            # '-1E-25',
            # '1E-25',
            # '1254E-25',
            # '-1203E-25',
            # '000000000000.2E-32',
        )
    ]

    for value in test_items:
        result = connection.execute(
            numeric_table.insert(), dict(numericcol=value))
        primary_key = result.inserted_primary_key
        returned = connection.scalar(
            select(numeric_table.c.numericcol).where(
                numeric_table.c.id == primary_key[0]))
        eq_(value, returned)
engine = create_engine(
    'postgresql+psycopg2://{user}:{password}@{host}:{port}/{dbname}'.format(
        **db_config))

cinema = Table('cinema', metadata,
               Column('id', Integer, primary_key=True),
               Column('cinema_name', String, unique=True, nullable=False),
               Column('cinema_address', String, unique=True, nullable=False))

film = Table('film', metadata,
             Column('id', Integer, primary_key=True),
             Column('film_name', String, unique=True, nullable=False),
             Column('film_duration', Integer, nullable=False))

cinema_session = Table('cinema_session', metadata,
                       Column('id', Integer, primary_key=True),
                       Column('session_place', String, nullable=False),
                       Column('session_start', DateTime, nullable=False),
                       Column('session_price', Numeric(5, 2), nullable=False),
                       Column('cinema_id', Integer, ForeignKey('cinema.id')),
                       Column('film_id', Integer, ForeignKey('film.id')))

client = Table('client', metadata,
               Column('id', Integer, primary_key=True),
               Column('first_name', String, unique=True, nullable=False),
               Column('last_name', String, unique=True, nullable=False),
               Column('email', String, unique=True, nullable=False))

session_client = Table(
    'session_client', metadata,
    Column('session_id', Integer, ForeignKey('cinema_session.id'),
           primary_key=True),
    Column('client_id', Integer, ForeignKey('client.id'), primary_key=True))
class Stop(Base):
    datasource = config.DATASOURCE_GTFS
    filename = 'stops.txt'

    __tablename__ = 'stops'

    stop_id = Column(String(255), primary_key=True, index=True,
                     nullable=False)
    stop_code = Column(String(50))
    stop_name = Column(String(255), nullable=False)
    stop_desc = Column(String(255))
    stop_lat = Column(Numeric(12, 9), nullable=False)
    stop_lon = Column(Numeric(12, 9), nullable=False)
    zone_id = Column(String(50))
    stop_url = Column(String(255))
    location_type = Column(Integer, index=True, default=0)
    parent_station = Column(String(255))
    stop_timezone = Column(String(50))
    wheelchair_boarding = Column(Integer, default=0)
    platform_code = Column(String(50))
    direction = Column(String(50))
    position = Column(String(50))

    stop_features = relationship(
        'StopFeature',
        primaryjoin='Stop.stop_id==StopFeature.stop_id',
        foreign_keys='(Stop.stop_id)',
        uselist=True,
        viewonly=True)

    stop_times = relationship(
        'StopTime',
        primaryjoin='Stop.stop_id==StopTime.stop_id',
        foreign_keys='(Stop.stop_id)',
        uselist=True,
        viewonly=True)

    @classmethod
    def add_geometry_column(cls):
        if not hasattr(cls, 'geom'):
            cls.geom = Column(Geometry(geometry_type='POINT',
                                       srid=config.SRID))

    @classmethod
    def add_geom_to_dict(cls, row):
        args = (config.SRID, row['stop_lon'], row['stop_lat'])
        row['geom'] = 'SRID={0};POINT({1} {2})'.format(*args)

    @property
    def routes(self):
        '''return list of routes servicing this stop
        @todo: rewrite the cache to use timeout checking in Base.py
        '''
        try:
            self._routes
        except AttributeError:
            from gtfsdb.model.route import Route
            from gtfsdb.model.trip import Trip
            from gtfsdb.model.stop_time import StopTime

            session = object_session(self)
            q = session.query(Route)
            f = ((StopTime.stop_id == self.stop_id) &
                 (StopTime.departure_time != ''))
            q = q.filter(Route.trips.any(Trip.stop_times.any(f)))
            q = q.order_by(Route.route_sort_order)
            self._routes = q.all()
        return self._routes

    @property
    def headsigns(self):
        '''Returns a dictionary of all unique (route_id, headsign) tuples
        used at the stop and the number of trips the headsign is used
        '''
        if not hasattr(self, '_headsigns'):
            from gtfsdb.model.stop_time import StopTime

            self._headsigns = defaultdict(int)
            session = object_session(self)
            log.info("QUERY StopTime")
            q = session.query(StopTime)
            q = q.options(joinedload_all('trip.route'))
            q = q.filter_by(stop_id=self.stop_id)
            for r in q:
                headsign = r.stop_headsign or r.trip.trip_headsign
                self._headsigns[(r.trip.route, headsign)] += 1
        return self._headsigns

    @property
    def agencies(self):
        '''return list of agency ids with routes hitting this stop
        @todo: rewrite the cache to use timeout checking in Base.py
        '''
        try:
            self._agencies
        except AttributeError:
            self._agencies = []
            if self.routes:
                for r in self.routes:
                    if r.agency_id not in self._agencies:
                        self._agencies.append(r.agency_id)
        return self._agencies

    def is_active(self, date=None):
        """:return False whenever we see that the stop has zero stop_times
        on the given input date (which defaults to 'today')

        @NOTE: use caution with this routine. calling this for multiple
        stops can really slow things down, since you're querying large trip
        and stop_time tables, and asking for a schedule of each stop.
        I used to call this multiple times via route_stop to make sure each
        stop was active ... that was really bad performance wise.
        """
        _is_active = False
        if date is None:
            date = datetime.date.today()

        from gtfsdb.model.stop_time import StopTime
        st = StopTime.get_departure_schedule(self.session, self.stop_id,
                                             date, limit=1)
        if st and len(st) > 0:
            _is_active = True
        return _is_active

    @classmethod
    def active_stops(cls, session, limit=None, active_filter=True,
                     date=None):
        '''check for active stops'''
        ret_val = None

        # step 1: get stops
        q = session.query(Stop)
        if limit:
            q = q.limit(limit)
        stops = q.all()

        # step 2: filter active stops only ???
        if active_filter:
            ret_val = []
            for s in stops:
                if s.is_active(date):
                    ret_val.append(s)
        else:
            ret_val = stops

        return ret_val

    @classmethod
    def active_stop_ids(cls, session, limit=None, active_filter=True):
        '''return an array of stop_id / agencies pairs
        {stop_id:'2112', agencies:['C-TRAN', 'TRIMET']}
        '''
        ret_val = []
        stops = cls.active_stops(session, limit, active_filter)
        for s in stops:
            ret_val.append({"stop_id": s.stop_id, "agencies": s.agencies})
        return ret_val
from datetime import datetime

from sqlalchemy import (Table, Column, Integer, Numeric, String, ForeignKey,
                        DateTime, MetaData)

from A2ImportCreatedEngine import Initialize_created_engine

metadata = MetaData()

cookies = Table('cookies', metadata,
                Column('cookie_id', Integer(), primary_key=True),
                Column('cookie_name', String(50), index=True),
                Column('cookie_recipe_url', String(255)),
                Column('cookie_sku', String(55)),
                Column('quantity', Integer()),
                Column('unit_cost', Numeric(12, 2)))

users = Table('users', metadata,
              Column('user_id', Integer(), primary_key=True),
              Column('customer_number', Integer(), autoincrement=True),
              Column('username', String(15), nullable=False, unique=True),
              Column('email_address', String(255), nullable=False),
              Column('phone', String(20), nullable=False),
              Column('password', String(25), nullable=False),
              Column('created_on', DateTime(), default=datetime.now),
              Column('updated_on', DateTime(), default=datetime.now,
                     onupdate=datetime.now))

orders = Table('orders', metadata,
               Column('order_id', Integer(), primary_key=True),
               Column('user_id', ForeignKey('users.user_id')))

line_items = Table('line_items', metadata,
                   Column('line_items_id', Integer(), primary_key=True),
BLOCKS = Table(
    'blocks',
    metadata,
    Column('timestamp', TIMESTAMP),
    Column('number', BigInteger),
    Column('hash', String, primary_key=True),
    Column('parent_hash', String),
    Column('nonce', String),
    Column('sha3_uncles', String),
    Column('logs_bloom', String),
    Column('transactions_root', String),
    Column('state_root', String),
    Column('receipts_root', String),
    Column('miner', String),
    Column('difficulty', Numeric(38)),
    Column('total_difficulty', Numeric(38)),
    Column('size', BigInteger),
    Column('extra_data', String),
    Column('gas_limit', BigInteger),
    Column('gas_used', BigInteger),
    Column('transaction_count', BigInteger),
    Column('base_fee_per_gas', BigInteger),
)

TRANSACTIONS = Table(
    'transactions',
    metadata,
    Column('hash', String, primary_key=True),
    Column('nonce', BigInteger),
    Column('transaction_index', BigInteger),
                      comparator=SQLiteComparator, nullable=False)


class Lake(Base):
    __tablename__ = 'lakes'

    lake_id = Column(Integer, primary_key=True)
    lake_name = Column(String)
    lake_geom = GeometryColumn(Polygon(2, srid=4326, spatial_index=False),
                               comparator=SQLiteComparator)


spots_table = Table(
    'spots', metadata,
    Column('spot_id', Integer, primary_key=True),
    Column('spot_height', Numeric()),
    GeometryExtensionColumn('spot_location', Point(2, srid=4326)))


class Spot(object):
    def __init__(self, spot_id=None, spot_height=None, spot_location=None):
        self.spot_id = spot_id
        self.spot_height = spot_height
        self.spot_location = spot_location


mapper(Spot, spots_table, properties={
    'spot_location': GeometryColumn(spots_table.c.spot_location,
class SkillPair(Base):
    __tablename__ = 'sc4_skill_pairs'
    # __tablename__ = 'skill_pairs'

    id = Column(Integer, primary_key=True)
    primary_term = Column(String(140))
    secondary_term = Column(String(140))
    number_of_times = Column(Integer)
    # ratio of pair occurrences to total postings for the primary term,
    # computed as a correlated scalar subquery against SkillPostCounter
    ratio = column_property(
        select([cast(cast(number_of_times, Float) /
                     cast(SkillPostCounter.number_of_postings, Float),
                     Numeric(7, 3))])
        .where(SkillPostCounter.skill_term == primary_term)
        .correlate_except(SkillPostCounter)
    )

    def __unicode__(self):
        return self.primary_term
# logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
engine = create_engine('sqlite:///')
metadata = MetaData(engine)

orders = Table(
    'orders', metadata,
    Column('order_id', Integer, primary_key=True),
    Column('customer_name', String(30), nullable=False),
    # pass the callable itself so the default is evaluated per insert,
    # not once at import time
    Column('order_date', DateTime, nullable=False, default=datetime.now),
)

items = Table('items', metadata,
              Column('item_id', Integer, primary_key=True),
              Column('description', String(30), nullable=False),
              Column('price', Numeric(8, 2), nullable=False))

orderitems = Table(
    'orderitems', metadata,
    Column('order_id', Integer, ForeignKey('orders.order_id'),
           primary_key=True),
    Column('item_id', Integer, ForeignKey('items.item_id'),
           primary_key=True),
    Column('price', Numeric(8, 2), nullable=False))

metadata.create_all()


class Order(object):
    def __init__(self, customer_name):
        self.customer_name = customer_name
def test_insert_floats(
    self,
    metadata,
    fe_engine,
    include_setinputsizes,
    use_fastexecutemany,
    apply_setinputsizes_flag,
):
    # changes for issue #8177 have eliminated all current expected
    # failures, but we'll leave this here in case we need it again
    expect_failure = False

    engine = fe_engine(use_fastexecutemany, apply_setinputsizes_flag)

    observations = Table(
        "Observations",
        metadata,
        Column("id", Integer, nullable=False, primary_key=True),
        Column("obs1", Numeric(19, 15), nullable=True),
        Column("obs2", Numeric(19, 15), nullable=True),
        schema="test_schema",
    )
    with engine.begin() as conn:
        metadata.create_all(conn)

    records = [
        {
            "id": 1,
            "obs1": Decimal("60.1722066045792"),
            "obs2": Decimal("24.929289808227466"),
        },
        {
            "id": 2,
            "obs1": Decimal("60.16325715615476"),
            "obs2": Decimal("24.93886459535008"),
        },
        {
            "id": 3,
            "obs1": Decimal("60.16445165123469"),
            "obs2": Decimal("24.949856300109516"),
        },
    ]

    if include_setinputsizes:
        canary = mock.Mock()

        @event.listens_for(engine, "do_setinputsizes")
        def do_setinputsizes(inputsizes, cursor, statement, parameters,
                             context):
            canary(list(inputsizes.values()))

            for key in inputsizes:
                if isinstance(key.type, Numeric):
                    inputsizes[key] = (
                        engine.dialect.dbapi.SQL_DECIMAL,
                        19,
                        15,
                    )

    with engine.begin() as conn:
        if expect_failure:
            with expect_raises(DBAPIError):
                conn.execute(observations.insert(), records)
        else:
            conn.execute(observations.insert(), records)

            eq_(
                conn.execute(
                    select(observations).order_by(observations.c.id)
                ).mappings().all(),
                records,
            )

    if include_setinputsizes:
        if apply_setinputsizes_flag:
            eq_(
                canary.mock_calls,
                [
                    # float for int? this seems wrong
                    mock.call([float, float, float]),
                    mock.call([]),
                ],
            )
        else:
            eq_(canary.mock_calls, [])
class MAC_LENDINGRATE(Base):
    __tablename__ = "MAC_LENDINGRATE"

    CHANGEDATE = Column(DateTime, primary_key=True)
    LOAN6MONTH = Column(Numeric(10, 4))
    LOAN1YEAR = Column(Numeric(10, 4))
    LOAN3YEAR = Column(Numeric(10, 4))
    LOAN5YEAR = Column(Numeric(10, 4))
    LOANABOVE5YEAR = Column(Numeric(10, 4))
    DISCOUNT = Column(String(200, u'utf8_bin'))
    OVERDUELOAN = Column(String(200, u'utf8_bin'))
    DIVERTLOAN = Column(String(200, u'utf8_bin'))
    HOUSEFUND5YEAR = Column(Numeric(10, 4))
    HOUSEFUNDABOVE5YEAR = Column(Numeric(10, 4))
    HOUSEDEVELOPMENT = Column(String(200, u'utf8_bin'))
    INDIVIDUALHOUSE6MONTH = Column(Numeric(10, 4))
    INDIVIDUALHOUSE1YEAR = Column(Numeric(10, 4))
    INDIVIDUALHOUSE3YEAR = Column(Numeric(10, 4))
    INDIVIDUALHOUSE5YEAR = Column(Numeric(10, 4))
    INDIVIDUALHOUSEABOVE5YEAR = Column(Numeric(10, 4))
    INDIVIDUALHOUSEFLOOR = Column(String(200, u'utf8_bin'))
    CREDIT = Column(String(200, u'utf8_bin'))
    SHIPPING = Column(Numeric(10, 4))
    HIGHTECHNOLOGY = Column(Numeric(10, 4))
    LOWTECHNOLOGY = Column(Numeric(10, 4))
    FACTORYLOAN = Column(Numeric(10, 4))
    DEVELOPMENTLOAN = Column(Numeric(10, 4))
    INDUSTRYLOAN = Column(Numeric(10, 4))
    PRODUCELOAN = Column(Numeric(10, 4))
    RESERVELOAN = Column(Numeric(10, 4))
    POVERTYALLEVIATIONLOAN = Column(Numeric(10, 4))
import uuid

import bcrypt
from sqlalchemy import String, Numeric, Integer
from sqlalchemy.types import TypeDecorator, CHAR
from sqlalchemy.dialects.postgresql import UUID

# reusable column type for monetary values
Amount = Numeric(8, 2)


class Password(str):
    """Coerce a string to a bcrypt password.

    Rationale: for an easy string comparison, so we can say
    ``some_password == 'hello123'``

    .. seealso::

        https://pypi.python.org/pypi/bcrypt/

    """

    def __new__(cls, value, salt=None, crypt=True):
        if isinstance(value, unicode):
            value = value.encode('utf-8')
        if crypt:
            value = bcrypt.hashpw(value, salt or bcrypt.gensalt(4))
        return str.__new__(cls, value)

    def __eq__(self, other):
        if not isinstance(other, Password):
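# The snippet above is cut off inside __eq__. The comparison its docstring
# promises (some_password == 'hello123') conventionally works by re-hashing
# the candidate with the stored hash as salt, since bcrypt embeds the salt
# parameters in its output. A self-contained sketch of that check (an
# assumption about the elided body, not the original's verbatim code):
import bcrypt


def check_password(stored_hash, candidate):
    # hashpw(candidate, stored_hash) reproduces stored_hash iff they match
    return bcrypt.hashpw(candidate.encode('utf-8'), stored_hash) == stored_hash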
def test_decimal_notation(self):
    numeric_table = Table(
        'numeric_table', metadata,
        Column('id', Integer, Sequence('numeric_id_seq', optional=True),
               primary_key=True),
        Column('numericcol',
               Numeric(precision=38, scale=20, asdecimal=True)))
    metadata.create_all()

    test_items = [
        decimal.Decimal(d) for d in (
            '1500000.00000000000000000000',
            '-1500000.00000000000000000000',
            '1500000',
            '0.0000000000000000002',
            '0.2',
            '-0.0000000000000000002',
            '-2E-2',
            '156666.458923543',
            '-156666.458923543',
            '1',
            '-1',
            '-1234',
            '1234',
            '2E-12',
            '4E8',
            '3E-6',
            '3E-7',
            '4.1',
            '1E-1',
            '1E-2',
            '1E-3',
            '1E-4',
            '1E-5',
            '1E-6',
            '1E-7',
            '1E-1',
            '1E-8',
            '0.2732E2',
            '-0.2432E2',
            '4.35656E2',
            '-02452E-2',
            '45125E-2',
            '1234.58965E-2',
            '1.521E+15',
            # previously, these were at -1E-25, which were inserted
            # cleanly however we only got back 20 digits of accuracy.
            # pyodbc as of 4.0.22 now disallows the silent truncation.
            '-1E-20',
            '1E-20',
            '1254E-20',
            '-1203E-20',
            '0',
            '-0.00',
            '-0',
            '4585E12',
            '000000000000000000012',
            '000000000000.32E12',
            '00000000000000.1E+12',
            # these are no longer accepted by pyodbc 4.0.22 but it seems
            # they were not actually round-tripping correctly before that
            # in any case
            # '-1E-25',
            # '1E-25',
            # '1254E-25',
            # '-1203E-25',
            # '000000000000.2E-32',
        )
    ]

    with testing.db.connect() as conn:
        for value in test_items:
            result = conn.execute(numeric_table.insert(),
                                  dict(numericcol=value))
            primary_key = result.inserted_primary_key
            returned = conn.scalar(
                select([numeric_table.c.numericcol])
                .where(numeric_table.c.id == primary_key[0]))
            eq_(value, returned)
Column("parent_id", Integer, ForeignKey("history_dataset_association.id"), nullable=True), Column("designation", TrimmedString(255)), Column("deleted", Boolean, index=True, default=False), Column("visible", Boolean)) Dataset_table = Table("dataset", metadata, Column("id", Integer, primary_key=True), Column("create_time", DateTime, default=now), Column("update_time", DateTime, index=True, default=now, onupdate=now), Column("state", TrimmedString(64)), Column("deleted", Boolean, index=True, default=False), Column("purged", Boolean, index=True, default=False), Column("purgable", Boolean, default=True), Column("external_filename", TEXT), Column("_extra_files_path", TEXT), Column('file_size', Numeric(15, 0))) ImplicitlyConvertedDatasetAssociation_table = Table("implicitly_converted_dataset_association", metadata, Column("id", Integer, primary_key=True), Column("create_time", DateTime, default=now), Column("update_time", DateTime, default=now, onupdate=now), Column("hda_id", Integer, ForeignKey("history_dataset_association.id"), index=True, nullable=True), Column("hda_parent_id", Integer, ForeignKey("history_dataset_association.id"), index=True), Column("deleted", Boolean, index=True, default=False), Column("metadata_safe", Boolean, index=True, default=True), Column("type", TrimmedString(255))) ValidationError_table = Table("validation_error", metadata, Column("id", Integer, primary_key=True), Column("dataset_id", Integer, ForeignKey("history_dataset_association.id"), index=True), Column("message", TrimmedString(255)),
            return format_ip(change.old_value, obj.version)
        return format(change.old_value)
    return None


def generate_attr_change_columns():
    return [Column('attrname', String(256), info={'filler': attrname_filler}),
            Column('newvalue', String(256), info={'filler': newvalue_filler}),
            Column('oldvalue', String(256), info={'filler': oldvalue_filler})]


generate_history_table(
    Pool,
    [Column('vlan', Integer, info={'filler': default_filler('vlan')}),
     Column('layer3domain', String(128),
            info={'filler': default_filler('layer3domain')}),
     Column('address', Numeric(precision=40, scale=0)),
     Column('prefix', Integer)] + generate_attr_change_columns(),
    [Pool.name, Pool.version, Pool.description, Pool.vlan],
    indexes=[Index('ix_name', 'name')])

generate_history_table(
    Ipblock,
    [Column('status', String(64), info={'filler': default_filler('status')}),
     Column('pool', String(128), info={'filler': default_filler('pool')}),
     Column('layer3domain', String(128),
            info={'filler': default_filler('layer3domain')}),
     Column('vlan', Integer,
            info={'filler': default_filler('vlan')})] +
    generate_attr_change_columns(),
    [Ipblock.version, Ipblock.address, Ipblock.prefix, Ipblock.priority,
     Ipblock.gateway, Ipblock.status, Ipblock.pool, Ipblock.vlan],
    suppress_events=[Ipblock.version, Ipblock.address, Ipblock.prefix],
def test_render_literal_numeric(self):
    self._literal_round_trip(
        Numeric(precision=8, scale=4),
        [15.7563, decimal.Decimal("15.7563")],
        [decimal.Decimal("15.7563")],
    )
class MAC_FORGCREPORT(Base):
    __tablename__ = "MAC_FORGCREPORT"

    SGNYEAR = Column(String(8, u'utf8_bin'), primary_key=True, nullable=False)
    SGNBANK = Column(String(20, u'utf8_bin'), primary_key=True, nullable=False)
    FOREIGNASSETS = Column(Numeric(18, 4))
    RESERVEASSETS = Column(Numeric(18, 4))
    RESERVEDEPOSIT = Column(Numeric(18, 4))
    CASHINVAULT = Column(Numeric(18, 4))
    CENTRALBANKBONDS = Column(Numeric(18, 4))
    GOVERNMENTCLAIM = Column(Numeric(18, 4))
    CENTRALGOVERNMENTCLAIM = Column(Numeric(18, 4))
    NONFINANCECLAIM = Column(Numeric(18, 4))
    OTHERSPECIFICCLAIM = Column(Numeric(18, 4))
    OTHERFINANCECLAIM = Column(Numeric(18, 4))
    OTHERASSETS = Column(Numeric(18, 4))
    TOTALASSETS = Column(Numeric(18, 4))
    NONFINANCELIABILITY = Column(Numeric(18, 4))
    DEMANDDEPOSIT = Column(Numeric(18, 4))
    CORPORATETIMEDEPOSIT = Column(Numeric(18, 4))
    PERSONALSAVINGDEPOSIT = Column(Numeric(18, 4))
    OTHERDEPOSITS = Column(Numeric(18, 4))
    FOREIGNSAVINGS = Column(Numeric(18, 4))
    CENTRALBANKLIABILITY = Column(Numeric(18, 4))
    OTHERDEPOSITLIABILITY = Column(Numeric(18, 4))
    OTHERFINANCELIABILITY = Column(Numeric(18, 4))
    OTHERBROADMONEYLIABILITY = Column(Numeric(18, 4))
    FOREIGNLIABILITY = Column(Numeric(18, 4))
    BONDS = Column(Numeric(18, 4))
    PAIDINCAPITAL = Column(Numeric(18, 4))
    OTHERLIABILITIES = Column(Numeric(18, 4))
    TOTALLIABILITIES = Column(Numeric(18, 4))
def test_numeric_as_decimal(self):
    self._do_test(
        Numeric(precision=8, scale=4),
        [15.7563, decimal.Decimal("15.7563")],
        [decimal.Decimal("15.7563")],
    )
class ConvertOperation(Base):
    """dPay Blockchain Example
    ======================

    {
        "amount": "5.000 BBD",
        "requestid": 1467592156,
        "owner": "summon"
    }

    """

    __tablename__ = 'dpds_op_converts'
    __table_args__ = (
        PrimaryKeyConstraint('block_num', 'transaction_num',
                             'operation_num'),
        ForeignKeyConstraint(['owner'], ['dpds_meta_accounts.name'],
                             deferrable=True, initially='DEFERRED',
                             use_alter=True),
    )

    block_num = Column(Integer, nullable=False, index=True)
    transaction_num = Column(SmallInteger, nullable=False, index=True)
    operation_num = Column(SmallInteger, nullable=False, index=True)
    trx_id = Column(String(40), nullable=False)
    timestamp = Column(DateTime(timezone=False))
    owner = Column(String(16))  # dpay_type:account_name_type
    requestid = Column(Numeric)  # dpay_type:uint32_t
    amount = Column(Numeric(20, 6), nullable=False)  # dpay_type:asset
    amount_symbol = Column(String(5))  # dpay_type:asset
    operation_type = Column(operation_types_enum, nullable=False, index=True,
                            default='convert')

    _fields = dict(
        amount=lambda x: amount_field(x.get('amount'),
                                      num_func=float),  # dpay_type:asset
        amount_symbol=lambda x: amount_symbol_field(
            x.get('amount')),  # dpay_type:asset
    )

    _account_fields = frozenset([
        'owner',
    ])

    def dump(self):
        return dissoc(self.__dict__, '_sa_instance_state')

    def to_dict(self, decode_json=True):
        data_dict = self.dump()
        if isinstance(data_dict.get('json_metadata'), str) and decode_json:
            data_dict['json_metadata'] = dpds.dpds_json.loads(
                data_dict['json_metadata'])
        return data_dict

    def to_json(self):
        data_dict = self.to_dict()
        return dpds.dpds_json.dumps(data_dict)

    def __repr__(self):
        return "<%s (block_num:%s transaction_num: %s operation_num: %s keys: %s)>" % (
            self.__class__.__name__,
            self.block_num,
            self.transaction_num,
            self.operation_num,
            tuple(self.dump().keys()))

    def __str__(self):
        return str(self.dump())
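# The _fields mapping above feeds an asset string such as "5.000 BBD" (see
# the docstring example) through amount_field / amount_symbol_field to fill
# amount and amount_symbol. A minimal sketch of what such helpers plausibly
# do (assumed behavior, not the dpds originals):
def amount_field(value, num_func=float):
    return num_func(value.split()[0]) if value else None


def amount_symbol_field(value):
    return value.split()[1] if value else None


assert amount_field('5.000 BBD') == 5.0
assert amount_symbol_field('5.000 BBD') == 'BBD'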
def test_numeric_null_as_decimal(self):
    self._do_test(
        Numeric(precision=8, scale=4),
        [None],
        [None],
    )
class FillOrderVirtualOperation(Base):
    """dPay Blockchain Example
    ======================

    """

    __tablename__ = 'dpds_op_virtual_fill_orders'
    __table_args__ = (
        ForeignKeyConstraint(['current_owner'], ['dpds_meta_accounts.name'],
                             deferrable=True, initially='DEFERRED',
                             use_alter=True),
        ForeignKeyConstraint(['open_owner'], ['dpds_meta_accounts.name'],
                             deferrable=True, initially='DEFERRED',
                             use_alter=True),
    )

    id = Column(Integer, primary_key=True)
    block_num = Column(Integer, nullable=False, index=True)
    transaction_num = Column(SmallInteger, nullable=False, index=True)
    operation_num = Column(SmallInteger, nullable=False, index=True)
    trx_id = Column(String(40), nullable=False)
    timestamp = Column(DateTime(timezone=False))
    current_owner = Column(String(16))  # dpay_type:account_name_type
    current_orderid = Column(Numeric)  # dpay_type:uint32_t
    current_pays = Column(Numeric(20, 6), nullable=False)  # dpay_type:asset
    current_pays_symbol = Column(String(5))  # dpay_type:asset
    open_owner = Column(String(16))  # dpay_type:account_name_type
    open_orderid = Column(Numeric)  # dpay_type:uint32_t
    open_pays = Column(Numeric(20, 6), nullable=False)  # dpay_type:asset
    open_pays_symbol = Column(String(5))  # dpay_type:asset
    operation_type = Column(operation_types_enum, nullable=False, index=True,
                            default='fill_order')

    _fields = dict(
        current_pays=lambda x: amount_field(
            x.get('current_pays'), num_func=float),  # dpay_type:asset
        current_pays_symbol=lambda x: amount_symbol_field(
            x.get('current_pays')),  # dpay_type:asset
        open_pays=lambda x: amount_field(
            x.get('open_pays'), num_func=float),  # dpay_type:asset
        open_pays_symbol=lambda x: amount_symbol_field(
            x.get('open_pays')),  # dpay_type:asset
    )

    _account_fields = frozenset([
        'current_owner',
        'open_owner',
    ])

    def dump(self):
        return dissoc(self.__dict__, '_sa_instance_state')

    def to_dict(self, decode_json=True):
        data_dict = self.dump()
        if isinstance(data_dict.get('json_metadata'), str) and decode_json:
            data_dict['json_metadata'] = dpds.dpds_json.loads(
                data_dict['json_metadata'])
        return data_dict

    def to_json(self):
        data_dict = self.to_dict()
        return dpds.dpds_json.dumps(data_dict)

    def __repr__(self):
        return "<%s (block_num:%s transaction_num: %s operation_num: %s keys: %s)>" % (
            self.__class__.__name__,
            self.block_num,
            self.transaction_num,
            self.operation_num,
            tuple(self.dump().keys()))

    def __str__(self):
        return str(self.dump())
def test_numeric_no_decimal(self):
    numbers = set([decimal.Decimal("1.000")])
    self._do_test(
        Numeric(precision=5, scale=3),
        numbers,
        numbers,
        check_scale=True,
    )
from datetime import datetime

from sqlalchemy import (create_engine, MetaData, Table, Column, Integer,
                        Numeric, String, ForeignKey, DateTime, Index,
                        ForeignKeyConstraint, insert)

metadata = MetaData()

cookies = Table('cookies', metadata,
                Column('id', Integer(), primary_key=True),
                Column('name', String(50), index=True, unique=True),
                Column('recipe_url', String(255)),
                Column('sku', String(55)),
                Column('quantity', Integer()),
                Column('unit_cost', Numeric(12, 2)))
Index('ix_cookies_sku', 'sku', unique=True)

users = Table(
    'users', metadata,
    Column('id', Integer(), primary_key=True),
    Column('name', String(15), nullable=False, unique=True),
    Column('email_address', String(255)),
    Column('phone', String(20), nullable=False),
    Column('password', String(25), nullable=False),
    Column('created_on', DateTime(), default=datetime.now),
    Column('updated_on', DateTime(), default=datetime.now,
           onupdate=datetime.now))

orders = Table('orders', metadata,
               Column('id', Integer(), primary_key=True),
               Column('user_id', Integer(), ForeignKey('users.id')))

line_items = Table('line_items', metadata,
                   Column('id', Integer(), primary_key=True),