class WindMeasurement(Base, BaseTableMixin):
    """ORM model for per-interval wind measurements (MySQL table)."""

    __tablename__ = 'measurement_wind'

    # Unix timestamp of the sample; doubles as the primary key.
    epoch = Column('epoch', INTEGER(11, unsigned=True), nullable=False, primary_key=True)
    # Average wind speed over the interval (mph, per the column name).
    avg_mph = Column('avg_mph', DOUBLE(unsigned=True), nullable=False, default=0.0, server_default=text('0'))
    # Peak wind speed over the interval (mph, per the column name).
    max_mph = Column('max_mph', DOUBLE(unsigned=True), nullable=False, default=0.0, server_default=text('0'))

    # Composite index covering all three columns.
    idx_epoch_mph = Index('epoch_mph', epoch, avg_mph, max_mph)
class Income(db.Model): __tablename__ = "incomes" # 收入账期 acct_month = db.Column(db.String(6), primary_key=True) # 收入科目 subject = db.Column(SMALLINT(unsigned=True), db.ForeignKey('income_types.id'), primary_key=True) # 收入 income = db.Column(DOUBLE(16, 2), default=0.0) # 税额 tax = db.Column(DOUBLE(16, 2), default=0.0)
class Day_k(Base):
    """ORM model for daily K-line (OHLCV) rows, keyed by a surrogate id."""

    __tablename__ = "day_k"

    # Surrogate primary key.
    uid = Column(Integer, primary_key=True)
    # Trading date of the row.
    date = Column(DATE)
    open_price = Column(DOUBLE())
    high_price = Column(DOUBLE())
    close_price = Column(DOUBLE())
    low_price = Column(DOUBLE())
    volume = Column(DOUBLE())
    # Instrument/stock code.
    code = Column(String(20))

    def __repr__(self):
        # Bug fix: the original literal was "<Day_k(code='%s'>" — it never
        # closed the conventional "<ClassName(...)>" form.
        return "<Day_k(code='%s')>" % self.code
class Day_k(Base):
    """ORM model for daily K-line (OHLCV) rows (TEXT-typed variant)."""

    __tablename__ = "day_k"

    # Surrogate primary key (row index).
    index = Column(BIGINT(20), primary_key=True)
    # Date stored as text in this variant.
    date = Column(TEXT)
    open_price = Column(DOUBLE())
    high_price = Column(DOUBLE())
    close_price = Column(DOUBLE())
    low_price = Column(DOUBLE())
    volume = Column(DOUBLE())
    # Instrument/stock code, stored as text.
    code = Column(TEXT)

    def __repr__(self):
        # Bug fix: close the "<Day_k(...)>" repr — the original literal was
        # missing the trailing ")>".
        return "<Day_k(code='%s')>" % self.code
class Featuregeographicalposition(Base):
    """ORM model tying a synoptic feature to an observed geographical position."""

    __tablename__ = 'featuregeographicalposition'
    __table_args__ = (
        # Each (feature, observation time) pair may appear only once.
        Index(
            'FK_mysql_climsoft_db_v4_synopfeatureFeatureGeographicalPosition',
            'belongsTo', 'observedOn', unique=True),
    )

    # Owning synoptic feature (FK to synopfeature.abbreviation).
    belongsTo = Column(ForeignKey('synopfeature.abbreviation'), primary_key=True, nullable=False)
    # When the position was observed; stored as a string, not a DATETIME.
    observedOn = Column(String(50), primary_key=True, nullable=False)
    # Coordinates returned as Decimal (asdecimal=True), 6 fractional digits.
    latitude = Column(DOUBLE(precision=11, scale=6, asdecimal=True))
    longitude = Column(DOUBLE(precision=11, scale=6, asdecimal=True))

    synopfeature = relationship('Synopfeature')
class Temperature(Base, HelperMixin, DefaultMixin): __tablename__ = 'temperature' # uuid = Column(VARCHAR(40), ForeignKey('resource.uuid', ondelete='CASCADE', onupdate='CASCADE'), nullable=False) temperature = Column(DOUBLE(precision=20, scale=15)) units = Column(VARCHAR(10)) range = Column(VARCHAR(20)) resource_id = Column(Integer, ForeignKey('resource.id', ondelete='CASCADE', onupdate='CASCADE'), nullable=False) resource = relationship('Resource', backref="temperature", lazy=False) def __init__(self, temperature=None, units=None, range=None, resource_id=None): self.temperature = temperature self.units = units self.range = range # self.uuid = uuid self.resource_id = resource_id def __repr__(self): return "<Temperature(id='%s',uuid='%s',temperature='%s',units='%s',range='%s',gateway_id='%s',created_at='%s')>" % ( str(self.id), self.resource.uuid, str(self.temperature), str(self.units), str(self.range), str( self.resource.gateway_id), str(self.created_at))
def _getDoubleType(self): """DOUBLE type is database-specific, select one based on dialect. Returns ------- type_object : `object` Database-specific type definition. """ if self._engine.name == 'mysql': from sqlalchemy.dialects.mysql import DOUBLE return DOUBLE(asdecimal=False) elif self._engine.name == 'postgresql': from sqlalchemy.dialects.postgresql import DOUBLE_PRECISION return DOUBLE_PRECISION elif self._engine.name == 'oracle': from sqlalchemy.dialects.oracle import DOUBLE_PRECISION return DOUBLE_PRECISION elif self._engine.name == 'sqlite': # all floats in sqlite are 8-byte from sqlalchemy.dialects.sqlite import REAL return REAL else: raise TypeError( 'cannot determine DOUBLE type, unexpected dialect: ' + self._engine.name)
class SensorRead(MyMixin, Base):
    """ORM model for a single sensor reading.

    Polymorphic base: subclasses discriminate on ``data_type``. Table name
    is presumably supplied by MyMixin (not visible here).
    """

    # (mission, drone, sensor) association that produced this reading.
    mission_drone_sensor_id = Column(Integer, ForeignKey('mission_drone_sensors.id'))
    mission_drone_sensor = relationship("MissionDroneSensor", back_populates='sensor_readings')

    # Optional event the reading was recorded under.
    event_id = Column(Integer, ForeignKey('events.id'))
    event = relationship("Event", back_populates='sensor_reading')

    # Reading time; exact epoch/units not visible here — confirm with writers.
    time = Column(DOUBLE(precision=12, scale=2))

    # Polymorphic discriminator for reading subtypes.
    data_type = Column(String(50))
    __mapper_args__ = {'polymorphic_on': data_type}

    # Read-only convenience hops to the owning Mission/Drone, routed through
    # the MissionDroneSensor -> MissionDrone join.
    mission = relationship(
        'Mission',
        secondary=
        'join(MissionDroneSensor, MissionDrone, MissionDroneSensor.mission_drone_id == MissionDrone.id)',
        primaryjoin=
        'SensorRead.mission_drone_sensor_id == MissionDroneSensor.id',
        secondaryjoin='MissionDrone.mission_id == Mission.id',
        viewonly=True,
        uselist=False,
    )
    drone = relationship(
        'Drone',
        secondary=
        'join(MissionDroneSensor, MissionDrone, MissionDroneSensor.mission_drone_id == MissionDrone.id)',
        primaryjoin=
        'SensorRead.mission_drone_sensor_id == MissionDroneSensor.id',
        secondaryjoin='MissionDrone.drone_id == Drone.id',
        viewonly=True,
        uselist=False,
    )
class MonitorNetworkIO(db.Model):
    """ORM model for sampled per-NIC network I/O rates."""

    __tablename__ = 'monitor_network_io'
    __table_args__ = {"extend_existing": True}

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    # Network interface name.
    name = db.Column(db.String(100), nullable=False, comment='网卡名称')
    # Receive rate in KB/s (per the schema comment).
    receive = db.Column(DOUBLE(10, 4), nullable=False, comment='接受速度(单位KB/s)')
    # Transmit rate in KB/s (per the schema comment).
    transmit = db.Column(DOUBLE(10, 4), nullable=False, comment='发送速度(单位KB/s)')
    create_time = db.Column(db.DateTime, nullable=False, default=datetime.datetime.now)

    def as_dict(obj):
        # return {c.name: getattr(self, c.name) for c in self.__table__.columns}
        # The version above is flawed: it breaks when table columns and
        # mapped attributes are out of sync, so go through the mapper instead.
        return dict((col.name, getattr(obj, col.name)) for col in class_mapper(obj.__class__).mapped_table.c)
class Review(DeclarativeBase):
    """Sqlalchemy review model"""

    __tablename__ = "review"
    # Bug fix: mysql_engine / mysql_charset / mysql_key_block_size were set
    # as plain class attributes, which SQLAlchemy's declarative machinery
    # silently ignores — table options only take effect via __table_args__.
    __table_args__ = {
        'mysql_engine': 'InnoDB',
        'mysql_charset': 'utf8mb4',
        'mysql_key_block_size': '1024',
    }

    rid = Column('rid', Integer, primary_key=True)
    # Professor being reviewed.
    pid = Column('pid', Integer, ForeignKey(Professor.pid))
    content = Column('content', TEXT)
    class_name = Column('class_name', String(20))
    # Ratings fit DOUBLE(4, 2), i.e. up to 99.99.
    rating_overall = Column('rating_overall', DOUBLE(precision=4, scale=2))
    rating_difficulty = Column('rating_difficulty', DOUBLE(precision=4, scale=2))
    # Single-letter reason code ('R'/'S'/'E'); meanings not visible here.
    reason_taking = Column('reason_taking', ENUM('R', 'S', 'E'))
    date_posted = Column('date_posted', DATETIME)
    grade_received = Column('grade_received', String(10))
    class_standing = Column('class_standing', String(20))
class MonitorCpu(db.Model):
    """ORM model for sampled CPU utilisation figures."""

    __tablename__ = 'monitor_cpu'
    __table_args__ = {"extend_existing": True}

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    # Two successive total-CPU-time samples; utilisation is presumably
    # derived from their delta — confirm with the sampler code.
    cpu_time_1 = db.Column(db.BigInteger, nullable=False, comment='总CPU时间1')
    cpu_time_2 = db.Column(db.BigInteger, nullable=False, comment='总CPU时间2')
    # Computed idle time, stored as a string.
    idle = db.Column(db.String(20), nullable=False, comment='计算空闲时间')
    # CPU utilisation rate.
    rate = db.Column(DOUBLE(10, 4), nullable=False, comment='利用率')
    create_time = db.Column(db.DateTime, nullable=False, default=datetime.datetime.now)
class Products(Base):
    """ORM model for the `products` catalogue table."""

    __tablename__ = 'products'

    productCode = Column(String(15), primary_key=True)
    productName = Column(String(70))
    productLine = Column(String(50))
    productScale = Column(String(10))
    productVendor = Column(String(50))
    productDescription = Column(Text())
    quantityInStock = Column(SMALLINT(6))
    buyPrice = Column(DOUBLE())
    MSRP = Column(DOUBLE())

    def __repr__(self):
        # Bug fix: the original literal never closed the "<Product(...)>"
        # form (missing ")>" after the final field).
        return """<Product(productCode: {}, productName: {}, productLine: {}, ProductScale: {}, productVendor: {}, productDescription: {}, quantityInStock: {}, buyPrice: {}, MSRP: {})>""".format(
            self.productCode, self.productName, self.productLine,
            self.productScale, self.productVendor, self.productDescription,
            self.quantityInStock, self.buyPrice, self.MSRP)
class Model(Base):
    """ORM model describing a KPI model and the data window it was built from."""

    __tablename__ = 'model'

    id = Column(INTEGER, primary_key=True)
    # Time window of the source data.
    data_time_from = Column(TIMESTAMP)
    data_time_to = Column(TIMESTAMP)
    kpi_index = Column(DOUBLE)
    max_pdf = Column(DOUBLE)
    min_pdf = Column(DOUBLE)
    mean_pdf = Column(DOUBLE)
    # Explicitly requests plain-float (not Decimal) results for this column.
    kpi_pdf = Column(DOUBLE(asdecimal=False))
    description = Column(NVARCHAR(200))

    # Child feature rows.
    features = relationship("ModelFeature")
class Station(Base):
    """ORM model for an observation station and its metadata."""

    __tablename__ = 'station'
    __table_args__ = (Index('StationStationId', 'stationId'), )

    stationId = Column(String(255), primary_key=True)
    stationName = Column(String(255))
    # WMO / ICAO identifiers, where assigned.
    wmoid = Column(String(20))
    icaoid = Column(String(20))
    # Coordinates returned as Decimal (asdecimal=True), 6 fractional digits.
    latitude = Column(DOUBLE(precision=11, scale=6, asdecimal=True))
    qualifier = Column(String(20))
    longitude = Column(DOUBLE(precision=11, scale=6, asdecimal=True))
    # Stored as text, not numeric.
    elevation = Column(String(255))
    geoLocationMethod = Column(String(255))
    geoLocationAccuracy = Column(Float(11))
    # Opening/closing are free-form strings, not DATETIME columns.
    openingDatetime = Column(String(50))
    closingDatetime = Column(String(50))
    country = Column(String(50))
    authority = Column(String(255))
    adminRegion = Column(String(255))
    drainageBasin = Column(String(255))
    # 0/1 selection and status flags, defaulting to 0 in the schema.
    wacaSelection = Column(TINYINT, server_default=text("'0'"))
    cptSelection = Column(TINYINT, server_default=text("'0'"))
    stationOperational = Column(TINYINT, server_default=text("'0'"))
class Quote(Base):
    """ORM model for a user-submitted quote and its voting state."""

    __tablename__ = 'quotes'
    __table_args__ = {'mysql_engine': 'InnoDB', 'sqlite_autoincrement': True}

    id = Column(Integer, nullable=False, primary_key=True)
    # Quote text; notes are optional commentary.
    body = Column(Text, nullable=False)
    notes = Column(Text, nullable=True)
    # Aggregate vote tallies.
    rating = Column(Integer, nullable=False, default=0)
    votes = Column(Integer, nullable=False, default=0)
    # `now` is defined elsewhere in this module (callable column default).
    submitted = Column(DateTime, nullable=False, default=now)
    # Moderation status code; value semantics not visible here.
    status = Column(Integer, nullable=False, default=0)
    score = Column(DOUBLE(unsigned=True), nullable=False, default=1)

    tags = relationship("Tag", secondary=QuoteToTag)
    submitted_by = relationship("User", secondary=QuoteToUser, uselist=False)
    voters = relationship("VoteToUser")
class Stationlocationhistory(Base):
    """ORM model recording a station's location for a given opening period."""

    __tablename__ = 'stationlocationhistory'
    __table_args__ = (
        # One history row per (station, openingDatetime).
        Index('history', 'belongsTo', 'openingDatetime', unique=True),
    )

    # Owning station (FK to station.stationId).
    belongsTo = Column(ForeignKey('station.stationId'), primary_key=True, nullable=False)
    stationType = Column(String(255))
    geoLocationMethod = Column(String(255))
    geoLocationAccuracy = Column(Float(11))
    # Period boundaries kept as strings, matching the Station columns.
    openingDatetime = Column(String(50), primary_key=True, nullable=False)
    closingDatetime = Column(String(50))
    # Coordinates returned as Decimal (asdecimal=True), 6 fractional digits.
    latitude = Column(DOUBLE(precision=11, scale=6, asdecimal=True))
    longitude = Column(DOUBLE(precision=11, scale=6, asdecimal=True))
    elevation = Column(BigInteger)
    authority = Column(String(255))
    adminRegion = Column(String(255))
    drainageBasin = Column(String(255))

    station = relationship('Station')
class LstAnalysisEvaluation(getBase()):
    """ORM model over the LST analysis-evaluation table (reflected at runtime)."""

    # NOTE(review): a Table object is assigned to __tablename__, which
    # declarative expects to be a string; the reflected-table idiom is
    # normally `__table__ = Table(...)`. Flagged rather than changed, since
    # downstream code may depend on the current (unusual) setup — confirm.
    __tablename__ = Table(LstTableNames.LST_ANALYSIS_EVALUATION, getMetaData(), autoload=True, autoload_with=getEngine())

    id_analysis_evaluation = Column('ID_ANALYSIS_EVALUATION', INTEGER, primary_key=True, nullable=False)
    # Data-check plot this evaluation row belongs to.
    id_lst_r1_data_check_plot = Column('ID_LST_R1_DATA_CHECK_PLOT', INTEGER, nullable=False)
    # Evaluated parameter: free-text description plus its numeric value.
    parameter_description = Column('PARAMETER_DESCRIPTION', VARCHAR(30), nullable=False)
    parameter_value = Column('PARAMETER_VALUE', DOUBLE(), nullable=False)
class WP_Page(Base):
    '''Schema for MediaWiki page table (for wikipedia dumps).'''

    __tablename__ = 'page'

    page_id = Column(INTEGER(unsigned=True), primary_key=True)
    # MediaWiki namespace number.
    page_namespace = Column(INTEGER)
    # Binary columns mirror MediaWiki's varbinary schema.
    page_title = Column(VARBINARY(255))
    page_restrictions = Column(TINYBLOB)
    page_counter = Column(BIGINT(unsigned=True))
    # 0/1 flags.
    page_is_redirect = Column(TINYINT(unsigned=True))
    page_is_new = Column(TINYINT(unsigned=True))
    # Random sort key used by MediaWiki for random-page selection.
    page_random = Column(DOUBLE(unsigned=True))
    # Timestamps stored as 14-byte binary strings (MediaWiki format).
    page_touched = Column(VARBINARY(14))
    page_links_updated = Column(VARBINARY(14))
    # Latest revision id and page length.
    page_latest = Column(INTEGER(unsigned=True))
    page_len = Column(INTEGER(unsigned=True))
    page_content_model = Column(VARBINARY(32))
class MonitorMemory(db.Model):
    """ORM model for sampled memory / swap utilisation figures."""

    __tablename__ = 'monitor_memory'
    __table_args__ = {"extend_existing": True}

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    # Raw memory figures (units not visible here — confirm with the sampler).
    total = db.Column(db.Integer, nullable=False, comment='总内存')
    free = db.Column(db.Integer, nullable=False, comment='空闲内存')
    buffers = db.Column(db.Integer, nullable=False, comment='缓冲大小')
    cached = db.Column(db.Integer, nullable=False, comment='缓冲存储器')
    swap_cached = db.Column(db.Integer, nullable=False, comment='交换空间缓冲存储器')
    swap_total = db.Column(db.Integer, nullable=False, comment='交换空间总大小')
    swap_free = db.Column(db.Integer, nullable=False, comment='交换空间空闲总大小')
    # Utilisation rates: swap stored as a string, memory as DOUBLE(10, 4).
    swap_rate = db.Column(db.String(100), nullable=False, comment='交换空间利用率')
    rate = db.Column(DOUBLE(10, 4), nullable=False, comment='内存利用率')
    create_time = db.Column(db.DateTime, nullable=False, default=datetime.datetime.now)
class OrderDetails(Base):
    """ORM model for a line item on an order."""

    __tablename__ = 'orderdetails'

    orderNumber = Column(Integer, primary_key=True)
    # Ordered product (FK to products.productCode).
    productCode = Column(String(15), ForeignKey('products.productCode'))
    quantityOrdered = Column(Integer)
    priceEach = Column(DOUBLE())
    orderLineNumber = Column(SMALLINT(6))

    products = relationship("Products")

    def __repr__(self):
        return """orderNumber: {}, productCode: {}, quantityOrdered: {}, priceEach: {}, orderLineNumber: {}""".format(
            self.orderNumber, self.productCode, self.quantityOrdered,
            self.priceEach, self.orderLineNumber)
def get_number_type(col_type, params, unsigned=False):
    """Create a number type column.

    Args:
        col_type (string): Type of the column.
        params (object): Additional parameters.
        unsigned (bool, optional): Defaults to False. Whether or not it is an
            unsigned int.

    Returns:
        sqlalchemy.types.TypeEngine: Number type like integer or float
    """
    # Todo: Unsigned integers, check if they work in mysql, postgresql etc
    # Auto-increment column types are always forced to unsigned.
    unsigned = True if col_type == 'big_increments' or col_type == 'increments' else unsigned

    # Integers
    if col_type == 'big_increments' or col_type == 'big_integer':
        return BigInteger()\
            .with_variant(BIGINT(unsigned=unsigned), 'mysql')\
            .with_variant(Integer(), 'sqlite')
    elif col_type == 'increments' or col_type == 'integer':
        return Integer().with_variant(INTEGER(unsigned=unsigned), 'mysql')
    # Floats
    elif col_type == 'float':
        # ! Seems not to work, don't know why???
        return Float(params.get('precision'), decimal_return_scale=params.get('scale'))
    elif col_type == 'decimal':
        return Numeric(precision=params.get('precision'), scale=params.get('scale'))
    elif col_type == 'double':
        return DOUBLE(
            precision=params.get('precision'),
            scale=params.get('scale')
        ).with_variant(
            Float(params.get('precision'), decimal_return_scale=params.get('scale')),
            'sqlite'
        )
    elif col_type == 'medium_integer':
        # Bug fix: the original returned the MySQL-dialect MEDIUMINT as the
        # *generic* type with a MySQL INTEGER attached as the *sqlite*
        # variant — inverted relative to every other branch here. Use the
        # generic Integer with MEDIUMINT as the MySQL variant instead.
        return Integer().with_variant(MEDIUMINT(unsigned=unsigned), 'mysql')
    elif col_type == 'small_integer':
        return SmallInteger()
class DBCache: fort_ids_within_range = [] unknown_fort_id = None not_a_fort_id = None if config.DB_ENGINE.startswith('mysql'): from sqlalchemy.dialects.mysql import TINYINT, MEDIUMINT, BIGINT, DOUBLE, LONGTEXT TINY_TYPE = TINYINT(unsigned=True) # 0 to 255 MEDIUM_TYPE = MEDIUMINT(unsigned=True) # 0 to 4294967295 UNSIGNED_HUGE_TYPE = BIGINT(unsigned=True) # 0 to 18446744073709551615 HUGE_TYPE = BigInteger PRIMARY_HUGE_TYPE = HUGE_TYPE FLOAT_TYPE = DOUBLE(precision=18, scale=14, asdecimal=False) LONG_TEXT = LONGTEXT elif config.DB_ENGINE.startswith('postgres'): from sqlalchemy.dialects.postgresql import DOUBLE_PRECISION, TEXT class NumInt(TypeDecorator): '''Modify Numeric type for integers''' impl = Numeric def process_bind_param(self, value, dialect): if value is None: return None return int(value) def process_result_value(self, value, dialect): if value is None:
class MySQLEngineSpec(BaseEngineSpec):
    """Engine spec describing how Superset talks to MySQL."""

    engine = "mysql"
    engine_name = "MySQL"
    max_column_name_length = 64

    # Regex over MySQL's native type name -> (SQLAlchemy type instance,
    # Superset generic data category).
    column_type_mappings: Tuple[
        Tuple[
            Pattern[str],
            Union[TypeEngine, Callable[[Match[str]], TypeEngine]],
            GenericDataType,
        ],
        ...,
    ] = (
        (re.compile(r"^int.*", re.IGNORECASE), INTEGER(), GenericDataType.NUMERIC,),
        (re.compile(r"^tinyint", re.IGNORECASE), TINYINT(), GenericDataType.NUMERIC,),
        (
            re.compile(r"^mediumint", re.IGNORECASE),
            MEDIUMINT(),
            GenericDataType.NUMERIC,
        ),
        (re.compile(r"^decimal", re.IGNORECASE), DECIMAL(), GenericDataType.NUMERIC,),
        (re.compile(r"^float", re.IGNORECASE), FLOAT(), GenericDataType.NUMERIC,),
        (re.compile(r"^double", re.IGNORECASE), DOUBLE(), GenericDataType.NUMERIC,),
        (re.compile(r"^bit", re.IGNORECASE), BIT(), GenericDataType.NUMERIC,),
        (re.compile(r"^tinytext", re.IGNORECASE), TINYTEXT(), GenericDataType.STRING,),
        (
            re.compile(r"^mediumtext", re.IGNORECASE),
            MEDIUMTEXT(),
            GenericDataType.STRING,
        ),
        (re.compile(r"^longtext", re.IGNORECASE), LONGTEXT(), GenericDataType.STRING,),
    )

    # ISO-8601 duration key -> SQL expression truncating {col} to that grain.
    _time_grain_expressions = {
        None: "{col}",
        "PT1S": "DATE_ADD(DATE({col}), "
        "INTERVAL (HOUR({col})*60*60 + MINUTE({col})*60"
        " + SECOND({col})) SECOND)",
        "PT1M": "DATE_ADD(DATE({col}), "
        "INTERVAL (HOUR({col})*60 + MINUTE({col})) MINUTE)",
        "PT1H": "DATE_ADD(DATE({col}), "
        "INTERVAL HOUR({col}) HOUR)",
        "P1D": "DATE({col})",
        "P1W": "DATE(DATE_SUB({col}, "
        "INTERVAL DAYOFWEEK({col}) - 1 DAY))",
        "P1M": "DATE(DATE_SUB({col}, "
        "INTERVAL DAYOFMONTH({col}) - 1 DAY))",
        "P0.25Y": "MAKEDATE(YEAR({col}), 1) "
        "+ INTERVAL QUARTER({col}) QUARTER - INTERVAL 1 QUARTER",
        "P1Y": "DATE(DATE_SUB({col}, "
        "INTERVAL DAYOFYEAR({col}) - 1 DAY))",
        "1969-12-29T00:00:00Z/P1W": "DATE(DATE_SUB({col}, "
        "INTERVAL DAYOFWEEK(DATE_SUB({col}, "
        "INTERVAL 1 DAY)) - 1 DAY))",
    }

    type_code_map: Dict[int, str] = {}  # loaded from get_datatype only if needed

    # Regex -> (user-facing message, error type) for common connection errors.
    custom_errors = {
        CONNECTION_ACCESS_DENIED_REGEX: (
            __('Either the username "%(username)s" or the password is incorrect.'),
            SupersetErrorType.CONNECTION_ACCESS_DENIED_ERROR,
        ),
        CONNECTION_INVALID_HOSTNAME_REGEX: (
            __('Unknown MySQL server host "%(hostname)s".'),
            SupersetErrorType.CONNECTION_INVALID_HOSTNAME_ERROR,
        ),
        CONNECTION_HOST_DOWN_REGEX: (
            __('The host "%(hostname)s" might be down and can\'t be reached.'),
            SupersetErrorType.CONNECTION_HOST_DOWN_ERROR,
        ),
        CONNECTION_UNKNOWN_DATABASE_REGEX: (
            __(
                'We were unable to connect to your database named "%(database)s". '
                "Please verify your database name and try again."
            ),
            SupersetErrorType.CONNECTION_UNKNOWN_DATABASE_ERROR,
        ),
    }

    @classmethod
    def convert_dttm(cls, target_type: str, dttm: datetime) -> Optional[str]:
        # Render dttm as a MySQL STR_TO_DATE literal for DATE / DATETIME
        # targets; other types get no conversion (None).
        tt = target_type.upper()
        if tt == utils.TemporalType.DATE:
            return f"STR_TO_DATE('{dttm.date().isoformat()}', '%Y-%m-%d')"
        if tt == utils.TemporalType.DATETIME:
            datetime_formatted = dttm.isoformat(sep=" ", timespec="microseconds")
            return f"""STR_TO_DATE('{datetime_formatted}', '%Y-%m-%d %H:%i:%s.%f')"""
        return None

    @classmethod
    def adjust_database_uri(
        cls, uri: URL, selected_schema: Optional[str] = None
    ) -> None:
        # In MySQL a "schema" is a database, so point the URI at it directly.
        if selected_schema:
            uri.database = parse.quote(selected_schema, safe="")

    @classmethod
    def get_datatype(cls, type_code: Any) -> Optional[str]:
        # Map a DB-API numeric type code back to its MySQL type name.
        if not cls.type_code_map:
            # only import and store if needed at least once
            import MySQLdb

            ft = MySQLdb.constants.FIELD_TYPE
            cls.type_code_map = {
                getattr(ft, k): k for k in dir(ft) if not k.startswith("_")
            }
        datatype = type_code
        if isinstance(type_code, int):
            datatype = cls.type_code_map.get(type_code)
        if datatype and isinstance(datatype, str) and datatype:
            return datatype
        return None

    @classmethod
    def epoch_to_dttm(cls) -> str:
        return "from_unixtime({col})"

    @classmethod
    def _extract_error_message(cls, ex: Exception) -> str:
        """Extract error message for queries"""
        message = str(ex)
        try:
            # MySQLdb exceptions carry (errno, message) tuples in .args.
            if isinstance(ex.args, tuple) and len(ex.args) > 1:
                message = ex.args[1]
        except (AttributeError, KeyError):
            pass
        return message

    @classmethod
    def get_column_spec(  # type: ignore
        cls,
        native_type: Optional[str],
        source: utils.ColumnTypeSource = utils.ColumnTypeSource.GET_TABLE,
        column_type_mappings: Tuple[
            Tuple[
                Pattern[str],
                Union[TypeEngine, Callable[[Match[str]], TypeEngine]],
                GenericDataType,
            ],
            ...,
        ] = column_type_mappings,
    ) -> Union[ColumnSpec, None]:
        # Try the base class's default mappings first; fall back to the
        # MySQL-specific column_type_mappings defined on this class.
        column_spec = super().get_column_spec(native_type)
        if column_spec:
            return column_spec

        return super().get_column_spec(
            native_type, column_type_mappings=column_type_mappings
        )
f_value = str(line[1]) if fname_id and f_value: features_data.append([fname_id, f_value]) fp_table_name = 'z_fp_%s' % str(metrics_id) if not engine.dialect.has_table(engine, fp_table_name): # If table don't exist, Create. # Create z_fp_<metric_id> table fp_table_created = False try: fp_meta = MetaData() fp_metric_table = Table( fp_table_name, fp_meta, Column('id', Integer, primary_key=True), Column('fp_id', Integer, nullable=False, key='fp_id'), Column('feature_id', Integer, nullable=False), Column('value', DOUBLE(), nullable=True), mysql_charset='utf8', mysql_engine='InnoDB') fp_metric_table.create(engine, checkfirst=True) fp_table_created = True tables_created += 1 except: print(traceback.format_exc()) print('error :: create_features_profile :: failed to create table - %s' % fp_table_name) if not fp_table_created: continue # Insert features and values insert_statement = [] for fname_id, f_value in features_data:
class ScannedLocation(db.Model):
    """ORM model for a scanned location cell and when it was last visited."""

    __tablename__ = 'scannedlocation'

    cellid = db.Column(BIGINT(unsigned=True), primary_key=True)
    latitude = db.Column(DOUBLE(asdecimal=False), nullable=False)
    longitude = db.Column(DOUBLE(asdecimal=False), nullable=False)
    last_modified = db.Column(db.DateTime)

    __table_args__ = (
        Index('scannedlocation_last_modified', 'last_modified'),
        Index('scannedlocation_latitude_longitude', 'latitude', 'longitude'),
    )

    @staticmethod
    def get_recent(swLat, swLng, neLat, neLng, oSwLat=None, oSwLng=None,
                   oNeLat=None, oNeLng=None, timestamp=0, geofences=None,
                   exclude_geofences=None):
        """Return recently scanned locations as dicts.

        swLat..neLng bound the viewport; oSwLat..oNeLng describe a previous
        viewport whose contents are excluded; timestamp (ms since epoch)
        restricts to rows modified after it; geofences / exclude_geofences
        are rendered into raw SQL filters via geofences_to_query.
        """
        query = db.session.query(ScannedLocation.cellid,
                                 ScannedLocation.latitude,
                                 ScannedLocation.longitude,
                                 ScannedLocation.last_modified)

        if timestamp > 0:
            # If timestamp is known only send last scanned locations.
            t = datetime.utcfromtimestamp(timestamp / 1000)
            query = query.filter(ScannedLocation.last_modified > t)
        else:
            # Only send locations scanned in last 15 minutes.
            active_time = datetime.utcnow() - timedelta(minutes=15)
            query = query.filter(ScannedLocation.last_modified > active_time)

        if swLat and swLng and neLat and neLng:
            query = query.filter(ScannedLocation.latitude >= swLat,
                                 ScannedLocation.longitude >= swLng,
                                 ScannedLocation.latitude <= neLat,
                                 ScannedLocation.longitude <= neLng)

        if oSwLat and oSwLng and oNeLat and oNeLng:
            # Exclude scanned locations within old boundaries.
            query = query.filter(
                ~and_(ScannedLocation.latitude >= oSwLat,
                      ScannedLocation.longitude >= oSwLng,
                      ScannedLocation.latitude <= oNeLat,
                      ScannedLocation.longitude <= oNeLng))

        if geofences:
            sql = geofences_to_query(geofences, 'scannedlocation')
            query = query.filter(text(sql))

        if exclude_geofences:
            sql = geofences_to_query(exclude_geofences, 'scannedlocation')
            query = query.filter(~text(sql))

        result = query.all()

        return [loc._asdict() for loc in result]
class TrsSpawn(db.Model):
    """ORM model for a known spawn point and its timing data."""

    spawnpoint = db.Column(BIGINT(unsigned=True), primary_key=True)
    latitude = db.Column(DOUBLE(asdecimal=False), nullable=False)
    longitude = db.Column(DOUBLE(asdecimal=False), nullable=False)
    # Spawn-duration definition; 15 is treated as a 1-hour spawn below.
    spawndef = db.Column(db.Integer, default=240, nullable=False)
    earliest_unseen = db.Column(db.Integer, nullable=False)
    last_scanned = db.Column(db.DateTime)
    # Bug fix: the original used default=datetime.utcnow() — evaluated ONCE
    # at import time, stamping every new row with the same stale value.
    # Passing the callable makes SQLAlchemy evaluate it per insert.
    first_detection = db.Column(db.DateTime, default=datetime.utcnow,
                                nullable=False)
    last_non_scanned = db.Column(db.DateTime)
    # Despawn time as "MM:SS" within the hour.
    calc_endminsec = db.Column(
        db.String(length=5, collation='utf8mb4_unicode_ci'))
    eventid = db.Column(db.Integer, default=1, nullable=False)

    __table_args__ = (
        Index('event_lat_long', 'eventid', 'latitude', 'longitude'),
    )

    @staticmethod
    def get_spawnpoints(swLat, swLng, neLat, neLng, oSwLat=None, oSwLng=None,
                        oNeLat=None, oNeLng=None, timestamp=0, geofences=None,
                        exclude_geofences=None):
        """Return spawn points (as dicts) with derived spawn/despawn times.

        Viewport, old-viewport exclusion, timestamp and geofence parameters
        behave as in the other model queries in this module.
        """
        query = db.session.query(
            TrsSpawn.latitude, TrsSpawn.longitude,
            TrsSpawn.spawnpoint.label('spawnpoint_id'), TrsSpawn.spawndef,
            TrsSpawn.first_detection, TrsSpawn.last_non_scanned,
            TrsSpawn.last_scanned, TrsSpawn.calc_endminsec.label('end_time'))

        if timestamp > 0:
            # If timestamp is known only send last scanned spawn points.
            t = datetime.fromtimestamp(timestamp / 1000)
            query = query.filter((TrsSpawn.last_scanned > t) |
                                 (TrsSpawn.last_non_scanned > t))

        if swLat and swLng and neLat and neLng:
            query = query.filter(TrsSpawn.latitude >= swLat,
                                 TrsSpawn.longitude >= swLng,
                                 TrsSpawn.latitude <= neLat,
                                 TrsSpawn.longitude <= neLng)

        if oSwLat and oSwLng and oNeLat and oNeLng:
            # Exclude spawn points within old boundaries.
            query = query.filter(~and_(
                TrsSpawn.latitude >= oSwLat, TrsSpawn.longitude >= oSwLng,
                TrsSpawn.latitude <= oNeLat, TrsSpawn.longitude <= oNeLng))

        if geofences:
            sql = geofences_to_query(geofences, 'trs_spawn')
            query = query.filter(text(sql))

        if exclude_geofences:
            sql = geofences_to_query(exclude_geofences, 'trs_spawn')
            query = query.filter(~text(sql))

        result = query.all()

        spawnpoints = []
        ts = time.time()
        # Table stores local times; convert outputs back toward UTC.
        utc_offset = datetime.fromtimestamp(ts) - datetime.utcfromtimestamp(ts)
        for sp in result:
            sp = sp._asdict()
            if sp['last_non_scanned'] is not None:
                sp['last_non_scanned'] = sp['last_non_scanned'] - utc_offset
            if sp['end_time'] is not None:
                if sp['last_scanned'] is not None:
                    sp['last_scanned'] = sp['last_scanned'] - utc_offset
                # end_time is "MM:SS" within the current hour; roll over to
                # the next hour if that moment has already passed.
                end_time_split = sp['end_time'].split(':')
                end_time_seconds = int(end_time_split[1])
                end_time_minutes = int(end_time_split[0])
                despawn_time = datetime.today().replace(
                    minute=end_time_minutes, second=end_time_seconds,
                    microsecond=0)
                if despawn_time <= datetime.today():
                    despawn_time += timedelta(hours=1)
                sp['despawn_time'] = despawn_time - utc_offset
                # spawndef 15 == 1-hour spawn; everything else 30 minutes.
                if sp['spawndef'] == 15:
                    sp['spawn_time'] = sp['despawn_time'] - timedelta(hours=1)
                else:
                    sp['spawn_time'] = (sp['despawn_time'] -
                                        timedelta(minutes=30))
                del sp['end_time']
            spawnpoints.append(sp)

        return spawnpoints
class Weather(db.Model):
    """ORM model for per-S2-cell in-game weather state."""

    s2_cell_id = db.Column(db.String(length=50,
                                     collation='utf8mb4_unicode_ci'),
                           primary_key=True)
    # Cell center coordinates.
    latitude = db.Column(DOUBLE(asdecimal=False), nullable=False)
    longitude = db.Column(DOUBLE(asdecimal=False), nullable=False)
    cloud_level = db.Column(db.SmallInteger)
    rain_level = db.Column(db.SmallInteger)
    wind_level = db.Column(db.SmallInteger)
    snow_level = db.Column(db.SmallInteger)
    fog_level = db.Column(db.SmallInteger)
    wind_direction = db.Column(db.SmallInteger)
    gameplay_weather = db.Column(db.SmallInteger)
    severity = db.Column(db.SmallInteger)
    warn_weather = db.Column(db.SmallInteger)
    world_time = db.Column(db.SmallInteger)
    last_updated = db.Column(db.DateTime)

    __table_args__ = (Index('weather_last_updated', 'last_updated'), )

    @staticmethod
    def get_weather(swLat, swLng, neLat, neLng, oSwLat=None, oSwLng=None,
                    oNeLat=None, oNeLng=None, timestamp=0, geofences=None,
                    exclude_geofences=None):
        """Return weather cells (as dicts) intersecting the viewport."""
        # Filtering is done on cell centers, so pad the viewport by these
        # deltas: a cell whose center lies just outside the viewport can
        # still intersect it and must be rendered, otherwise the edges of
        # such cells would be missing from the map.
        lat_delta = 0.15
        lng_delta = 0.4

        query = db.session.query(Weather.s2_cell_id, Weather.latitude,
                                 Weather.longitude, Weather.gameplay_weather,
                                 Weather.severity, Weather.world_time,
                                 Weather.last_updated)

        if timestamp > 0:
            # If timestamp is known only send last scanned weather.
            t = datetime.utcfromtimestamp(timestamp / 1000)
            query = query.filter(Weather.last_updated > t)

        if swLat and swLng and neLat and neLng:
            query = query.filter(Weather.latitude >= float(swLat) - lat_delta,
                                 Weather.longitude >= float(swLng) - lng_delta,
                                 Weather.latitude <= float(neLat) + lat_delta,
                                 Weather.longitude <= float(neLng) + lng_delta)

        if oSwLat and oSwLng and oNeLat and oNeLng:
            # Exclude weather within old boundaries.
            query = query.filter(
                ~and_(Weather.latitude >= float(oSwLat) - lat_delta,
                      Weather.longitude >= float(oSwLng) - lng_delta,
                      Weather.latitude <= float(oNeLat) + lat_delta,
                      Weather.longitude <= float(oNeLng) + lng_delta))

        if geofences:
            sql = geofences_to_query(geofences, 'weather')
            query = query.filter(text(sql))

        if exclude_geofences:
            sql = geofences_to_query(exclude_geofences, 'weather')
            query = query.filter(~text(sql))

        result = query.all()

        return [w._asdict() for w in result]
class Pokestop(db.Model):
    """ORM model for a PokéStop, including lure/invasion state and quests."""

    pokestop_id = db.Column(db.String(length=50,
                                      collation='utf8mb4_unicode_ci'),
                            primary_key=True)
    enabled = db.Column(TINYINT, default=1, nullable=False)
    latitude = db.Column(DOUBLE(asdecimal=False), nullable=False)
    longitude = db.Column(DOUBLE(asdecimal=False), nullable=False)
    # Bug fix: the original used default=datetime.utcnow() — evaluated ONCE
    # at import time, so every new row got the same stale timestamp. Pass
    # the callable so SQLAlchemy evaluates it per insert.
    last_modified = db.Column(db.DateTime, default=datetime.utcnow)
    lure_expiration = db.Column(db.DateTime)
    active_fort_modifier = db.Column(db.SmallInteger)
    last_updated = db.Column(db.DateTime)
    name = db.Column(db.String(length=250, collation='utf8mb4_unicode_ci'))
    image = db.Column(db.String(length=255, collation='utf8mb4_unicode_ci'))
    # Team Rocket invasion state.
    incident_start = db.Column(db.DateTime)
    incident_expiration = db.Column(db.DateTime)
    incident_grunt_type = db.Column(db.SmallInteger)

    __table_args__ = (
        Index('pokestop_last_modified', 'last_modified'),
        Index('pokestop_lure_expiration', 'lure_expiration'),
        Index('pokestop_active_fort_modifier', 'active_fort_modifier'),
        Index('pokestop_last_updated', 'last_updated'),
        Index('pokestop_latitude_longitude', 'latitude', 'longitude'),
    )

    @staticmethod
    def get_pokestops(swLat, swLng, neLat, neLng, oSwLat=None, oSwLng=None,
                      oNeLat=None, oNeLng=None, timestamp=0,
                      eventless_stops=True, quests=True, invasions=True,
                      lures=True, geofences=None, exclude_geofences=None):
        """Return PokéStops (as dicts) with optional quest data attached.

        Viewport, old-viewport exclusion, timestamp and geofence parameters
        behave as in the other model queries; the boolean flags control
        which stop categories and sub-objects are included.
        """
        columns = [
            'pokestop_id', 'name', 'image', 'latitude', 'longitude',
            'last_updated', 'incident_grunt_type', 'incident_expiration',
            'active_fort_modifier', 'lure_expiration'
        ]

        if quests:
            quest_columns = [
                'GUID', 'quest_timestamp', 'quest_task', 'quest_type',
                'quest_stardust', 'quest_pokemon_id', 'quest_pokemon_form_id',
                'quest_pokemon_costume_id', 'quest_reward_type',
                'quest_item_id', 'quest_item_amount'
            ]

            # Quests reset daily; only join quests scanned since the most
            # recent reset time (configured as "HH:MM").
            hours = int(args.quest_reset_time.split(':')[0])
            minutes = int(args.quest_reset_time.split(':')[1])
            reset_time = datetime.today().replace(hour=hours, minute=minutes,
                                                  second=0, microsecond=0)
            reset_timestamp = datetime.timestamp(reset_time)

            query = (db.session.query(Pokestop, TrsQuest)
                     .outerjoin(TrsQuest,
                                and_(Pokestop.pokestop_id == TrsQuest.GUID,
                                     TrsQuest.quest_timestamp >=
                                     reset_timestamp))
                     .options(Load(Pokestop).load_only(*columns),
                              Load(TrsQuest).load_only(*quest_columns)))
        else:
            query = Pokestop.query.options(load_only(*columns))

        if not eventless_stops:
            # Only stops with an active quest, invasion or lure.
            conds = []
            if quests:
                conds.append(TrsQuest.GUID.isnot(None))
            if invasions:
                conds.append(Pokestop.incident_expiration > datetime.utcnow())
            if lures:
                conds.append(Pokestop.lure_expiration > datetime.utcnow())
            query = query.filter(or_(*conds))

        if timestamp > 0:
            # If timestamp is known only send last scanned PokéStops.
            t = datetime.utcfromtimestamp(timestamp / 1000)
            query = query.filter(Pokestop.last_updated > t)

        if swLat and swLng and neLat and neLng:
            query = query.filter(Pokestop.latitude >= swLat,
                                 Pokestop.longitude >= swLng,
                                 Pokestop.latitude <= neLat,
                                 Pokestop.longitude <= neLng)

        if oSwLat and oSwLng and oNeLat and oNeLng:
            # Exclude PokéStops within old boundaries.
            query = query.filter(~and_(Pokestop.latitude >= oSwLat,
                                       Pokestop.longitude >= oSwLng,
                                       Pokestop.latitude <= oNeLat,
                                       Pokestop.longitude <= oNeLng))

        if geofences:
            sql = geofences_to_query(geofences, 'pokestop')
            query = query.filter(text(sql))

        if exclude_geofences:
            sql = geofences_to_query(exclude_geofences, 'pokestop')
            query = query.filter(~text(sql))

        result = query.all()

        now = datetime.utcnow()
        pokestops = []
        for r in result:
            pokestop_orm = r[0] if quests else r
            quest_orm = r[1] if quests else None
            pokestop = orm_to_dict(pokestop_orm)
            if quest_orm is not None:
                pokestop['quest'] = {
                    'scanned_at': quest_orm.quest_timestamp * 1000,
                    'task': quest_orm.quest_task,
                    'reward_type': quest_orm.quest_reward_type,
                    'item_id': quest_orm.quest_item_id,
                    'item_amount': quest_orm.quest_item_amount,
                    'pokemon_id': quest_orm.quest_pokemon_id,
                    'form_id': quest_orm.quest_pokemon_form_id,
                    'costume_id': quest_orm.quest_pokemon_costume_id,
                    'stardust': quest_orm.quest_stardust
                }
            else:
                pokestop['quest'] = None
            # Blank out expired (or deselected) invasion / lure data.
            if (pokestop['incident_expiration'] is not None
                    and (pokestop['incident_expiration'] < now
                         or not invasions)):
                pokestop['incident_grunt_type'] = None
                pokestop['incident_expiration'] = None
            if (pokestop['lure_expiration'] is not None
                    and (pokestop['lure_expiration'] < now or not lures)):
                pokestop['active_fort_modifier'] = None
                pokestop['lure_expiration'] = None
            pokestops.append(pokestop)

        return pokestops
class Gym(db.Model):
    """ORM model for a gym, including team ownership, raid linkage and
    scan bookkeeping timestamps."""

    gym_id = db.Column(db.String(length=50, collation='utf8mb4_unicode_ci'),
                       primary_key=True)
    team_id = db.Column(db.SmallInteger, default=0, nullable=False)
    guard_pokemon_id = db.Column(db.SmallInteger, default=0, nullable=False)
    slots_available = db.Column(db.SmallInteger, default=6, nullable=False)
    enabled = db.Column(TINYINT, default=1, nullable=False)
    latitude = db.Column(DOUBLE(asdecimal=False), nullable=False)
    longitude = db.Column(DOUBLE(asdecimal=False), nullable=False)
    total_cp = db.Column(db.SmallInteger, default=0, nullable=False)
    is_in_battle = db.Column(TINYINT, default=0, nullable=False)
    gender = db.Column(db.SmallInteger)
    form = db.Column(db.SmallInteger)
    costume = db.Column(db.SmallInteger)
    weather_boosted_condition = db.Column(db.SmallInteger)
    shiny = db.Column(TINYINT)
    # BUG FIX: the original passed default=datetime.utcnow() — the *result*
    # of the call, evaluated once at import time — so every row inserted
    # without an explicit value received the process-start timestamp.
    # Passing the callable makes SQLAlchemy evaluate it per INSERT.
    last_modified = db.Column(db.DateTime, default=datetime.utcnow,
                              nullable=False)
    last_scanned = db.Column(db.DateTime, default=datetime.utcnow,
                             nullable=False)
    is_ex_raid_eligible = db.Column(TINYINT, default=0, nullable=False)
    gym_details = db.relationship('GymDetails', uselist=False, backref='gym',
                                  lazy='joined', cascade='delete')

    __table_args__ = (
        Index('gym_last_modified', 'last_modified'),
        Index('gym_last_scanned', 'last_scanned'),
        Index('gym_latitude_longitude', 'latitude', 'longitude'),
    )

    @staticmethod
    def get_gyms(swLat, swLng, neLat, neLng, oSwLat=None, oSwLng=None,
                 oNeLat=None, oNeLng=None, timestamp=0, raids=True,
                 geofences=None, exclude_geofences=None):
        """Return gyms inside the (swLat, swLng)-(neLat, neLng) bounding box
        as a list of dicts.

        :param swLat, swLng, neLat, neLng: current viewport corners.
        :param oSwLat, oSwLng, oNeLat, oNeLng: previous viewport corners;
            gyms inside this old box are excluded (client already has them).
        :param timestamp: client-side timestamp in ms; when > 0 only gyms
            scanned after it are returned.
        :param raids: when True, join active raids (Raid.end in the future)
            and include them under the 'raid' key.
        :param geofences: geofence names to restrict results to.
        :param exclude_geofences: geofence names to exclude results from.
        :return: list of gym dicts with 'name', 'url' and 'raid' keys added.
        """
        if raids:
            # Outer join so gyms without an active raid are still returned.
            query = (db.session.query(Gym, Raid).outerjoin(
                Raid, and_(Gym.gym_id == Raid.gym_id,
                           Raid.end > datetime.utcnow())))
        else:
            query = Gym.query
        if timestamp > 0:
            # If timestamp is known only send last scanned Gyms.
            t = datetime.utcfromtimestamp(timestamp / 1000)
            query = query.filter(Gym.last_scanned > t)
        if swLat and swLng and neLat and neLng:
            query = query.filter(Gym.latitude >= swLat,
                                 Gym.longitude >= swLng,
                                 Gym.latitude <= neLat,
                                 Gym.longitude <= neLng)
        if oSwLat and oSwLng and oNeLat and oNeLng:
            # Exclude Gyms within old boundaries.
            query = query.filter(
                ~and_(Gym.latitude >= oSwLat, Gym.longitude >= oSwLng,
                      Gym.latitude <= oNeLat, Gym.longitude <= oNeLng))
        if geofences:
            sql = geofences_to_query(geofences, 'gym')
            query = query.filter(text(sql))
        if exclude_geofences:
            sql = geofences_to_query(exclude_geofences, 'gym')
            query = query.filter(~text(sql))
        result = query.all()

        gyms = []
        for r in result:
            # With raids the row is a (Gym, Raid-or-None) tuple; without,
            # it is the Gym ORM object itself.
            gym = r[0] if raids else r
            raid = r[1] if raids else None
            gym_dict = orm_to_dict(gym)
            # Flatten the joined GymDetails relation into name/url keys.
            del gym_dict['gym_details']
            if gym.gym_details:
                gym_dict['name'] = gym.gym_details.name
                gym_dict['url'] = gym.gym_details.url
            else:
                gym_dict['name'] = None
                gym_dict['url'] = None
            if raid is not None:
                gym_dict['raid'] = orm_to_dict(raid)
            else:
                gym_dict['raid'] = None
            gyms.append(gym_dict)
        return gyms
class Pokemon(db.Model):
    """ORM model for sighted Pokémon plus static query helpers used by the
    map front end (active sightings, spawn counts, appearance history)."""

    encounter_id = db.Column(BIGINT(unsigned=True), primary_key=True)
    spawnpoint_id = db.Column(BIGINT(unsigned=True), nullable=False)
    pokemon_id = db.Column(db.SmallInteger, nullable=False)
    latitude = db.Column(DOUBLE(asdecimal=False), nullable=False)
    longitude = db.Column(DOUBLE(asdecimal=False), nullable=False)
    disappear_time = db.Column(db.DateTime, nullable=False)
    individual_attack = db.Column(db.SmallInteger)
    individual_defense = db.Column(db.SmallInteger)
    individual_stamina = db.Column(db.SmallInteger)
    move_1 = db.Column(db.SmallInteger)
    move_2 = db.Column(db.SmallInteger)
    cp = db.Column(db.SmallInteger)
    cp_multiplier = db.Column(db.Float)
    weight = db.Column(db.Float)
    height = db.Column(db.Float)
    gender = db.Column(db.SmallInteger)
    form = db.Column(db.SmallInteger)
    costume = db.Column(db.SmallInteger)
    catch_prob_1 = db.Column(DOUBLE(asdecimal=False))
    catch_prob_2 = db.Column(DOUBLE(asdecimal=False))
    catch_prob_3 = db.Column(DOUBLE(asdecimal=False))
    rating_attack = db.Column(
        db.String(length=2, collation='utf8mb4_unicode_ci'))
    rating_defense = db.Column(
        db.String(length=2, collation='utf8mb4_unicode_ci'))
    weather_boosted_condition = db.Column(db.SmallInteger)
    last_modified = db.Column(db.DateTime)

    __table_args__ = (
        Index('pokemon_spawnpoint_id', 'spawnpoint_id'),
        Index('pokemon_pokemon_id', 'pokemon_id'),
        Index('pokemon_last_modified', 'last_modified'),
        Index('pokemon_latitude_longitude', 'latitude', 'longitude'),
        Index('pokemon_disappear_time_pokemon_id', 'disappear_time',
              'pokemon_id'),
    )

    @staticmethod
    def get_active(swLat, swLng, neLat, neLng, oSwLat=None, oSwLng=None,
                   oNeLat=None, oNeLng=None, timestamp=0, eids=None, ids=None,
                   geofences=None, exclude_geofences=None,
                   verified_despawn_time=False):
        """Return currently-active Pokémon (disappear_time in the future)
        inside the given bounding box, as a list of dicts.

        :param swLat, swLng, neLat, neLng: current viewport corners.
        :param oSwLat, oSwLng, oNeLat, oNeLng: previous viewport corners;
            Pokémon inside this old box are excluded.
        :param timestamp: client-side timestamp in ms; when > 0 only
            Pokémon modified after it are returned.
        :param eids: Pokémon ids to exclude (takes precedence over `ids`).
        :param ids: when `eids` is falsy, restrict to these Pokémon ids.
        :param geofences: geofence names to restrict results to.
        :param exclude_geofences: geofence names to exclude results from.
        :param verified_despawn_time: when True, join TrsSpawn to add a
            'verified_disappear_time' column.
        :return: list of per-row dicts (Row._asdict()).
        """
        columns = [
            Pokemon.encounter_id, Pokemon.pokemon_id, Pokemon.latitude,
            Pokemon.longitude, Pokemon.disappear_time,
            Pokemon.individual_attack, Pokemon.individual_defense,
            Pokemon.individual_stamina, Pokemon.move_1, Pokemon.move_2,
            Pokemon.cp, Pokemon.cp_multiplier, Pokemon.weight, Pokemon.height,
            Pokemon.gender, Pokemon.form, Pokemon.costume,
            Pokemon.catch_prob_1, Pokemon.catch_prob_2, Pokemon.catch_prob_3,
            Pokemon.weather_boosted_condition, Pokemon.last_modified
        ]
        if verified_despawn_time:
            # Outer join keeps Pokémon whose spawnpoint has no TrsSpawn row.
            columns.append(
                TrsSpawn.calc_endminsec.label('verified_disappear_time'))
            query = (db.session.query(*columns).outerjoin(
                TrsSpawn, Pokemon.spawnpoint_id == TrsSpawn.spawnpoint))
        else:
            query = db.session.query(*columns)
        # Only Pokémon that have not despawned yet.
        query = query.filter(Pokemon.disappear_time > datetime.utcnow())
        if timestamp > 0:
            # If timestamp is known only load modified Pokémon.
            t = datetime.utcfromtimestamp(timestamp / 1000)
            query = query.filter(Pokemon.last_modified > t)
        if swLat and swLng and neLat and neLng:
            query = query.filter(Pokemon.latitude >= swLat,
                                 Pokemon.longitude >= swLng,
                                 Pokemon.latitude <= neLat,
                                 Pokemon.longitude <= neLng)
        if oSwLat and oSwLng and oNeLat and oNeLng:
            # Exclude Pokémon within old boundaries.
            query = query.filter(
                ~and_(Pokemon.latitude >= oSwLat,
                      Pokemon.longitude >= oSwLng,
                      Pokemon.latitude <= oNeLat,
                      Pokemon.longitude <= oNeLng))
        if geofences:
            sql = geofences_to_query(geofences, 'pokemon')
            query = query.filter(text(sql))
        if exclude_geofences:
            sql = geofences_to_query(exclude_geofences, 'pokemon')
            query = query.filter(~text(sql))
        if eids:
            query = query.filter(Pokemon.pokemon_id.notin_(eids))
        elif ids:
            query = query.filter(Pokemon.pokemon_id.in_(ids))
        return [pokemon._asdict() for pokemon in query.all()]

    # Get all Pokémon spawn counts based on the last x hours.
    # More efficient than get_seen(): we don't do any unnecessary mojo.
    # Returns a dict:
    # { 'pokemon': [ {'pokemon_id': '', 'count': 1} ], 'total': 1 }.
    @staticmethod
    def get_spawn_counts(hours=0):
        """Count sightings per pokemon_id over the last `hours` hours
        (0 = all time)."""
        query = (db.session.query(
            Pokemon.pokemon_id,
            func.count(Pokemon.pokemon_id).label('count')).group_by(
                Pokemon.pokemon_id))
        # Allow 0 to query everything.
        if hours > 0:
            # Rebinds `hours` from an hour count to a cutoff datetime.
            hours = datetime.utcnow() - timedelta(hours=hours)
            query = query.filter(Pokemon.disappear_time > hours)
        result = query.all()
        counts = []
        total = 0
        for c in result:
            counts.append(c._asdict())
            total += c[1]  # c[1] is the per-id count column.
        return {'pokemon': counts, 'total': total}

    @staticmethod
    def get_seen(timediff=0, geofences=None, exclude_geofences=None):
        """Per (pokemon_id, form) sighting counts and latest disappear time
        over the last `timediff` hours (0 = all time), optionally limited
        by geofences.

        :return: {'pokemon': [row dicts], 'total': total sighting count}.
        """
        query = (db.session.query(
            Pokemon.pokemon_id, Pokemon.form,
            func.count(Pokemon.pokemon_id).label('count'),
            func.max(Pokemon.disappear_time).label('disappear_time')).group_by(
                Pokemon.pokemon_id, Pokemon.form))
        if timediff > 0:
            # Rebinds `timediff` from an hour count to a cutoff datetime.
            timediff = datetime.utcnow() - timedelta(hours=timediff)
            query = query.filter(Pokemon.disappear_time > timediff)
        if geofences:
            sql = geofences_to_query(geofences, 'pokemon')
            query = query.filter(text(sql))
        if exclude_geofences:
            sql = geofences_to_query(exclude_geofences, 'pokemon')
            query = query.filter(~text(sql))
        result = query.all()
        pokemon = []
        total = 0
        for p in result:
            pokemon.append(p._asdict())
            total += p[2]  # p[2] is the per-group count column.
        return {'pokemon': pokemon, 'total': total}

    @staticmethod
    def get_appearances(pokemon_id, form_id=None, timediff=0, geofences=None,
                        exclude_geofences=None):
        '''
        :param pokemon_id: id of Pokémon that we need appearances for
        :param form_id: id of form that we need appearances for
        :param timediff: limiting period of the selection
        :return: list of Pokémon appearances over a selected period
        '''
        # Group by location + spawnpoint so each distinct spot yields one
        # row with its sighting count.
        query = (db.session.query(
            Pokemon.latitude, Pokemon.longitude, Pokemon.pokemon_id,
            Pokemon.form, Pokemon.spawnpoint_id,
            func.count(Pokemon.pokemon_id).label('count')).filter(
                Pokemon.pokemon_id == pokemon_id).group_by(
                    Pokemon.latitude, Pokemon.longitude, Pokemon.pokemon_id,
                    Pokemon.form, Pokemon.spawnpoint_id))
        if form_id is not None:
            query = query.filter(Pokemon.form == form_id)
        if timediff > 0:
            # Rebinds `timediff` from an hour count to a cutoff datetime.
            timediff = datetime.utcnow() - timedelta(hours=timediff)
            query = query.filter(Pokemon.disappear_time > timediff)
        if geofences:
            sql = geofences_to_query(geofences, 'pokemon')
            query = query.filter(text(sql))
        if exclude_geofences:
            sql = geofences_to_query(exclude_geofences, 'pokemon')
            query = query.filter(~text(sql))
        result = query.all()
        return [a._asdict() for a in result]

    @staticmethod
    def get_appearances_times_by_spawnpoint(pokemon_id, spawnpoint_id,
                                            form_id=None, timediff=0,
                                            geofences=None,
                                            exclude_geofences=None):
        '''
        :param pokemon_id: id of Pokemon that we need appearances times for.
        :param spawnpoint_id: spawnpoint id we need appearances times for.
        :param timediff: limiting period of the selection.
        :return: list of time appearances over a selected period.
        '''
        query = (db.session.query(Pokemon.disappear_time).filter(
            Pokemon.pokemon_id == pokemon_id,
            Pokemon.spawnpoint_id == spawnpoint_id).order_by(
                Pokemon.disappear_time))
        if form_id is not None:
            query = query.filter_by(form=form_id)
        # NOTE(review): sibling helpers use `if timediff > 0:`; the bare
        # truthiness test here behaves the same for the expected
        # non-negative hour counts, but differs for negative input.
        if timediff:
            # Rebinds `timediff` from an hour count to a cutoff datetime.
            timediff = datetime.utcnow() - timedelta(hours=timediff)
            query = query.filter(Pokemon.disappear_time > timediff)
        if geofences:
            sql = geofences_to_query(geofences, 'pokemon')
            query = query.filter(text(sql))
        if exclude_geofences:
            sql = geofences_to_query(exclude_geofences, 'pokemon')
            query = query.filter(~text(sql))
        result = query.all()
        # Each row is a 1-tuple; unwrap to bare datetimes.
        return [a[0] for a in result]