from sqlalchemy import BigInteger, Float, Integer, Numeric, SmallInteger
from sqlalchemy.dialects.mysql import BIGINT, DOUBLE, INTEGER, MEDIUMINT


def get_number_type(col_type, params, unsigned=False):
    """Create a number type column.

    Args:
        col_type (str): Type of the column.
        params (dict): Additional parameters, e.g. precision and scale.
        unsigned (bool, optional): Defaults to False. Whether or not it is an unsigned int.

    Returns:
        sqlalchemy.types.TypeEngine: Number type like integer or float
    """
    # TODO: unsigned integers; check whether they work in MySQL, PostgreSQL, etc.
    unsigned = unsigned or col_type in ('big_increments', 'increments')

    # Integers
    if col_type == 'big_increments' or col_type == 'big_integer':
        return BigInteger()\
            .with_variant(BIGINT(unsigned=unsigned), 'mysql')\
            .with_variant(Integer(), 'sqlite')
    elif col_type == 'increments' or col_type == 'integer':
        return Integer().with_variant(INTEGER(unsigned=unsigned), 'mysql')
    # Floats
    elif col_type == 'float':
        # FIXME: precision/scale do not seem to take effect here; cause unknown.
        return Float(params.get('precision'), decimal_return_scale=params.get('scale'))
    elif col_type == 'decimal':
        return Numeric(precision=params.get('precision'), scale=params.get('scale'))
    elif col_type == 'double':
        return DOUBLE(
            precision=params.get('precision'),
            scale=params.get('scale')
        ).with_variant(
            Float(params.get('precision'), decimal_return_scale=params.get('scale')), 'sqlite'
        )
    elif col_type == 'medium_integer':
        return MEDIUMINT().with_variant(INTEGER(unsigned=unsigned), 'sqlite')
    elif col_type == 'small_integer':
        return SmallInteger()
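A minimal usage sketch of the helper above, assuming the imports shown; the params dict keys follow the params.get() calls in the function and are not a documented interface:

# Hypothetical usage; table and column names are illustrative only.
from sqlalchemy import Column, MetaData, Table

metadata = MetaData()
prices = Table(
    'prices', metadata,
    Column('id', get_number_type('big_increments', {}), primary_key=True),
    Column('amount', get_number_type('decimal',
                                     {'precision': 10, 'scale': 2})),
)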
Example #2
    def _getDoubleType(self):
        """DOUBLE type is database-specific, select one based on dialect.

        Returns
        -------
        type_object : `object`
            Database-specific type definition.
        """
        if self._engine.name == 'mysql':
            from sqlalchemy.dialects.mysql import DOUBLE
            return DOUBLE(asdecimal=False)
        elif self._engine.name == 'postgresql':
            from sqlalchemy.dialects.postgresql import DOUBLE_PRECISION
            return DOUBLE_PRECISION
        elif self._engine.name == 'oracle':
            from sqlalchemy.dialects.oracle import DOUBLE_PRECISION
            return DOUBLE_PRECISION
        elif self._engine.name == 'sqlite':
            # all floats in sqlite are 8-byte
            from sqlalchemy.dialects.sqlite import REAL
            return REAL
        else:
            raise TypeError('cannot determine DOUBLE type, unexpected dialect: ' + self._engine.name)
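Since the dispatch only inspects self._engine.name, the behaviour is easy to exercise standalone; a sketch assuming an in-memory SQLite engine, so the SQLite branch is taken:

# Sketch: with a SQLite engine, _getDoubleType() would return the
# sqlite REAL type, usable directly in a table definition.
from sqlalchemy import Column, MetaData, Table, create_engine
from sqlalchemy.dialects.sqlite import REAL

engine = create_engine('sqlite://')   # engine.name == 'sqlite'
metadata = MetaData()
samples = Table('samples', metadata, Column('value', REAL))
metadata.create_all(engine)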
Example #3
class Gym(db.Model):
    gym_id = db.Column(db.String(length=50, collation='utf8mb4_unicode_ci'),
                       primary_key=True)
    team_id = db.Column(db.SmallInteger, default=0, nullable=False)
    guard_pokemon_id = db.Column(db.SmallInteger, default=0, nullable=False)
    slots_available = db.Column(db.SmallInteger, default=6, nullable=False)
    enabled = db.Column(TINYINT, default=1, nullable=False)
    latitude = db.Column(DOUBLE(asdecimal=False), nullable=False)
    longitude = db.Column(DOUBLE(asdecimal=False), nullable=False)
    total_cp = db.Column(db.SmallInteger, default=0, nullable=False)
    is_in_battle = db.Column(TINYINT, default=0, nullable=False)
    gender = db.Column(db.SmallInteger)
    form = db.Column(db.SmallInteger)
    costume = db.Column(db.SmallInteger)
    weather_boosted_condition = db.Column(db.SmallInteger)
    shiny = db.Column(TINYINT)
    # Use the callable (no parentheses) so the default is evaluated per
    # row instead of once at import time.
    last_modified = db.Column(db.DateTime,
                              default=datetime.utcnow,
                              nullable=False)
    last_scanned = db.Column(db.DateTime,
                             default=datetime.utcnow,
                             nullable=False)
    is_ex_raid_eligible = db.Column(TINYINT, default=0, nullable=False)

    gym_details = db.relationship('GymDetails',
                                  uselist=False,
                                  backref='gym',
                                  lazy='joined',
                                  cascade='delete')

    __table_args__ = (
        Index('gym_last_modified', 'last_modified'),
        Index('gym_last_scanned', 'last_scanned'),
        Index('gym_latitude_longitude', 'latitude', 'longitude'),
    )

    @staticmethod
    def get_gyms(swLat,
                 swLng,
                 neLat,
                 neLng,
                 oSwLat=None,
                 oSwLng=None,
                 oNeLat=None,
                 oNeLng=None,
                 timestamp=0,
                 raids=True,
                 geofences=None,
                 exclude_geofences=None):
        if raids:
            query = (db.session.query(Gym, Raid).outerjoin(
                Raid,
                and_(Gym.gym_id == Raid.gym_id, Raid.end > datetime.utcnow())))
        else:
            query = Gym.query

        if timestamp > 0:
            # If timestamp is known, only send last scanned Gyms.
            t = datetime.utcfromtimestamp(timestamp / 1000)
            query = query.filter(Gym.last_scanned > t)

        if swLat and swLng and neLat and neLng:
            query = query.filter(Gym.latitude >= swLat, Gym.longitude >= swLng,
                                 Gym.latitude <= neLat, Gym.longitude <= neLng)

        if oSwLat and oSwLng and oNeLat and oNeLng:
            # Exclude Gyms within old boundaries.
            query = query.filter(
                ~and_(Gym.latitude >= oSwLat, Gym.longitude >= oSwLng,
                      Gym.latitude <= oNeLat, Gym.longitude <= oNeLng))

        if geofences:
            sql = geofences_to_query(geofences, 'gym')
            query = query.filter(text(sql))

        if exclude_geofences:
            sql = geofences_to_query(exclude_geofences, 'gym')
            query = query.filter(~text(sql))

        result = query.all()

        gyms = []
        for r in result:
            gym = r[0] if raids else r
            raid = r[1] if raids else None
            gym_dict = orm_to_dict(gym)
            del gym_dict['gym_details']
            if gym.gym_details:
                gym_dict['name'] = gym.gym_details.name
                gym_dict['url'] = gym.gym_details.url
            else:
                gym_dict['name'] = None
                gym_dict['url'] = None
            if raid is not None:
                gym_dict['raid'] = orm_to_dict(raid)
            else:
                gym_dict['raid'] = None
            gyms.append(gym_dict)

        return gyms
Example #4
class Pokemon(db.Model):
    encounter_id = db.Column(BIGINT(unsigned=True), primary_key=True)
    spawnpoint_id = db.Column(BIGINT(unsigned=True), nullable=False)
    pokemon_id = db.Column(db.SmallInteger, nullable=False)
    latitude = db.Column(DOUBLE(asdecimal=False), nullable=False)
    longitude = db.Column(DOUBLE(asdecimal=False), nullable=False)
    disappear_time = db.Column(db.DateTime, nullable=False)
    individual_attack = db.Column(db.SmallInteger)
    individual_defense = db.Column(db.SmallInteger)
    individual_stamina = db.Column(db.SmallInteger)
    move_1 = db.Column(db.SmallInteger)
    move_2 = db.Column(db.SmallInteger)
    cp = db.Column(db.SmallInteger)
    cp_multiplier = db.Column(db.Float)
    weight = db.Column(db.Float)
    height = db.Column(db.Float)
    gender = db.Column(db.SmallInteger)
    form = db.Column(db.SmallInteger)
    costume = db.Column(db.SmallInteger)
    catch_prob_1 = db.Column(DOUBLE(asdecimal=False))
    catch_prob_2 = db.Column(DOUBLE(asdecimal=False))
    catch_prob_3 = db.Column(DOUBLE(asdecimal=False))
    rating_attack = db.Column(
        db.String(length=2, collation='utf8mb4_unicode_ci'))
    rating_defense = db.Column(
        db.String(length=2, collation='utf8mb4_unicode_ci'))
    weather_boosted_condition = db.Column(db.SmallInteger)
    last_modified = db.Column(db.DateTime)

    __table_args__ = (
        Index('pokemon_spawnpoint_id', 'spawnpoint_id'),
        Index('pokemon_pokemon_id', 'pokemon_id'),
        Index('pokemon_last_modified', 'last_modified'),
        Index('pokemon_latitude_longitude', 'latitude', 'longitude'),
        Index('pokemon_disappear_time_pokemon_id', 'disappear_time',
              'pokemon_id'),
    )

    @staticmethod
    def get_active(swLat,
                   swLng,
                   neLat,
                   neLng,
                   oSwLat=None,
                   oSwLng=None,
                   oNeLat=None,
                   oNeLng=None,
                   timestamp=0,
                   eids=None,
                   ids=None,
                   geofences=None,
                   exclude_geofences=None,
                   verified_despawn_time=False):
        columns = [
            Pokemon.encounter_id, Pokemon.pokemon_id, Pokemon.latitude,
            Pokemon.longitude, Pokemon.disappear_time,
            Pokemon.individual_attack, Pokemon.individual_defense,
            Pokemon.individual_stamina, Pokemon.move_1, Pokemon.move_2,
            Pokemon.cp, Pokemon.cp_multiplier, Pokemon.weight, Pokemon.height,
            Pokemon.gender, Pokemon.form, Pokemon.costume,
            Pokemon.catch_prob_1, Pokemon.catch_prob_2, Pokemon.catch_prob_3,
            Pokemon.weather_boosted_condition, Pokemon.last_modified
        ]

        if verified_despawn_time:
            columns.append(
                TrsSpawn.calc_endminsec.label('verified_disappear_time'))
            query = (db.session.query(*columns).outerjoin(
                TrsSpawn, Pokemon.spawnpoint_id == TrsSpawn.spawnpoint))
        else:
            query = db.session.query(*columns)

        query = query.filter(Pokemon.disappear_time > datetime.utcnow())

        if timestamp > 0:
            # If timestamp is known, only load modified Pokémon.
            t = datetime.utcfromtimestamp(timestamp / 1000)
            query = query.filter(Pokemon.last_modified > t)

        if swLat and swLng and neLat and neLng:
            query = query.filter(Pokemon.latitude >= swLat,
                                 Pokemon.longitude >= swLng,
                                 Pokemon.latitude <= neLat,
                                 Pokemon.longitude <= neLng)

        if oSwLat and oSwLng and oNeLat and oNeLng:
            # Exclude Pokémon within old boundaries.
            query = query.filter(
                ~and_(Pokemon.latitude >= oSwLat, Pokemon.longitude >= oSwLng,
                      Pokemon.latitude <= oNeLat, Pokemon.longitude <= oNeLng))

        if geofences:
            sql = geofences_to_query(geofences, 'pokemon')
            query = query.filter(text(sql))

        if exclude_geofences:
            sql = geofences_to_query(exclude_geofences, 'pokemon')
            query = query.filter(~text(sql))

        if eids:
            query = query.filter(Pokemon.pokemon_id.notin_(eids))
        elif ids:
            query = query.filter(Pokemon.pokemon_id.in_(ids))

        return [pokemon._asdict() for pokemon in query.all()]

    # Get all Pokémon spawn counts based on the last x hours.
    # More efficient than get_seen(): no per-form grouping or
    # disappear-time aggregation is performed.
    # Returns a dict:
    # { 'pokemon': [ {'pokemon_id': '', 'count': 1} ], 'total': 1 }.
    @staticmethod
    def get_spawn_counts(hours=0):
        query = (db.session.query(
            Pokemon.pokemon_id,
            func.count(Pokemon.pokemon_id).label('count')).group_by(
                Pokemon.pokemon_id))
        # Allow 0 to query everything.
        if hours > 0:
            cutoff = datetime.utcnow() - timedelta(hours=hours)
            query = query.filter(Pokemon.disappear_time > cutoff)

        result = query.all()

        counts = []
        total = 0
        for c in result:
            counts.append(c._asdict())
            total += c[1]

        return {'pokemon': counts, 'total': total}

    @staticmethod
    def get_seen(timediff=0, geofences=None, exclude_geofences=None):
        query = (db.session.query(
            Pokemon.pokemon_id, Pokemon.form,
            func.count(Pokemon.pokemon_id).label('count'),
            func.max(Pokemon.disappear_time).label('disappear_time')).group_by(
                Pokemon.pokemon_id, Pokemon.form))

        if timediff > 0:
            cutoff = datetime.utcnow() - timedelta(hours=timediff)
            query = query.filter(Pokemon.disappear_time > cutoff)

        if geofences:
            sql = geofences_to_query(geofences, 'pokemon')
            query = query.filter(text(sql))

        if exclude_geofences:
            sql = geofences_to_query(exclude_geofences, 'pokemon')
            query = query.filter(~text(sql))

        result = query.all()

        pokemon = []
        total = 0
        for p in result:
            pokemon.append(p._asdict())
            total += p[2]

        return {'pokemon': pokemon, 'total': total}

    @staticmethod
    def get_appearances(pokemon_id,
                        form_id=None,
                        timediff=0,
                        geofences=None,
                        exclude_geofences=None):
        '''
        :param pokemon_id: id of Pokémon that we need appearances for
        :param form_id: id of form that we need appearances for
        :param timediff: limiting period of the selection
        :return: list of Pokémon appearances over a selected period
        '''
        query = (db.session.query(
            Pokemon.latitude, Pokemon.longitude, Pokemon.pokemon_id,
            Pokemon.form, Pokemon.spawnpoint_id,
            func.count(Pokemon.pokemon_id).label('count')).filter(
                Pokemon.pokemon_id == pokemon_id).group_by(
                    Pokemon.latitude, Pokemon.longitude, Pokemon.pokemon_id,
                    Pokemon.form, Pokemon.spawnpoint_id))

        if form_id is not None:
            query = query.filter(Pokemon.form == form_id)

        if timediff > 0:
            cutoff = datetime.utcnow() - timedelta(hours=timediff)
            query = query.filter(Pokemon.disappear_time > cutoff)

        if geofences:
            sql = geofences_to_query(geofences, 'pokemon')
            query = query.filter(text(sql))

        if exclude_geofences:
            sql = geofences_to_query(exclude_geofences, 'pokemon')
            query = query.filter(~text(sql))

        result = query.all()

        return [a._asdict() for a in result]

    @staticmethod
    def get_appearances_times_by_spawnpoint(pokemon_id,
                                            spawnpoint_id,
                                            form_id=None,
                                            timediff=0,
                                            geofences=None,
                                            exclude_geofences=None):
        '''
        :param pokemon_id: id of the Pokémon that we need appearance times for.
        :param spawnpoint_id: id of the spawnpoint we need appearance times for.
        :param timediff: limiting period of the selection.
        :return: list of appearance times over the selected period.
        '''
        query = (db.session.query(Pokemon.disappear_time).filter(
            Pokemon.pokemon_id == pokemon_id,
            Pokemon.spawnpoint_id == spawnpoint_id).order_by(
                Pokemon.disappear_time))

        if form_id is not None:
            query = query.filter_by(form=form_id)

        if timediff:
            cutoff = datetime.utcnow() - timedelta(hours=timediff)
            query = query.filter(Pokemon.disappear_time > cutoff)

        if geofences:
            sql = geofences_to_query(geofences, 'pokemon')
            query = query.filter(text(sql))

        if exclude_geofences:
            sql = geofences_to_query(exclude_geofences, 'pokemon')
            query = query.filter(~text(sql))

        result = query.all()

        return [a[0] for a in result]
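These query methods all rely on the _asdict() method of the keyed-tuple rows that session.query(*columns) returns; a tiny sketch of that pattern (a Flask-SQLAlchemy db.session inside an application context is assumed, as in the example):

# Sketch of the row._asdict() pattern used throughout this model.
rows = db.session.query(Pokemon.pokemon_id, Pokemon.cp).limit(3).all()
dicts = [row._asdict() for row in rows]
# e.g. [{'pokemon_id': 1, 'cp': 500}, ...]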
Example #5
class MySQLEngineSpec(BaseEngineSpec, BasicParametersMixin):
    engine = "mysql"
    engine_name = "MySQL"
    max_column_name_length = 64

    default_driver = "mysqldb"
    sqlalchemy_uri_placeholder = (
        "mysql://user:password@host:port/dbname[?key=value&key=value...]")
    encryption_parameters = {"ssl": "1"}

    column_type_mappings = (
        (
            re.compile(r"^int.*", re.IGNORECASE),
            INTEGER(),
            GenericDataType.NUMERIC,
        ),
        (
            re.compile(r"^tinyint", re.IGNORECASE),
            TINYINT(),
            GenericDataType.NUMERIC,
        ),
        (
            re.compile(r"^mediumint", re.IGNORECASE),
            MEDIUMINT(),
            GenericDataType.NUMERIC,
        ),
        (
            re.compile(r"^decimal", re.IGNORECASE),
            DECIMAL(),
            GenericDataType.NUMERIC,
        ),
        (
            re.compile(r"^float", re.IGNORECASE),
            FLOAT(),
            GenericDataType.NUMERIC,
        ),
        (
            re.compile(r"^double", re.IGNORECASE),
            DOUBLE(),
            GenericDataType.NUMERIC,
        ),
        (
            re.compile(r"^bit", re.IGNORECASE),
            BIT(),
            GenericDataType.NUMERIC,
        ),
        (
            re.compile(r"^tinytext", re.IGNORECASE),
            TINYTEXT(),
            GenericDataType.STRING,
        ),
        (
            re.compile(r"^mediumtext", re.IGNORECASE),
            MEDIUMTEXT(),
            GenericDataType.STRING,
        ),
        (
            re.compile(r"^longtext", re.IGNORECASE),
            LONGTEXT(),
            GenericDataType.STRING,
        ),
    )

    _time_grain_expressions = {
        None: "{col}",
        "PT1S": "DATE_ADD(DATE({col}), "
                "INTERVAL (HOUR({col})*60*60 + MINUTE({col})*60"
                " + SECOND({col})) SECOND)",
        "PT1M": "DATE_ADD(DATE({col}), "
                "INTERVAL (HOUR({col})*60 + MINUTE({col})) MINUTE)",
        "PT1H": "DATE_ADD(DATE({col}), INTERVAL HOUR({col}) HOUR)",
        "P1D": "DATE({col})",
        "P1W": "DATE(DATE_SUB({col}, INTERVAL DAYOFWEEK({col}) - 1 DAY))",
        "P1M": "DATE(DATE_SUB({col}, INTERVAL DAYOFMONTH({col}) - 1 DAY))",
        "P3M": "MAKEDATE(YEAR({col}), 1) "
               "+ INTERVAL QUARTER({col}) QUARTER - INTERVAL 1 QUARTER",
        "P1Y": "DATE(DATE_SUB({col}, INTERVAL DAYOFYEAR({col}) - 1 DAY))",
        "1969-12-29T00:00:00Z/P1W": "DATE(DATE_SUB({col}, "
                                    "INTERVAL DAYOFWEEK(DATE_SUB({col}, "
                                    "INTERVAL 1 DAY)) - 1 DAY))",
    }

    type_code_map: Dict[int, str] = {}  # loaded from get_datatype only if needed

    custom_errors: Dict[Pattern[str],
                        Tuple[str, SupersetErrorType, Dict[str, Any]]] = {
        CONNECTION_ACCESS_DENIED_REGEX: (
            __('Either the username "%(username)s" or the password is incorrect.'),
            SupersetErrorType.CONNECTION_ACCESS_DENIED_ERROR,
            {"invalid": ["username", "password"]},
        ),
        CONNECTION_INVALID_HOSTNAME_REGEX: (
            __('Unknown MySQL server host "%(hostname)s".'),
            SupersetErrorType.CONNECTION_INVALID_HOSTNAME_ERROR,
            {"invalid": ["host"]},
        ),
        CONNECTION_HOST_DOWN_REGEX: (
            __('The host "%(hostname)s" might be down and can\'t be reached.'),
            SupersetErrorType.CONNECTION_HOST_DOWN_ERROR,
            {"invalid": ["host", "port"]},
        ),
        CONNECTION_UNKNOWN_DATABASE_REGEX: (
            __('Unable to connect to database "%(database)s".'),
            SupersetErrorType.CONNECTION_UNKNOWN_DATABASE_ERROR,
            {"invalid": ["database"]},
        ),
        SYNTAX_ERROR_REGEX: (
            __('Please check your query for syntax errors near "%(server_error)s". '
               "Then, try running your query again."),
            SupersetErrorType.SYNTAX_ERROR,
            {},
        ),
    }

    @classmethod
    def convert_dttm(
            cls,
            target_type: str,
            dttm: datetime,
            db_extra: Optional[Dict[str, Any]] = None) -> Optional[str]:
        tt = target_type.upper()
        if tt == utils.TemporalType.DATE:
            return f"STR_TO_DATE('{dttm.date().isoformat()}', '%Y-%m-%d')"
        if tt == utils.TemporalType.DATETIME:
            datetime_formatted = dttm.isoformat(sep=" ",
                                                timespec="microseconds")
            return f"""STR_TO_DATE('{datetime_formatted}', '%Y-%m-%d %H:%i:%s.%f')"""
        return None

    @classmethod
    def adjust_database_uri(cls,
                            uri: URL,
                            selected_schema: Optional[str] = None) -> URL:
        if selected_schema:
            uri = uri.set(database=parse.quote(selected_schema, safe=""))

        return uri

    @classmethod
    def get_datatype(cls, type_code: Any) -> Optional[str]:
        if not cls.type_code_map:
            # only import and store if needed at least once
            # pylint: disable=import-outside-toplevel
            import MySQLdb

            ft = MySQLdb.constants.FIELD_TYPE
            cls.type_code_map = {
                getattr(ft, k): k
                for k in dir(ft) if not k.startswith("_")
            }
        datatype = type_code
        if isinstance(type_code, int):
            datatype = cls.type_code_map.get(type_code)
        if isinstance(datatype, str) and datatype:
            return datatype
        return None

    @classmethod
    def epoch_to_dttm(cls) -> str:
        return "from_unixtime({col})"

    @classmethod
    def _extract_error_message(cls, ex: Exception) -> str:
        """Extract error message for queries"""
        message = str(ex)
        try:
            if isinstance(ex.args, tuple) and len(ex.args) > 1:
                message = ex.args[1]
        except (AttributeError, KeyError):
            pass
        return message

    @classmethod
    def get_column_spec(
        cls,
        native_type: Optional[str],
        db_extra: Optional[Dict[str, Any]] = None,
        source: utils.ColumnTypeSource = utils.ColumnTypeSource.GET_TABLE,
        column_type_mappings: Tuple[ColumnTypeMapping,
                                    ...] = column_type_mappings,
    ) -> Optional[ColumnSpec]:

        column_spec = super().get_column_spec(native_type)
        if column_spec:
            return column_spec

        return super().get_column_spec(
            native_type, column_type_mappings=column_type_mappings)

    @classmethod
    def get_cancel_query_id(cls, cursor: Any, query: Query) -> Optional[str]:
        """
        Get MySQL connection ID that will be used to cancel all other running
        queries in the same connection.

        :param cursor: Cursor instance in which the query will be executed
        :param query: Query instance
        :return: MySQL Connection ID
        """
        cursor.execute("SELECT CONNECTION_ID()")
        row = cursor.fetchone()
        return row[0]

    @classmethod
    def cancel_query(cls, cursor: Any, query: Query,
                     cancel_query_id: str) -> bool:
        """
        Cancel query in the underlying database.

        :param cursor: New cursor instance to the db of the query
        :param query: Query instance
        :param cancel_query_id: MySQL Connection ID
        :return: True if query cancelled successfully, False otherwise
        """
        try:
            cursor.execute(f"KILL CONNECTION {cancel_query_id}")
        except Exception:  # pylint: disable=broad-except
            return False

        return True
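For reference, a sketch of what convert_dttm above evaluates to (return values shown as comments; Superset's TemporalType members compare equal to the plain strings used here):

# Sketch: expected convert_dttm output for the two supported types.
from datetime import datetime

dttm = datetime(2021, 1, 2, 3, 4, 5, 678000)
# MySQLEngineSpec.convert_dttm("DATE", dttm)
#   -> "STR_TO_DATE('2021-01-02', '%Y-%m-%d')"
# MySQLEngineSpec.convert_dttm("DATETIME", dttm)
#   -> "STR_TO_DATE('2021-01-02 03:04:05.678000', '%Y-%m-%d %H:%i:%s.%f')"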
Example #6
           ForeignKey(metric.c.uid, name="metric_data_to_metric_fk",
                      onupdate="CASCADE", ondelete="CASCADE"),
           primary_key=True,
           nullable=False,
           server_default=""),
    Column("rowid",
           INTEGER(),
           primary_key=True,
           autoincrement=False,
           nullable=False,
           server_default="0"),
    Column("timestamp",
           DATETIME(),
           nullable=False),
    Column("metric_value",
           DOUBLE(asdecimal=False),
           nullable=False),
    Column("raw_anomaly_score",
           DOUBLE(asdecimal=False)),
    Column("anomaly_score",
           DOUBLE(asdecimal=False)),
    Column("display_value",
           INTEGER(),
           autoincrement=False),
    schema=None,
)

Index("timestamp_idx", metric_data.c.timestamp)
Index("anomaly_score_idx", metric_data.c.anomaly_score)

Example #7
class MySQLEngineSpec(BaseEngineSpec):
    engine = "mysql"
    engine_name = "MySQL"
    max_column_name_length = 64

    column_type_mappings: Tuple[
        Tuple[
            Pattern[str],
            Union[TypeEngine, Callable[[Match[str]], TypeEngine]],
            GenericDataType,
        ],
        ...,
    ] = (
        (re.compile(r"^int.*", re.IGNORECASE), INTEGER(), GenericDataType.NUMERIC,),
        (re.compile(r"^tinyint", re.IGNORECASE), TINYINT(), GenericDataType.NUMERIC,),
        (
            re.compile(r"^mediumint", re.IGNORECASE),
            MEDIUMINT(),
            GenericDataType.NUMERIC,
        ),
        (re.compile(r"^decimal", re.IGNORECASE), DECIMAL(), GenericDataType.NUMERIC,),
        (re.compile(r"^float", re.IGNORECASE), FLOAT(), GenericDataType.NUMERIC,),
        (re.compile(r"^double", re.IGNORECASE), DOUBLE(), GenericDataType.NUMERIC,),
        (re.compile(r"^bit", re.IGNORECASE), BIT(), GenericDataType.NUMERIC,),
        (re.compile(r"^tinytext", re.IGNORECASE), TINYTEXT(), GenericDataType.STRING,),
        (
            re.compile(r"^mediumtext", re.IGNORECASE),
            MEDIUMTEXT(),
            GenericDataType.STRING,
        ),
        (re.compile(r"^longtext", re.IGNORECASE), LONGTEXT(), GenericDataType.STRING,),
    )

    _time_grain_expressions = {
        None: "{col}",
        "PT1S": "DATE_ADD(DATE({col}), "
        "INTERVAL (HOUR({col})*60*60 + MINUTE({col})*60"
        " + SECOND({col})) SECOND)",
        "PT1M": "DATE_ADD(DATE({col}), "
        "INTERVAL (HOUR({col})*60 + MINUTE({col})) MINUTE)",
        "PT1H": "DATE_ADD(DATE({col}), INTERVAL HOUR({col}) HOUR)",
        "P1D": "DATE({col})",
        "P1W": "DATE(DATE_SUB({col}, INTERVAL DAYOFWEEK({col}) - 1 DAY))",
        "P1M": "DATE(DATE_SUB({col}, INTERVAL DAYOFMONTH({col}) - 1 DAY))",
        "P0.25Y": "MAKEDATE(YEAR({col}), 1) "
        "+ INTERVAL QUARTER({col}) QUARTER - INTERVAL 1 QUARTER",
        "P1Y": "DATE(DATE_SUB({col}, INTERVAL DAYOFYEAR({col}) - 1 DAY))",
        "1969-12-29T00:00:00Z/P1W": "DATE(DATE_SUB({col}, "
        "INTERVAL DAYOFWEEK(DATE_SUB({col}, "
        "INTERVAL 1 DAY)) - 1 DAY))",
    }

    type_code_map: Dict[int, str] = {}  # loaded from get_datatype only if needed

    custom_errors = {
        CONNECTION_ACCESS_DENIED_REGEX: (
            __('Either the username "%(username)s" or the password is incorrect.'),
            SupersetErrorType.CONNECTION_ACCESS_DENIED_ERROR,
        ),
        CONNECTION_INVALID_HOSTNAME_REGEX: (
            __('Unknown MySQL server host "%(hostname)s".'),
            SupersetErrorType.CONNECTION_INVALID_HOSTNAME_ERROR,
        ),
        CONNECTION_HOST_DOWN_REGEX: (
            __('The host "%(hostname)s" might be down and can\'t be reached.'),
            SupersetErrorType.CONNECTION_HOST_DOWN_ERROR,
        ),
        CONNECTION_UNKNOWN_DATABASE_REGEX: (
            __(
                'We were unable to connect to your database named "%(database)s". '
                "Please verify your database name and try again."
            ),
            SupersetErrorType.CONNECTION_UNKNOWN_DATABASE_ERROR,
        ),
    }

    @classmethod
    def convert_dttm(cls, target_type: str, dttm: datetime) -> Optional[str]:
        tt = target_type.upper()
        if tt == utils.TemporalType.DATE:
            return f"STR_TO_DATE('{dttm.date().isoformat()}', '%Y-%m-%d')"
        if tt == utils.TemporalType.DATETIME:
            datetime_formatted = dttm.isoformat(sep=" ", timespec="microseconds")
            return f"""STR_TO_DATE('{datetime_formatted}', '%Y-%m-%d %H:%i:%s.%f')"""
        return None

    @classmethod
    def adjust_database_uri(
        cls, uri: URL, selected_schema: Optional[str] = None
    ) -> None:
        if selected_schema:
            uri.database = parse.quote(selected_schema, safe="")

    @classmethod
    def get_datatype(cls, type_code: Any) -> Optional[str]:
        if not cls.type_code_map:
            # only import and store if needed at least once
            import MySQLdb

            ft = MySQLdb.constants.FIELD_TYPE
            cls.type_code_map = {
                getattr(ft, k): k for k in dir(ft) if not k.startswith("_")
            }
        datatype = type_code
        if isinstance(type_code, int):
            datatype = cls.type_code_map.get(type_code)
        if isinstance(datatype, str) and datatype:
            return datatype
        return None

    @classmethod
    def epoch_to_dttm(cls) -> str:
        return "from_unixtime({col})"

    @classmethod
    def _extract_error_message(cls, ex: Exception) -> str:
        """Extract error message for queries"""
        message = str(ex)
        try:
            if isinstance(ex.args, tuple) and len(ex.args) > 1:
                message = ex.args[1]
        except (AttributeError, KeyError):
            pass
        return message

    @classmethod
    def get_column_spec(  # type: ignore
        cls,
        native_type: Optional[str],
        source: utils.ColumnTypeSource = utils.ColumnTypeSource.GET_TABLE,
        column_type_mappings: Tuple[
            Tuple[
                Pattern[str],
                Union[TypeEngine, Callable[[Match[str]], TypeEngine]],
                GenericDataType,
            ],
            ...,
        ] = column_type_mappings,
    ) -> Union[ColumnSpec, None]:

        column_spec = super().get_column_spec(native_type)
        if column_spec:
            return column_spec

        return super().get_column_spec(
            native_type, column_type_mappings=column_type_mappings
        )
Example #8
                f_value = str(line[1])
                if fname_id and f_value:
                    features_data.append([fname_id, f_value])

        fp_table_name = 'z_fp_%s' % str(metrics_id)
        if not engine.dialect.has_table(engine, fp_table_name):  # If the table doesn't exist, create it.
            # Create z_fp_<metric_id> table
            fp_table_created = False
            try:
                fp_meta = MetaData()
                fp_metric_table = Table(
                    fp_table_name, fp_meta,
                    Column('id', Integer, primary_key=True),
                    Column('fp_id', Integer, nullable=False, key='fp_id'),
                    Column('feature_id', Integer, nullable=False),
                    Column('value', DOUBLE(), nullable=True),
                    mysql_charset='utf8',
                    mysql_engine='InnoDB')
                fp_metric_table.create(engine, checkfirst=True)
                fp_table_created = True
                tables_created += 1
            except Exception:
                print(traceback.format_exc())
                print('error :: create_features_profile :: failed to create table - %s' % fp_table_name)

            if not fp_table_created:
                continue

            # Insert features and values
            insert_statement = []
            for fname_id, f_value in features_data:
Example #9
class TrsSpawn(db.Model):
    spawnpoint = db.Column(BIGINT(unsigned=True), primary_key=True)
    latitude = db.Column(DOUBLE(asdecimal=False), nullable=False)
    longitude = db.Column(DOUBLE(asdecimal=False), nullable=False)
    spawndef = db.Column(db.Integer, default=240, nullable=False)
    earliest_unseen = db.Column(db.Integer, nullable=False)
    last_scanned = db.Column(db.DateTime)
    first_detection = db.Column(db.DateTime,
                                default=datetime.utcnow,
                                nullable=False)
    last_non_scanned = db.Column(db.DateTime)
    calc_endminsec = db.Column(
        db.String(length=5, collation='utf8mb4_unicode_ci'))
    eventid = db.Column(db.Integer, default=1, nullable=False)

    __table_args__ = (Index('event_lat_long', 'eventid', 'latitude',
                            'longitude'), )

    @staticmethod
    def get_spawnpoints(swLat,
                        swLng,
                        neLat,
                        neLng,
                        oSwLat=None,
                        oSwLng=None,
                        oNeLat=None,
                        oNeLng=None,
                        timestamp=0,
                        geofences=None,
                        exclude_geofences=None):
        query = db.session.query(TrsSpawn.latitude, TrsSpawn.longitude,
                                 TrsSpawn.spawnpoint.label('spawnpoint_id'),
                                 TrsSpawn.spawndef, TrsSpawn.first_detection,
                                 TrsSpawn.last_non_scanned,
                                 TrsSpawn.last_scanned,
                                 TrsSpawn.calc_endminsec.label('end_time'))

        if timestamp > 0:
            # If timestamp is known, only send last scanned spawn points.
            t = datetime.fromtimestamp(timestamp / 1000)
            query = query.filter((TrsSpawn.last_scanned > t)
                                 | (TrsSpawn.last_non_scanned > t))

        if swLat and swLng and neLat and neLng:
            query = query.filter(TrsSpawn.latitude >= swLat,
                                 TrsSpawn.longitude >= swLng,
                                 TrsSpawn.latitude <= neLat,
                                 TrsSpawn.longitude <= neLng)

        if oSwLat and oSwLng and oNeLat and oNeLng:
            # Exclude spawn points within old boundaries.
            query = query.filter(~and_(
                TrsSpawn.latitude >= oSwLat, TrsSpawn.longitude >= oSwLng,
                TrsSpawn.latitude <= oNeLat, TrsSpawn.longitude <= oNeLng))

        if geofences:
            sql = geofences_to_query(geofences, 'trs_spawn')
            query = query.filter(text(sql))

        if exclude_geofences:
            sql = geofences_to_query(exclude_geofences, 'trs_spawn')
            query = query.filter(~text(sql))

        result = query.all()

        spawnpoints = []
        ts = time.time()
        utc_offset = datetime.fromtimestamp(ts) - datetime.utcfromtimestamp(ts)
        for sp in result:
            sp = sp._asdict()
            if sp['last_non_scanned'] is not None:
                sp['last_non_scanned'] = sp['last_non_scanned'] - utc_offset
            if sp['end_time'] is not None:
                if sp['last_scanned'] is not None:
                    sp['last_scanned'] = sp['last_scanned'] - utc_offset
                end_time_split = sp['end_time'].split(':')
                end_time_seconds = int(end_time_split[1])
                end_time_minutes = int(end_time_split[0])
                despawn_time = datetime.today().replace(
                    minute=end_time_minutes,
                    second=end_time_seconds,
                    microsecond=0)
                if despawn_time <= datetime.today():
                    despawn_time += timedelta(hours=1)
                sp['despawn_time'] = despawn_time - utc_offset
                if sp['spawndef'] == 15:
                    sp['spawn_time'] = sp['despawn_time'] - timedelta(hours=1)
                else:
                    sp['spawn_time'] = (sp['despawn_time'] -
                                        timedelta(minutes=30))
                del sp['end_time']
            spawnpoints.append(sp)

        return spawnpoints
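The despawn computation in get_spawnpoints turns the 'MM:SS' string from calc_endminsec into the next wall-clock occurrence of that minute/second mark; the same step condensed into a standalone sketch:

# Condensed sketch of the 'MM:SS' -> despawn datetime logic above.
from datetime import datetime, timedelta

def next_despawn(end_time):          # e.g. end_time = '23:30'
    minutes, seconds = (int(part) for part in end_time.split(':'))
    despawn = datetime.today().replace(minute=minutes, second=seconds,
                                       microsecond=0)
    if despawn <= datetime.today():  # mark already passed this hour
        despawn += timedelta(hours=1)
    return despawn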
Example #10
class Nest(db.Model):
    __tablename__ = 'nests'

    nest_id = db.Column(BIGINT, primary_key=True)
    lat = db.Column(DOUBLE(asdecimal=False))
    lon = db.Column(DOUBLE(asdecimal=False))
    pokemon_id = db.Column(db.Integer, default=0)
    updated = db.Column(BIGINT)
    type = db.Column(TINYINT, nullable=False, default=0)
    name = db.Column(db.String(length=250, collation='utf8mb4_unicode_ci'))
    pokemon_count = db.Column(DOUBLE(asdecimal=False), default=0)
    pokemon_avg = db.Column(DOUBLE(asdecimal=False), default=0)

    __table_args__ = (
        Index('CoordsIndex', 'lat', 'lon'),
        Index('UpdatedIndex', 'updated'),
    )

    @staticmethod
    def get_nests(swLat,
                  swLng,
                  neLat,
                  neLng,
                  oSwLat=None,
                  oSwLng=None,
                  oNeLat=None,
                  oNeLng=None,
                  timestamp=0,
                  geofences=None,
                  exclude_geofences=None):
        query = db.session.query(Nest.nest_id, Nest.lat.label('latitude'),
                                 Nest.lon.label('longitude'), Nest.pokemon_id,
                                 Nest.updated.label('last_updated'), Nest.name,
                                 Nest.pokemon_count, Nest.pokemon_avg)

        if timestamp > 0:
            # If timestamp is known, only send last updated nests.
            query = query.filter(Nest.updated > timestamp / 1000)

        if swLat and swLng and neLat and neLng:
            query = query.filter(Nest.lat >= swLat, Nest.lon >= swLng,
                                 Nest.lat <= neLat, Nest.lon <= neLng)

        if oSwLat and oSwLng and oNeLat and oNeLng:
            # Exclude scanned locations within old boundaries.
            query = query.filter(~and_(Nest.lat >= oSwLat, Nest.lon >= oSwLng,
                                       Nest.lat <= oNeLat, Nest.lon <= oNeLng))

        if geofences:
            sql = geofences_to_query(geofences, 'nests', 'lat', 'lon')
            query = query.filter(text(sql))

        if exclude_geofences:
            sql = geofences_to_query(exclude_geofences, 'nests', 'lat', 'lon')
            query = query.filter(~text(sql))

        result = query.all()

        nests = []
        for n in result:
            nest = n._asdict()
            nest['last_updated'] *= 1000
            nests.append(nest)

        return nests
Example #11
from sqlalchemy.sql.functions import current_timestamp

metadata = MetaData()

users = Table('users', metadata,
              Column('id', Integer, primary_key=True),
              Column('ip_id', Integer, ForeignKey('ips.id')),
              Column('name_id', Integer, ForeignKey('names.id')),
              Column('steamid_id', Integer, ForeignKey('steamids.id')),
              Column('hostname_id', Integer, ForeignKey('hostnames.id')),
              Column('hwid_id', Integer, ForeignKey('hwids.id')),
              Column('first_seen', TIMESTAMP(), server_default=current_timestamp()),
              Column('last_seen', TIMESTAMP(), server_default=current_timestamp()),
              UniqueConstraint('ip_id', 'name_id', 'steamid_id', 'hwid_id',
                               'hostname_id', name='unique_user')
              )
ips = Table(
    'ips', metadata, Column('id', Integer, primary_key=True),
    Column('ip', String(15), unique=True, nullable=False),
    Column('ip_int', INTEGER(unsigned=True), unique=True, nullable=False),
    Column('risk', DOUBLE(), server_default='0.00'))
hostnames = Table('hostnames', metadata, Column('id',
                                                Integer,
                                                primary_key=True),
                  Column('hostname', String(100), unique=True, nullable=False))
steamids = Table('steamids', metadata, Column('id', Integer, primary_key=True),
                 Column('steamid', String(20), unique=True, nullable=False))
hwids = Table('hwids', metadata, Column('id', Integer, primary_key=True),
              Column('hwid', String(20), unique=True, nullable=False))
names = Table('names', metadata, Column('id', Integer, primary_key=True),
              Column('name', String(30), unique=True, nullable=False))
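Since these are plain SQLAlchemy Core Table objects bound to one MetaData, emitting the schema is a single call; a sketch (the connection URL is a placeholder, not from the original source):

# Sketch: create all tables defined on this MetaData, skipping any
# that already exist. The URL below is a placeholder.
from sqlalchemy import create_engine

engine = create_engine('mysql://user:password@localhost/dbname')
metadata.create_all(engine)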
Example #12
class User(Base):
    __tablename__ = 'gk-users'

    id = Column(
        'userid',
        Integer,
        primary_key=True,
        key='id',
    )
    _name = Column(
        'user',
        String(80),
        key='name',
        nullable=False,
        unique=True,
    )
    is_admin = Column(
        'is_admin',
        Boolean,
        key='is_admin',
        nullable=True,
        default=False,
    )
    _password = Column(
        'haslo2',
        String(120),
        key='password',
        nullable=False,
    )
    email = Column(
        String(150),
        nullable=False,
        unique=True,
    )
    daily_mails = Column(
        'wysylacmaile',
        Boolean,
        key='daily_news',
        nullable=False,
        default=False,
    )
    ip = Column(
        String(39),
        nullable=True,
        default=None,
    )
    language = Column(
        'lang',
        String(2),
        key='language',
        nullable=False,
        default="",
    )
    latitude = Column(
        'lat',
        DOUBLE(precision=8, scale=5),
        key='latitude',
        nullable=True,
        default=None,
    )
    longitude = Column(
        'lon',
        DOUBLE(precision=8, scale=5),
        key='longitude',
        nullable=True,
        default=None,
    )
    observation_radius = Column(
        'promien',
        INTEGER(unsigned=True),
        key='observation_radius',
        default=0,
    )
    country = Column(
        String(3),
        nullable=True,
        default=None,
    )
    hour = Column(
        'godzina',
        Integer,
        key='hour',
        default=random_0_23,
    )
    statpic_id = Column(
        'statpic',
        Integer,
        key='statpic_id',
        default=1,
    )
    join_datetime = Column(
        'joined',
        DateTime,
        key='join_datetime',
        default=datetime.datetime.utcnow,
    )
    last_mail_datetime = Column(
        'ostatni_mail',
        DateTime,
        nullable=True,
        key='last_mail_datetime',
        default=None,
    )
    last_login_datetime = Column(
        'ostatni_login',
        DateTime,
        nullable=True,
        key='last_login_datetime',
        default=None,
    )
    last_update_datetime = Column(
        'timestamp',
        DateTime,
        key='last_update_datetime',
        default=datetime.datetime.utcnow,
    )
    secid = Column(String(128))

    news = relationship(
        'News',
        backref="author",
        cascade="all,delete",
    )
    news_comments = relationship(
        'NewsComment',
        backref="author",
        cascade="all,delete",
    )
    news_subscriptions = relationship(
        'NewsSubscription',
        backref="user",
        cascade="all,delete",
    )

    @hybrid_property
    def password(self):
        """
        Hybrid property for password
        :return:
        """
        return self._password

    @password.setter
    def password(self, password):
        """
        Setter for _password, saves hashed password, salt and reset_password string
        :param password:
        :return:
        """
        t_hasher = phpass.PasswordHash(11, False)
        self._password = t_hasher.hash_password(
            password.encode('utf-8') +
            b"DDD"  # app.config['PASSWORD_HASH_SALT']; bytes, to match the encoded password
        )

    @password.expression
    def password(cls):
        return cls._password

    @hybrid_property
    def name(self):
        return characterentities.decode(self._name)

    @name.setter
    def name(self, name):
        name_clean = bleach.clean(name, tags=[], strip=True)
        self._name = characterentities.decode(name_clean).strip()

    @name.expression
    def name(cls):
        return cls._name
Example #13
            fp_table_created = False
            try:
                fp_meta = MetaData()
                fp_metric_table = Table(fp_table_name,
                                        fp_meta,
                                        Column('id', Integer,
                                               primary_key=True),
                                        Column('fp_id',
                                               Integer,
                                               nullable=False,
                                               key='fp_id'),
                                        Column('feature_id',
                                               Integer,
                                               nullable=False),
                                        Column('value',
                                               DOUBLE(),
                                               nullable=True),
                                        mysql_charset='utf8',
                                        mysql_engine='InnoDB')
                fp_metric_table.create(engine, checkfirst=True)
                fp_table_created = True
                tables_created += 1
            except Exception:
                print(traceback.format_exc())
                print(
                    'error :: create_features_profile :: failed to create table - %s'
                    % fp_table_name)

            if not fp_table_created:
                continue
Example #14
class Move(Base):
    __tablename__ = 'gk-ruchy'

    id = Column(
        'ruch_id',
        Integer,
        primary_key=True,
        key='id'
    )

    geokret_id = Column(
        'id',
        Integer,
        ForeignKey('gk-geokrety.id', name='fk_geokret_moved'),
        key='geokret_id',
        nullable=False,
        default=None
    )
    geokret = relationship(
        "Geokret",
        foreign_keys=[geokret_id],
        backref=backref("moves", order_by="-Move.moved_on_datetime")
    )

    latitude = Column(
        'lat',
        DOUBLE(precision=8, scale=5),
        key='latitude',
        nullable=True,
        default=None
    )

    longitude = Column(
        'lon',
        DOUBLE(precision=8, scale=5),
        key='longitude',
        nullable=True,
        default=None
    )

    altitude = Column(
        'alt',
        Integer,
        key='altitude',
        nullable=True,
        default=None,
    )

    country = Column(
        'country',
        String(3),
        key='country',
        nullable=True,
        default=None,
    )

    distance = Column(
        'droga',
        Integer,
        key='distance',
        nullable=False,
        default=0
    )

    waypoint = Column(
        'waypoint',
        String(10),
        key='waypoint',
        nullable=False,
        default=''
    )

    _comment = Column(
        'koment',
        String(5120),
        key='comment',
        nullable=False,
        default=''
    )

    pictures_count = Column(
        'zdjecia',
        Integer,
        key='pictures_count',
        nullable=False,
        default=0
    )

    comments_count = Column(
        'komentarze',
        Integer,
        key='comments_count',
        nullable=False,
        default=0
    )

    type = Column(
        'logtype',
        Enum('0', '1', '2', '3', '4', '5'),
        key='type',
        nullable=False,
    )

    author_id = Column(
        'user',
        Integer,
        ForeignKey('gk-users.id'),
        key='author_id',
        nullable=False,
        default=None,
    )
    author = relationship("User", foreign_keys=[author_id], backref="moves")

    username = Column(
        'username',
        String(20),
        key='username',
        nullable=False,
        default=''
    )

    # This is used to compute the missing status
    _move_comments = relationship(
        'MoveComment',
        foreign_keys="MoveComment.move_id",
        lazy="dynamic",
    )

    moved_on_datetime = Column(
        'data',
        DateTime,
        key='moved_on_datetime',
        nullable=False,
        default=datetime.utcnow,
    )

    created_on_datetime = Column(
        'data_dodania',
        DateTime,
        key='created_on_datetime',
        nullable=False,
        default=datetime.utcnow,
    )

    updated_on_datetime = Column(
        'timestamp',
        DateTime,
        key='updated_on_datetime',
        default=datetime.utcnow,
        onupdate=datetime.utcnow
    )

    _application_name = Column(
        'app',
        String(16),
        key='application_name',
        nullable=True
    )

    _application_version = Column(
        'app_ver',
        String(16),
        key='application_version',
        nullable=True
    )

    @hybrid_property
    def comment(self):
        return characterentities.decode(self._comment)

    @comment.setter
    def comment(self, comment):
        comment_clean = bleach.clean(comment, strip=True)
        self._comment = characterentities.decode(comment_clean).strip()

    @comment.expression
    def comment(cls):
        return cls._comment

    @hybrid_property
    def application_version(self):
        if self._application_version is None:
            return None
        return characterentities.decode(self._application_version)

    @application_version.setter
    def application_version(self, application_version):
        if application_version is None:
            self._application_version = None
        else:
            application_version_clean = bleach.clean(application_version, tags=[], strip=True)
            self._application_version = characterentities.decode(application_version_clean).strip()

    @application_version.expression
    def application_version(cls):
        return cls._application_version

    @hybrid_property
    def application_name(self):
        if self._application_name is None:
            return None
        return characterentities.decode(self._application_name)

    @application_name.setter
    def application_name(self, application_name):
        if application_name is None:
            self._application_name = None
        else:
            application_name_clean = bleach.clean(application_name, tags=[], strip=True)
            self._application_name = characterentities.decode(application_name_clean).strip()

    @application_name.expression
    def application_name(cls):
        return cls._application_name

    @hybrid_property
    def _moved_on_datetime(self):
        if isinstance(self.moved_on_datetime, str):
            self.moved_on_datetime = datetime.strptime(self.moved_on_datetime, "%Y-%m-%dT%H:%M:%S")
        return round_microseconds(self.moved_on_datetime)
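The hybrid properties above decode stored HTML entities on instance access while falling back to the raw column in SQL expressions; a usage sketch (the session object is assumed, not part of the example):

# Sketch: one attribute, two behaviours.
move = session.query(Move).first()
print(move.comment)                 # instance access: decoded via the getter
recent = (session.query(Move)
          .filter(Move.comment != '')          # SQL: raw 'koment' column
          .order_by(Move.moved_on_datetime.desc())
          .all())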
Example #15
class Pokestop(db.Model):
    pokestop_id = db.Column(db.String(length=50,
                                      collation='utf8mb4_unicode_ci'),
                            primary_key=True)
    enabled = db.Column(TINYINT, default=1, nullable=False)
    latitude = db.Column(DOUBLE(asdecimal=False), nullable=False)
    longitude = db.Column(DOUBLE(asdecimal=False), nullable=False)
    last_modified = db.Column(db.DateTime, default=datetime.utcnow)
    lure_expiration = db.Column(db.DateTime)
    active_fort_modifier = db.Column(db.SmallInteger)
    last_updated = db.Column(db.DateTime)
    name = db.Column(db.String(length=250, collation='utf8mb4_unicode_ci'))
    image = db.Column(db.String(length=255, collation='utf8mb4_unicode_ci'))
    incident_start = db.Column(db.DateTime)
    incident_expiration = db.Column(db.DateTime)
    incident_grunt_type = db.Column(db.SmallInteger)

    __table_args__ = (
        Index('pokestop_last_modified', 'last_modified'),
        Index('pokestop_lure_expiration', 'lure_expiration'),
        Index('pokestop_active_fort_modifier', 'active_fort_modifier'),
        Index('pokestop_last_updated', 'last_updated'),
        Index('pokestop_latitude_longitude', 'latitude', 'longitude'),
    )

    @staticmethod
    def get_pokestops(swLat,
                      swLng,
                      neLat,
                      neLng,
                      oSwLat=None,
                      oSwLng=None,
                      oNeLat=None,
                      oNeLng=None,
                      timestamp=0,
                      eventless_stops=True,
                      quests=True,
                      invasions=True,
                      lures=True,
                      geofences=None,
                      exclude_geofences=None):
        columns = [
            'pokestop_id', 'name', 'image', 'latitude', 'longitude',
            'last_updated', 'incident_grunt_type', 'incident_expiration',
            'active_fort_modifier', 'lure_expiration'
        ]

        if quests:
            quest_columns = [
                'GUID', 'quest_timestamp', 'quest_task', 'quest_type',
                'quest_stardust', 'quest_pokemon_id', 'quest_pokemon_form_id',
                'quest_pokemon_costume_id', 'quest_reward_type',
                'quest_item_id', 'quest_item_amount'
            ]
            hours = int(args.quest_reset_time.split(':')[0])
            minutes = int(args.quest_reset_time.split(':')[1])
            reset_time = datetime.today().replace(hour=hours,
                                                  minute=minutes,
                                                  second=0,
                                                  microsecond=0)
            reset_timestamp = datetime.timestamp(reset_time)
            query = (db.session.query(Pokestop, TrsQuest).outerjoin(
                TrsQuest,
                and_(Pokestop.pokestop_id == TrsQuest.GUID,
                     TrsQuest.quest_timestamp >= reset_timestamp)).options(
                         Load(Pokestop).load_only(*columns),
                         Load(TrsQuest).load_only(*quest_columns)))
        else:
            query = Pokestop.query.options(load_only(*columns))

        if not eventless_stops:
            conds = []
            if quests:
                conds.append(TrsQuest.GUID.isnot(None))
            if invasions:
                conds.append(Pokestop.incident_expiration > datetime.utcnow())
            if lures:
                conds.append(Pokestop.lure_expiration > datetime.utcnow())
            query = query.filter(or_(*conds))

        if timestamp > 0:
            # If timestamp is known, only send last scanned PokéStops.
            t = datetime.utcfromtimestamp(timestamp / 1000)
            query = query.filter(Pokestop.last_updated > t)

        if swLat and swLng and neLat and neLng:
            query = query.filter(Pokestop.latitude >= swLat,
                                 Pokestop.longitude >= swLng,
                                 Pokestop.latitude <= neLat,
                                 Pokestop.longitude <= neLng)

        if oSwLat and oSwLng and oNeLat and oNeLng:
            # Exclude PokéStops within old boundaries.
            query = query.filter(~and_(
                Pokestop.latitude >= oSwLat, Pokestop.longitude >= oSwLng,
                Pokestop.latitude <= oNeLat, Pokestop.longitude <= oNeLng))

        if geofences:
            sql = geofences_to_query(geofences, 'pokestop')
            query = query.filter(text(sql))

        if exclude_geofences:
            sql = geofences_to_query(exclude_geofences, 'pokestop')
            query = query.filter(~text(sql))

        result = query.all()

        now = datetime.utcnow()
        pokestops = []
        for r in result:
            pokestop_orm = r[0] if quests else r
            quest_orm = r[1] if quests else None
            pokestop = orm_to_dict(pokestop_orm)
            if quest_orm is not None:
                pokestop['quest'] = {
                    'scanned_at': quest_orm.quest_timestamp * 1000,
                    'task': quest_orm.quest_task,
                    'reward_type': quest_orm.quest_reward_type,
                    'item_id': quest_orm.quest_item_id,
                    'item_amount': quest_orm.quest_item_amount,
                    'pokemon_id': quest_orm.quest_pokemon_id,
                    'form_id': quest_orm.quest_pokemon_form_id,
                    'costume_id': quest_orm.quest_pokemon_costume_id,
                    'stardust': quest_orm.quest_stardust
                }
            else:
                pokestop['quest'] = None
            if (pokestop['incident_expiration'] is not None and
                (pokestop['incident_expiration'] < now or not invasions)):
                pokestop['incident_grunt_type'] = None
                pokestop['incident_expiration'] = None
            if (pokestop['lure_expiration'] is not None
                    and (pokestop['lure_expiration'] < now or not lures)):
                pokestop['active_fort_modifier'] = None
                pokestop['lure_expiration'] = None
            pokestops.append(pokestop)

        return pokestops
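
# A minimal, hedged sketch of the daily quest reset cutoff used in the
# outerjoin above: quests whose quest_timestamp predates today's reset
# time join as NULL and are treated as absent. The hour/minute values
# here are hypothetical stand-ins for the configured reset time.
from datetime import datetime

hours, minutes = 0, 0  # hypothetical reset time (local midnight)
reset_time = datetime.today().replace(hour=hours, minute=minutes,
                                      second=0, microsecond=0)
reset_timestamp = datetime.timestamp(reset_time)
print('quests scanned before %s are ignored' % reset_time)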
Example #16
class Weather(db.Model):
    s2_cell_id = db.Column(db.String(length=50,
                                     collation='utf8mb4_unicode_ci'),
                           primary_key=True)
    latitude = db.Column(DOUBLE(asdecimal=False), nullable=False)
    longitude = db.Column(DOUBLE(asdecimal=False), nullable=False)
    cloud_level = db.Column(db.SmallInteger)
    rain_level = db.Column(db.SmallInteger)
    wind_level = db.Column(db.SmallInteger)
    snow_level = db.Column(db.SmallInteger)
    fog_level = db.Column(db.SmallInteger)
    wind_direction = db.Column(db.SmallInteger)
    gameplay_weather = db.Column(db.SmallInteger)
    severity = db.Column(db.SmallInteger)
    warn_weather = db.Column(db.SmallInteger)
    world_time = db.Column(db.SmallInteger)
    last_updated = db.Column(db.DateTime)

    __table_args__ = (Index('weather_last_updated', 'last_updated'), )

    @staticmethod
    def get_weather(swLat,
                    swLng,
                    neLat,
                    neLng,
                    oSwLat=None,
                    oSwLng=None,
                    oNeLat=None,
                    oNeLng=None,
                    timestamp=0,
                    geofences=None,
                    exclude_geofences=None):
        # We can only filter by the center of a cell, so these deltas
        # expand the viewport bounds. This way, cells whose center lies
        # outside the viewport but close to it are still rendered;
        # otherwise the edges of cells that intersect the viewport
        # would not be rendered.
        lat_delta = 0.15
        lng_delta = 0.4

        query = db.session.query(Weather.s2_cell_id, Weather.latitude,
                                 Weather.longitude, Weather.gameplay_weather,
                                 Weather.severity, Weather.world_time,
                                 Weather.last_updated)

        if timestamp > 0:
            # If timestamp is known, only send the last scanned weather.
            t = datetime.utcfromtimestamp(timestamp / 1000)
            query = query.filter(Weather.last_updated > t)

        if swLat and swLng and neLat and neLng:
            query = query.filter(Weather.latitude >= float(swLat) - lat_delta,
                                 Weather.longitude >= float(swLng) - lng_delta,
                                 Weather.latitude <= float(neLat) + lat_delta,
                                 Weather.longitude <= float(neLng) + lng_delta)

        if oSwLat and oSwLng and oNeLat and oNeLng:
            # Exclude weather within old boundaries.
            query = query.filter(
                ~and_(Weather.latitude >= float(oSwLat) -
                      lat_delta, Weather.longitude >= float(oSwLng) -
                      lng_delta, Weather.latitude <= float(oNeLat) + lat_delta,
                      Weather.longitude <= float(oNeLng) + lng_delta))

        if geofences:
            sql = geofences_to_query(geofences, 'weather')
            query = query.filter(text(sql))

        if exclude_geofences:
            sql = geofences_to_query(exclude_geofences, 'weather')
            query = query.filter(~text(sql))

        result = query.all()

        return [w._asdict() for w in result]
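
# Hedged usage sketch: fetching weather cells for a map viewport. The
# lat/lng deltas above pad the bounding box so cells whose *center* lies
# just outside the viewport are still returned; with exact bounds, cells
# whose edges intersect the viewport would be clipped. The coordinates
# below are hypothetical.
weather_cells = Weather.get_weather(40.70, -74.05, 40.80, -73.90)
for cell in weather_cells:
    print(cell['s2_cell_id'], cell['gameplay_weather'], cell['severity'])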
Example #17
def create_features_profile(current_skyline_app, requested_timestamp, data_for_metric, context, ionosphere_job, fp_parent_id, fp_generation, fp_learn):
    """
    Add a features_profile to the Skyline ionosphere database table.

    :param current_skyline_app: Skyline app name
    :param requested_timestamp: The timestamp of the dir that the features
        profile data is in
    :param data_for_metric: The base_name of the metric
    :param context: The context of the caller
    :param ionosphere_job: The ionosphere_job name related to creation request
        valid jobs are ``learn_fp_human``, ``learn_fp_generation``,
        ``learn_fp_learnt`` and ``learn_fp_automatic``.
    :param fp_parent_id: The id of the parent features profile that this was
        learnt from, 0 being an original human generated features profile
    :param fp_generation: The number of generations away for the original
        human generated features profile, 0 being an original human generated
        features profile.
    :param fp_learn: Whether Ionosphere should learn at use_full_duration_days
    :type current_skyline_app: str
    :type requested_timestamp: int
    :type data_for_metric: str
    :type context: str
    :type ionosphere_job: str
    :type fp_parent_id: int
    :type fp_generation: int
    :type fp_learn: boolean
    :return: fp_id, fp_in_successful, fp_exists, fail_msg, traceback_format_exc
    :rtype: str, boolean, boolean, str, str

    """

    current_skyline_app_logger = current_skyline_app + 'Log'
    current_logger = logging.getLogger(current_skyline_app_logger)

    base_name = data_for_metric.replace(settings.FULL_NAMESPACE, '', 1)

    if context == 'training_data':
        log_context = 'training data'
        ionosphere_learn_job = 'learn_fp_human'
    if context == 'features_profiles':
        log_context = 'features profile data'

    # @added 20170113 - Feature #1854: Ionosphere learn
    if context == 'ionosphere_learn':
        log_context = 'learn'

    current_logger.info('create_features_profile :: %s :: requested for %s at %s' % (
        context, str(base_name), str(requested_timestamp)))

    metric_timeseries_dir = base_name.replace('.', '/')
    if context == 'training_data':
        metric_training_data_dir = '%s/%s/%s' % (
            settings.IONOSPHERE_DATA_FOLDER, str(requested_timestamp),
            metric_timeseries_dir)
    if context == 'features_profiles':
        metric_training_data_dir = '%s/%s/%s' % (
            settings.IONOSPHERE_PROFILES_FOLDER, metric_timeseries_dir,
            str(requested_timestamp))

    # @added 20170113 - Feature #1854: Ionosphere learn
    if context == 'ionosphere_learn':
        # @modified 20170116 - Feature #1854: Ionosphere learn
        # Allowing ionosphere_learn to create a features profile for a training
        # data set that it has learnt is not anomalous
        if ionosphere_job != 'learn_fp_automatic':
            metric_training_data_dir = '%s/%s/%s' % (
                settings.IONOSPHERE_LEARN_FOLDER, str(requested_timestamp),
                metric_timeseries_dir)
        else:
            metric_training_data_dir = '%s/%s/%s' % (
                settings.IONOSPHERE_DATA_FOLDER, str(requested_timestamp),
                metric_timeseries_dir)

    features_file = '%s/%s.tsfresh.input.csv.features.transposed.csv' % (
        metric_training_data_dir, base_name)

    features_profile_dir = '%s/%s' % (
        settings.IONOSPHERE_PROFILES_FOLDER, metric_timeseries_dir)

    ts_features_profile_dir = '%s/%s/%s' % (
        settings.IONOSPHERE_PROFILES_FOLDER, metric_timeseries_dir,
        str(requested_timestamp))

    features_profile_created_file = '%s/%s.%s.fp.created.txt' % (
        metric_training_data_dir, str(requested_timestamp), base_name)

    features_profile_details_file = '%s/%s.%s.fp.details.txt' % (
        metric_training_data_dir, str(requested_timestamp), base_name)

    anomaly_check_file = '%s/%s.txt' % (metric_training_data_dir, base_name)

    trace = 'none'
    fail_msg = 'none'
    new_fp_id = False
    calculated_with_tsfresh = False
    calculated_time = False
    fcount = None
    fsum = None
    # @added 20170104 - Feature #1842: Ionosphere - Graphite now graphs
    # Added the ts_full_duration parameter so that the appropriate graphs can be
    # embedded for the user in the training data page
    ts_full_duration = '0'

    if context == 'ionosphere_learn':
        if not path.isfile(features_profile_details_file):
            current_logger.error('error :: create_features_profile :: no features_profile_details_file - %s' % features_profile_details_file)
            return 'none', False, False, fail_msg, trace

    if path.isfile(features_profile_details_file):
        current_logger.info('create_features_profile :: getting features profile details from - %s' % features_profile_details_file)
        # Read the details file
        with open(features_profile_details_file, 'r') as f:
            fp_details_str = f.read()
        fp_details = literal_eval(fp_details_str)
        calculated_with_tsfresh = fp_details[1]
        calculated_time = str(fp_details[2])
        fcount = str(fp_details[3])
        fsum = str(fp_details[4])
        try:
            ts_full_duration = str(fp_details[5])
        except:
            current_logger.error('error :: create_features_profile :: could not determine the full duration from - %s' % features_profile_details_file)
            ts_full_duration = '0'

        if context != 'ionosphere_learn':
            if ts_full_duration == '0':
                if path.isfile(anomaly_check_file):
                    current_logger.info('create_features_profile :: determining the full duration from anomaly_check_file - %s' % anomaly_check_file)
                    # Read the details file
                    with open(anomaly_check_file, 'r') as f:
                        anomaly_details = f.readlines()
                        for i, line in enumerate(anomaly_details):
                            if 'full_duration' in line:
                                _ts_full_duration = '%s' % str(line).split("'", 2)
                                full_duration_array = literal_eval(_ts_full_duration)
                                ts_full_duration = str(int(full_duration_array[1]))
                                current_logger.info('create_features_profile :: determined the full duration as - %s' % str(ts_full_duration))

    if path.isfile(features_profile_created_file):
        # Read the created file
        with open(features_profile_created_file, 'r') as f:
            fp_created_str = f.read()
        fp_created = literal_eval(fp_created_str)
        new_fp_id = fp_created[0]

        return str(new_fp_id), True, True, fail_msg, trace

    # Have data
    if path.isfile(features_file):
        current_logger.info('create_features_profile :: features_file exists: %s' % features_file)
    else:
        trace = 'none'
        fail_msg = 'error :: create_features_profile :: features_file does not exist: %s' % features_file
        current_logger.error('%s' % fail_msg)
        if context == 'training_data' or context == 'features_profiles':
            # Raise to the webapp to provide a traceback to the user in the
            # UI.  A bare raise is invalid here as there is no active
            # exception, so raise an explicit error.
            raise ValueError(fail_msg)
        else:
            return False, False, False, fail_msg, trace

    features_data = []
    # Use text mode and a list comprehension rather than filter() so this
    # parsing works on Python 3, where filter() returns an iterator that
    # cannot be indexed and is always truthy.
    with open(features_file, 'r') as fr:
        reader = csv.reader(fr, delimiter=',')
        for i, line in enumerate(reader):
            fname_id = False
            feature_name = str(line[0])
            feature_name_item = [
                x for x in TSFRESH_FEATURES if x[1] == feature_name]
            if feature_name_item:
                fname_id = int(feature_name_item[0][0])
            f_value = str(line[1])
            if fname_id and f_value:
                features_data.append([fname_id, f_value])

    # @added 20170113 - Feature #1854: Ionosphere learn - generations
    # Set the learn generations variables with the IONOSPHERE_LEARN_DEFAULT_ and any
    # settings.IONOSPHERE_LEARN_NAMESPACE_CONFIG values.  These will later be
    # overridden by any database values determined for the specific metric if
    # they exist.
    # Set defaults
    use_full_duration_days = int(settings.IONOSPHERE_LEARN_DEFAULT_FULL_DURATION_DAYS)
    valid_learning_duration = int(settings.IONOSPHERE_LEARN_DEFAULT_VALID_TIMESERIES_OLDER_THAN_SECONDS)
    max_generations = int(settings.IONOSPHERE_LEARN_DEFAULT_MAX_GENERATIONS)
    max_percent_diff_from_origin = float(settings.IONOSPHERE_LEARN_DEFAULT_MAX_PERCENT_DIFF_FROM_ORIGIN)
    # Default learn_full_duration_days as well so it is always defined,
    # even if get_ionosphere_learn_details fails below.
    learn_full_duration_days = use_full_duration_days
    try:
        use_full_duration, valid_learning_duration, use_full_duration_days, max_generations, max_percent_diff_from_origin = get_ionosphere_learn_details(current_skyline_app, base_name)
        learn_full_duration_days = use_full_duration_days
    except:
    except:
        current_logger.error(traceback.format_exc())
        current_logger.error('error :: create_features_profile :: failed to get_ionosphere_learn_details')

    current_logger.info('create_features_profile :: learn_full_duration_days     :: %s days' % (str(learn_full_duration_days)))
    current_logger.info('create_features_profile :: valid_learning_duration      :: %s seconds' % (str(valid_learning_duration)))
    current_logger.info('create_features_profile :: max_generations              :: %s' % (str(max_generations)))
    current_logger.info('create_features_profile :: max_percent_diff_from_origin :: %s' % (str(max_percent_diff_from_origin)))

    current_logger.info('create_features_profile :: getting MySQL engine')
    try:
        engine, fail_msg, trace = fp_create_get_an_engine(current_skyline_app)
        current_logger.info(fail_msg)
    except:
        trace = traceback.format_exc()
        current_logger.error(trace)
        fail_msg = 'error :: create_features_profile :: could not get a MySQL engine'
        current_logger.error('%s' % fail_msg)
        if context == 'training_data' or context == 'features_profiles':
            # Raise to the webapp to provide a traceback to the user in the UI
            raise
        else:
            return False, False, False, fail_msg, trace

    if not engine:
        trace = 'none'
        fail_msg = 'error :: create_features_profile :: engine not obtained'
        current_logger.error(fail_msg)
        if context == 'training_data' or context == 'features_profiles':
            # Raise to the webapp to provide a traceback to the user in the
            # UI.  There is no active exception here, so raise explicitly.
            raise ValueError(fail_msg)
        else:
            return False, False, False, fail_msg, trace

    # Get metric details from the database
    metrics_id = False

    # Use the learn details as per config
    metric_learn_full_duration_days = int(use_full_duration_days)
    metric_learn_valid_ts_older_than = int(valid_learning_duration)
    metric_max_generations = int(max_generations)
    metric_max_percent_diff_from_origin = float(max_percent_diff_from_origin)

    metrics_table = None
    try:
        metrics_table, fail_msg, trace = metrics_table_meta(current_skyline_app, engine)
        current_logger.info(fail_msg)
    except:
        trace = traceback.format_exc()
        current_logger.error('%s' % trace)
        fail_msg = 'error :: create_features_profile :: failed to get metrics_table meta for %s' % base_name
        current_logger.error('%s' % fail_msg)
        if context == 'training_data' or context == 'features_profiles':
            # @added 20170806 - Bug #2130: MySQL - Aborted_clients
            # Added missing disposal
            if engine:
                fp_create_engine_disposal(current_skyline_app, engine)
            # Raise to the webapp to provide a traceback to the user in the UI
            raise
        else:
            current_logger.info('create_features_profile :: disposing of any engine')
            fp_create_engine_disposal(current_skyline_app, engine)
            return False, False, False, fail_msg, trace

    current_logger.info('create_features_profile :: metrics_table OK')

    metric_db_object = None
    try:
        connection = engine.connect()
        # @modified 20161209 -  - Branch #922: ionosphere
        #                        Task #1658: Patterning Skyline Ionosphere
        # result = connection.execute('select id from metrics where metric=\'%s\'' % base_name)
#        for row in result:
#            while not metrics_id:
#                metrics_id = row['id']
        stmt = select([metrics_table]).where(metrics_table.c.metric == base_name)
        result = connection.execute(stmt)
        for row in result:
            metrics_id = row['id']
            # @added 20170113 - Feature #1854: Ionosphere learn - generations
            # Added Ionosphere LEARN generation related variables
            try:
                metric_learn_full_duration_days = int(row['learn_full_duration_days'])
                metric_learn_valid_ts_older_than = int(row['learn_valid_ts_older_than'])
                metric_max_generations = int(row['max_generations'])
                metric_max_percent_diff_from_origin = float(row['max_percent_diff_from_origin'])
            except:
                current_logger.error('error :: create_features_profile :: failed to determine learn related values from DB for %s' % base_name)
        row = result.fetchone()
        # metric_db_object = row
        connection.close()
        current_logger.info('create_features_profile :: determined db metric id: %s' % str(metrics_id))
        current_logger.info('create_features_profile :: determined db metric learn_full_duration_days: %s' % str(metric_learn_full_duration_days))
        current_logger.info('create_features_profile :: determined db metric learn_valid_ts_older_than: %s' % str(metric_learn_valid_ts_older_than))
        current_logger.info('create_features_profile :: determined db metric max_generations: %s' % str(metric_max_generations))
        current_logger.info('create_features_profile :: determined db metric max_percent_diff_from_origin: %s' % str(metric_max_percent_diff_from_origin))
    except:
        trace = traceback.format_exc()
        current_logger.error(trace)
        fail_msg = 'error :: create_features_profile :: could not determine id of metric from DB: %s' % base_name
        current_logger.error('%s' % fail_msg)

    if metric_learn_full_duration_days:
        learn_full_duration_days = metric_learn_full_duration_days
        # learn_full_duration = int(learn_full_duration_days) * 86400
    if metric_learn_valid_ts_older_than:
        learn_valid_ts_older_than = metric_learn_valid_ts_older_than
    if metric_max_generations:
        max_generations = metric_max_generations
    if metric_max_percent_diff_from_origin:
        max_percent_diff_from_origin = metric_max_percent_diff_from_origin
    current_logger.info('create_features_profile :: generation info - learn_full_duration_days     :: %s' % (str(learn_full_duration_days)))
    current_logger.info('create_features_profile :: generation info - learn_valid_ts_older_than    :: %s' % (str(learn_valid_ts_older_than)))
    current_logger.info('create_features_profile :: generation info - max_generations              :: %s' % (str(max_generations)))
    current_logger.info('create_features_profile :: generation info - max_percent_diff_from_origin :: %s' % (str(max_percent_diff_from_origin)))

    # @added 20170120 - Feature #1854: Ionosphere learn
    # Always use the timestamp from the anomaly file
    use_anomaly_timestamp = int(requested_timestamp)
    if context == 'ionosphere_learn':
        if path.isfile(anomaly_check_file):
            current_logger.info('create_features_profile :: determining the full duration from anomaly_check_file - %s' % anomaly_check_file)
            # Read the details file
            with open(anomaly_check_file, 'r') as f:
                anomaly_details = f.readlines()
                for i, line in enumerate(anomaly_details):
                    if 'metric_timestamp' in line:
                        _metric_timestamp = '%s' % str(line).split("'", 2)
                        metric_timestamp_array = literal_eval(_metric_timestamp)
                        use_anomaly_timestamp = (int(metric_timestamp_array[1]))
                        current_logger.info('create_features_profile :: determined the anomaly metric_timestamp as - %s' % str(use_anomaly_timestamp))

    ionosphere_table = None
    try:
        ionosphere_table, fail_msg, trace = ionosphere_table_meta(current_skyline_app, engine)
        current_logger.info(fail_msg)
    except:
        trace = traceback.format_exc()
        current_logger.error('%s' % trace)
        fail_msg = 'error :: create_features_profile :: failed to get ionosphere_table meta for %s' % base_name
        current_logger.error('%s' % fail_msg)
        if context == 'training_data' or context == 'features_profiles':
            # Raise to the webapp to provide a traceback to the user in the UI
            # @added 20170806 - Bug #2130: MySQL - Aborted_clients
            # Added missing disposal
            if engine:
                fp_create_engine_disposal(current_skyline_app, engine)
            raise
        else:
            current_logger.info('create_features_profile :: disposing of any engine')
            fp_create_engine_disposal(current_skyline_app, engine)
            return False, False, False, fail_msg, trace

    current_logger.info('create_features_profile :: ionosphere_table OK')

    # @added 20170403 - Feature #2000: Ionosphere - validated
    # Set all learn_fp_human features profiles to validated.
    fp_validated = 0
    if ionosphere_job == 'learn_fp_human':
        fp_validated = 1

    # @added 20170424 - Feature #2000: Ionosphere - validated
    # Set all generation 0 and 1 as validated
    if int(fp_generation) <= 1:
        fp_validated = 1

    new_fp_id = False
    try:
        connection = engine.connect()
        # @added 20170113 - Feature #1854: Ionosphere learn
        # Added learn values parent_id, generation
        # @modified 20170120 - Feature #1854: Ionosphere learn
        # Added anomaly_timestamp
        # @modified 20170403 - Feature #2000: Ionosphere - validated
        ins = ionosphere_table.insert().values(
            metric_id=int(metrics_id), full_duration=int(ts_full_duration),
            anomaly_timestamp=int(use_anomaly_timestamp),
            enabled=1, tsfresh_version=str(tsfresh_version),
            calc_time=calculated_time, features_count=fcount,
            features_sum=fsum, parent_id=fp_parent_id,
            generation=fp_generation, validated=fp_validated)
        result = connection.execute(ins)
        connection.close()
        new_fp_id = result.inserted_primary_key[0]
        current_logger.info('create_features_profile :: new ionosphere fp_id: %s' % str(new_fp_id))
    except:
        trace = traceback.format_exc()
        current_logger.error('%s' % trace)
        fail_msg = 'error :: create_features_profile :: failed to insert a new record into the ionosphere table for %s' % base_name
        current_logger.error('%s' % fail_msg)
        if context == 'training_data' or context == 'features_profiles':
            # @added 20170806 - Bug #2130: MySQL - Aborted_clients
            # Added missing disposal
            if engine:
                fp_create_engine_disposal(current_skyline_app, engine)
            # Raise to the webapp to provide a traceback to the user in the UI
            raise
        else:
            current_logger.info('create_features_profile :: disposing of any engine')
            fp_create_engine_disposal(current_skyline_app, engine)
            return False, False, False, fail_msg, trace

    if not RepresentsInt(new_fp_id):
        trace = 'none'
        fail_msg = 'error :: create_features_profile :: unknown new ionosphere new_fp_id for %s' % base_name
        current_logger.error('%s' % fail_msg)
        if context == 'training_data' or context == 'features_profiles':
            # @added 20170806 - Bug #2130: MySQL - Aborted_clients
            # Added missing disposal
            if engine:
                fp_create_engine_disposal(current_skyline_app, engine)
            # Raise to the webapp to provide a traceback to the user in the
            # UI.  There is no active exception here, so raise explicitly.
            raise ValueError(fail_msg)
        else:
            current_logger.info('create_features_profile :: disposing of any engine')
            fp_create_engine_disposal(current_skyline_app, engine)
            return False, False, False, fail_msg, trace

    # Create z_fp_<metric_id> table
    fp_table_created = False
    fp_table_name = 'z_fp_%s' % str(metrics_id)
    try:
        fp_meta = MetaData()
        # @modified 20161222 - Task #1812: z_fp table type
        # Changed to InnoDB from MyISAM as no files open issues and MyISAM clean
        # up, there can be LOTS of file_per_table z_fp_ tables/files without
        # the MyISAM issues.  z_fp_ tables are mostly read and will be shuffled
        # in the table cache as required.
        fp_metric_table = Table(
            fp_table_name, fp_meta,
            Column('id', Integer, primary_key=True),
            Column('fp_id', Integer, nullable=False, key='fp_id'),
            Column('feature_id', Integer, nullable=False),
            Column('value', DOUBLE(), nullable=True),
            mysql_charset='utf8',
            mysql_key_block_size='255',
            mysql_engine='InnoDB')
        fp_metric_table.create(engine, checkfirst=True)
        fp_table_created = True
    except:
        trace = traceback.format_exc()
        current_logger.error('%s' % trace)
        fail_msg = 'error :: create_features_profile :: failed to create table - %s' % fp_table_name
        current_logger.error('%s' % fail_msg)
        if context == 'training_data' or context == 'features_profiles':
            # @added 20170806 - Bug #2130: MySQL - Aborted_clients
            # Added missing disposal
            if engine:
                fp_create_engine_disposal(current_skyline_app, engine)
            # Raise to the webapp to provide a traceback to the user in the UI
            raise
        else:
            current_logger.info('create_features_profile :: %s - automated so the table should exist, continuing' % context)

    if not fp_table_created:
        trace = 'none'
        fail_msg = 'error :: create_features_profile :: failed to determine True for create table - %s' % fp_table_name
        current_logger.error('%s' % fail_msg)
        if context == 'training_data' or context == 'features_profiles':
            # @added 20170806 - Bug #2130: MySQL - Aborted_clients
            # Added missing disposal
            if engine:
                fp_create_engine_disposal(current_skyline_app, engine)
            # Raise to the webapp to provide a traceback to the user in the
            # UI.  There is no active exception here, so raise explicitly.
            raise ValueError(fail_msg)
        else:
            current_logger.info('create_features_profile :: %s - automated so the table should exist, continuing' % context)

    # Insert features and values
    insert_statement = []
    for fname_id, f_value in features_data:
        insert_statement.append({'fp_id': new_fp_id, 'feature_id': fname_id, 'value': f_value},)
    if insert_statement == []:
        trace = 'none'
        fail_msg = 'error :: create_features_profile :: empty insert_statement for %s inserts' % fp_table_name
        current_logger.error('%s' % fail_msg)
        # raise
    # else:
        # feature_count = sum(1 for x in a if isinstance(x, insert_statement))
        # current_logger.info(
        #     'fp_id - %s - %s feature values in insert_statement for %s ' %
        #     (str(feature_count), str(new_fp_id), fp_table_name))
        # feature_count = sum(1 for x in a if isinstance(x, insert_statement))
        # current_logger.info(
        #     'fp_id - %s - feature values in insert_statement for %s ' %
        #     (str(new_fp_id), fp_table_name))

    try:
        connection = engine.connect()
        connection.execute(fp_metric_table.insert(), insert_statement)
        connection.close()
        current_logger.info('create_features_profile :: fp_id - %s - feature values inserted into %s' % (str(new_fp_id), fp_table_name))
    except:
        trace = traceback.format_exc()
        current_logger.error('%s' % trace)
        fail_msg = 'error :: create_features_profile :: failed to insert feature values into %s' % fp_table_name
        current_logger.error('%s' % fail_msg)
        if context == 'training_data' or context == 'features_profiles':
            # @added 20170806 - Bug #2130: MySQL - Aborted_clients
            # Added missing disposal
            if engine:
                fp_create_engine_disposal(current_skyline_app, engine)
            # Raise to the webapp to provide a traceback to the user in the UI
            raise
        else:
            current_logger.info('create_features_profile :: %s - automated so the table should exist, continuing' % context)

    # Create metric ts table if not exists ts_<metric_id>
    # Create z_ts_<metric_id> table
    # @modified 20170121 - Feature #1854: Ionosphere learn - generations
    # TODO Adding the option to not save timeseries to DB, as default?
    # ts_table_created = False
    ts_table_name = 'z_ts_%s' % str(metrics_id)
    try:
        ts_meta = MetaData()
        # @modified 20161222 - Task #1812: z_fp table type
        # Changed to InnoDB from MyISAM as no files open issues and MyISAM clean
        # up, there can be LOTS of file_per_table z_fp_ tables/files without
        # the MyISAM issues.  z_fp_ tables are mostly read and will be shuffled
        # in the table cache as required.
        ts_metric_table = Table(
            ts_table_name, ts_meta,
            Column('id', Integer, primary_key=True),
            Column('fp_id', Integer, nullable=False, key='fp_id'),
            Column('timestamp', Integer, nullable=False),
            Column('value', DOUBLE(), nullable=True),
            mysql_charset='utf8',
            mysql_key_block_size='255',
            mysql_engine='InnoDB')
        ts_metric_table.create(engine, checkfirst=True)
        # ts_table_created = True
        current_logger.info('create_features_profile :: metric ts table created OK - %s' % (ts_table_name))
    except:
        trace = traceback.format_exc()
        current_logger.error('%s' % trace)
        fail_msg = 'error :: create_features_profile :: failed to create table - %s' % ts_table_name
        current_logger.error('%s' % fail_msg)
        if context == 'training_data' or context == 'features_profiles':
            # @added 20170806 - Bug #2130: MySQL - Aborted_clients
            # Added missing disposal
            if engine:
                fp_create_engine_disposal(current_skyline_app, engine)
            # Raise to the webapp to provide a traceback to the user in the UI
            raise
        else:
            current_logger.info('create_features_profile :: %s - automated so the table should exist, continuing' % context)

    # Insert timeseries that the features profile was created from
    raw_timeseries = []
    anomaly_json = '%s/%s.json' % (metric_training_data_dir, base_name)
    if path.isfile(anomaly_json):
        current_logger.info('create_features_profile :: metric anomaly json found OK - %s' % (anomaly_json))
        try:
            # Read the timeseries json file
            with open(anomaly_json, 'r') as f:
                raw_timeseries = f.read()
        except:
            trace = traceback.format_exc()
            current_logger.error(trace)
            fail_msg = 'error :: create_features_profile :: failed to read timeseries data from %s' % anomaly_json
            current_logger.error('%s' % (fail_msg))
            fail_msg = 'error: failed to read timeseries data from %s' % anomaly_json
            # end = timer()
            if context == 'training_data' or context == 'features_profiles':
                # @added 20170806 - Bug #2130: MySQL - Aborted_clients
                # Added missing disposal
                if engine:
                    fp_create_engine_disposal(current_skyline_app, engine)
                # Raise to the webapp to provide a traceback to the user in the UI
                raise
    else:
        trace = 'none'
        fail_msg = 'error: file not found - %s' % (anomaly_json)
        current_logger.error(fail_msg)
        # raise

    # Convert the timeseries to csv
    timeseries_array_str = str(raw_timeseries).replace('(', '[').replace(')', ']')
    timeseries = literal_eval(timeseries_array_str)

    datapoints = timeseries
    validated_timeseries = []
    for datapoint in datapoints:
        try:
            new_datapoint = [str(int(datapoint[0])), float(datapoint[1])]
            validated_timeseries.append(new_datapoint)
        # @modified 20170913 - Task #2160: Test skyline with bandit
        # Added nosec to exclude from bandit tests
        except:  # nosec
            continue

    insert_statement = []
    for ts, value in validated_timeseries:
        insert_statement.append({'fp_id': new_fp_id, 'timestamp': ts, 'value': value},)
    try:
        connection = engine.connect()
        connection.execute(ts_metric_table.insert(), insert_statement)
        connection.close()
        current_logger.info('create_features_profile :: fp_id - %s - timeseries inserted into %s' % (str(new_fp_id), ts_table_name))
    except:
        trace = traceback.format_exc()
        current_logger.error('%s' % trace)
        fail_msg = 'error :: create_features_profile :: failed to insert the timeseries into %s' % ts_table_name
        current_logger.error('%s' % fail_msg)
        if context == 'training_data' or context == 'features_profiles':
            # @added 20170806 - Bug #2130: MySQL - Aborted_clients
            # Added missing disposal
            if engine:
                fp_create_engine_disposal(current_skyline_app, engine)
            raise
        else:
            current_logger.info('create_features_profile :: %s - automated so the table should exist, continuing' % context)

    # Create a created features profile file
    try:
        # data = '[%s, %s, ]' % (new_fp_id, str(int(time.time())))
        # write_data_to_file(skyline_app, features_profile_created_file, 'w', data)
        # @modified 20170115 -  Feature #1854: Ionosphere learn - generations
        # Added parent_id and generation
        data = '[%s, %s, \'%s\', %s, %s, %s, %s, %s, %s]' % (
            new_fp_id, str(int(time.time())), str(tsfresh_version),
            str(calculated_time), str(fcount), str(fsum), str(ts_full_duration),
            str(fp_parent_id), str(fp_generation))
        write_data_to_file(current_skyline_app, features_profile_created_file, 'w', data)
    except:
        trace = traceback.format_exc()
        current_logger.error('%s' % trace)
        fail_msg = 'error :: create_features_profile :: failed to write fp.created file'
        current_logger.error('%s' % fail_msg)

    # Set ionosphere_enabled for the metric
    try:
        # update_statement = 'UPDATE metrics SET ionosphere_enabled=1 WHERE id=%s' % str(metrics_id)
        connection = engine.connect()
        # result = connection.execute('UPDATE metrics SET ionosphere_enabled=1 WHERE id=%s' % str(metrics_id))
        # connection.execute(ts_metric_table.insert(), insert_statement)
        connection.execute(
            metrics_table.update(
                metrics_table.c.id == metrics_id).values(ionosphere_enabled=1))
        connection.close()
        current_logger.info('create_features_profile :: ionosphere_enabled set on metric id: %s' % str(metrics_id))
    except:
        trace = traceback.format_exc()
        current_logger.error(trace)
        fail_msg = 'error :: create_features_profile :: could not update metrics table and set ionosphere_enabled on id %s' % str(metrics_id)
        current_logger.error('%s' % fail_msg)
        # raise

    # Copy data from training data dir to features_profiles dir
    if not path.isdir(ts_features_profile_dir):
        mkdir_p(ts_features_profile_dir)

    if path.isdir(ts_features_profile_dir):
        current_logger.info('create_features_profile :: fp_id - %s - features profile dir created - %s' % (str(new_fp_id), ts_features_profile_dir))
        # src_files = os.listdir(src)
        # for file_name in src_files:
        #    full_file_name = path.join(src, file_name)
        #    if (path.isfile(full_file_name)):
        #        shutil.copy(full_file_name, dest)

        data_files = []
        try:
            glob_path = '%s/*.*' % metric_training_data_dir
            data_files = glob.glob(glob_path)
        except:
            trace = traceback.format_exc()
            current_logger.error('%s' % trace)
            current_logger.error('error :: create_features_profile :: glob - fp_id - %s - training data not copied to %s' % (str(new_fp_id), ts_features_profile_dir))

        for i_file in data_files:
            try:
                shutil.copy(i_file, ts_features_profile_dir)
                current_logger.info('create_features_profile :: fp_id - %s - training data copied - %s' % (str(new_fp_id), i_file))
            except shutil.Error as e:
                trace = traceback.format_exc()
                current_logger.error('%s' % trace)
                current_logger.error('error :: create_features_profile :: shutil error - fp_id - %s - training data not copied to %s' % (str(new_fp_id), ts_features_profile_dir))
                current_logger.error('error :: create_features_profile :: %s' % (e))
            # Any error saying that the directory doesn't exist
            except OSError as e:
                trace = traceback.format_exc()
                current_logger.error('%s' % trace)
                current_logger.error('error :: create_features_profile :: OSError error - fp_id - %s - training data not copied to %s' % (str(new_fp_id), ts_features_profile_dir))
                current_logger.error('error :: create_features_profile :: %s' % (e))
        current_logger.info('create_features_profile :: fp_id - %s - training data copied to %s' % (str(new_fp_id), ts_features_profile_dir))
    else:
        current_logger.error('error :: create_features_profile :: fp_id - %s - training data not copied to %s' % (str(new_fp_id), ts_features_profile_dir))

    current_logger.info('create_features_profile :: disposing of any engine')
    try:
        if engine:
            fp_create_engine_disposal(current_skyline_app, engine)
        else:
            current_logger.info('create_features_profile :: no engine to dispose of')
    except:
        trace = traceback.format_exc()
        current_logger.error('%s' % trace)
        current_logger.error('error :: create_features_profile :: failed to dispose of the engine - fp_id - %s' % str(new_fp_id))

    # @added 20170113 - Feature #1854: Ionosphere learn - Redis ionosphere.learn.work namespace
    # Ionosphere learn needs Redis works sets
    # When a features profile is created there needs to be work added to a Redis
    # set. When a human makes a features profile, we want Ionosphere to make a
    # use_full_duration_days features profile valid_learning_duration (e.g.
    # 3361) later.
    if settings.IONOSPHERE_LEARN and new_fp_id:
        create_redis_work_item = False
        if context == 'training_data' and ionosphere_job == 'learn_fp_human':
            create_redis_work_item = True
            # @modified 20170120 -  Feature #1854: Ionosphere learn - generations
            # Added fp_learn parameter to allow the user to not learn the
            # use_full_duration_days
            if not fp_learn:
                create_redis_work_item = False
                current_logger.info('fp_learn is False, not adding an item to the Redis ionosphere.learn.work set')

        if ionosphere_job == 'learn_fp_automatic':
            create_redis_work_item = True
            # @added 20170131 - Feature #1886 Ionosphere learn - child like parent with evolutionary maturity
            # TODO: here a check may be required to evaluate whether the origin_fp_id
            #       had a use_full_duration features profile created, however
            #       due to the fact that it is in learn, suggests that it did
            #       have, not 100% sure.
            origin_fp_id_was_allowed_to_learn = False
            child_use_full_duration_count_of_origin_fp_id = 1
            # TODO: Determine the state
            # child_use_full_duration_count_of_origin_fp_id = SELECT COUNT(id) FROM ionosphere WHERE parent_id=origin_fp_id AND full_duration=use_full_duration
            if child_use_full_duration_count_of_origin_fp_id == 0:
                current_logger.info('the origin parent was not allowed to learn, not adding to the Redis ionosphere.learn.work set')
                create_redis_work_item = False

        if create_redis_work_item:
            try:
                current_logger.info(
                    'adding work to Redis ionosphere.learn.work set - [\'Soft\', \'%s\', %s, \'%s\', %s, %s] to make a learn features profile later' % (
                        str(ionosphere_job), str(requested_timestamp), base_name,
                        str(new_fp_id), str(fp_generation)))
                redis_conn = StrictRedis(unix_socket_path=settings.REDIS_SOCKET_PATH)
                # redis-py 3.x rejects a list as a set member, so add the
                # string form of the work item instead.
                redis_conn.sadd('ionosphere.learn.work', str(['Soft', str(ionosphere_job), int(requested_timestamp), base_name, int(new_fp_id), int(fp_generation)]))
            except:
                current_logger.error(traceback.format_exc())
                current_logger.error(
                    'error :: failed adding work to Redis ionosphere.learn.work set - [\'Soft\', \'%s\', %s, \'%s\', %s, %s] to make a learn features profile later' % (
                        str(ionosphere_job), str(requested_timestamp), base_name,
                        str(new_fp_id), str(fp_generation)))

        # @added 20170806 - Bug #2130: MySQL - Aborted_clients
        # Added missing disposal
        if engine:
            fp_create_engine_disposal(current_skyline_app, engine)

    return str(new_fp_id), True, False, fail_msg, trace
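
# Hedged usage sketch for the function above. All argument values are
# hypothetical: per the docstring, fp_parent_id=0 marks an original
# human-generated features profile and fp_generation=0 its generation.
fp_id, fp_created, fp_exists, fail_msg, trace = create_features_profile(
    'webapp', 1484354400, 'metrics.stats.gauges.example_metric',
    'training_data', 'learn_fp_human', 0, 0, False)
if fp_created:
    print('features profile %s created (already existed: %s)' % (
        fp_id, fp_exists))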
Example #18
class ScannedLocation(db.Model):
    __tablename__ = 'scannedlocation'

    cellid = db.Column(BIGINT(unsigned=True), primary_key=True)
    latitude = db.Column(DOUBLE(asdecimal=False), nullable=False)
    longitude = db.Column(DOUBLE(asdecimal=False), nullable=False)
    last_modified = db.Column(db.DateTime)

    __table_args__ = (
        Index('scannedlocation_last_modified', 'last_modified'),
        Index('scannedlocation_latitude_longitude', 'latitude', 'longitude'),
    )

    @staticmethod
    def get_recent(swLat,
                   swLng,
                   neLat,
                   neLng,
                   oSwLat=None,
                   oSwLng=None,
                   oNeLat=None,
                   oNeLng=None,
                   timestamp=0,
                   geofences=None,
                   exclude_geofences=None):
        query = db.session.query(ScannedLocation.cellid,
                                 ScannedLocation.latitude,
                                 ScannedLocation.longitude,
                                 ScannedLocation.last_modified)

        if timestamp > 0:
            # If timestamp is known, only send the last scanned locations.
            t = datetime.utcfromtimestamp(timestamp / 1000)
            query = query.filter(ScannedLocation.last_modified > t)
        else:
            # Only send locations scanned in last 15 minutes.
            active_time = datetime.utcnow() - timedelta(minutes=15)
            query = query.filter(ScannedLocation.last_modified > active_time)

        if swLat and swLng and neLat and neLng:
            query = query.filter(ScannedLocation.latitude >= swLat,
                                 ScannedLocation.longitude >= swLng,
                                 ScannedLocation.latitude <= neLat,
                                 ScannedLocation.longitude <= neLng)

        if oSwLat and oSwLng and oNeLat and oNeLng:
            # Exclude scanned locations within old boundaries.
            query = query.filter(
                ~and_(ScannedLocation.latitude >= oSwLat, ScannedLocation.
                      longitude >= oSwLng, ScannedLocation.latitude <= oNeLat,
                      ScannedLocation.longitude <= oNeLng))

        if geofences:
            sql = geofences_to_query(geofences, 'scannedlocation')
            query = query.filter(text(sql))

        if exclude_geofences:
            sql = geofences_to_query(exclude_geofences, 'scannedlocation')
            query = query.filter(~text(sql))

        result = query.all()

        return [loc._asdict() for loc in result]
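
# Hedged usage sketch: recently scanned locations for a viewport, with a
# client-supplied timestamp in milliseconds so only newer rows are sent.
# The coordinates and timestamp are hypothetical.
locations = ScannedLocation.get_recent(
    40.70, -74.05, 40.80, -73.90, timestamp=1546300800000)
for loc in locations:
    print(loc['cellid'], loc['latitude'], loc['longitude'])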
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

from sqlalchemy.schema import PrimaryKeyConstraint, UniqueConstraint
from sqlalchemy import (Column, String, DateTime, Boolean, Integer, JSON,
                        TIMESTAMP, func, text, Date, SmallInteger)
from sqlalchemy.dialects.mysql import DOUBLE

from . import DeclarativeBase, SQLSourceMixin, create_all_tables

Double = DOUBLE(asdecimal=False)


class IoTDevice(DeclarativeBase, SQLSourceMixin):
    __tablename__ = 'device.iot'

    device_id = Column(String(15), primary_key=True)
    name = Column(String(63), )
    desc = Column(String(127), )
    type = Column(String(31), )
    lat = Column(Double, )
    lon = Column(Double, )
    alt = Column(Double, )
    reference = Column(Boolean, )
    display = Column(Boolean, )
    device_type = Column(String(15), )
    owner_id = Column(String(31), )
    mobile = Column(Boolean, )
    outdoor = Column(Boolean, )
    manufacturer_id = Column(String(31), )
    mac_id = Column(String(31), )
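
# Why asdecimal=False on the Double alias above: by default MySQL's DOUBLE
# returns decimal.Decimal objects; asdecimal=False yields plain Python
# floats, the natural type for lat/lon/alt readings. A minimal sketch of
# creating the table, assuming a hypothetical DSN:
from sqlalchemy import create_engine

engine = create_engine('mysql+pymysql://user:pass@localhost/iot')
DeclarativeBase.metadata.create_all(engine)  # creates 'device.iot' if missing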
Example #20
           VARCHAR(length=40),
           ForeignKey(metric.c.uid,
                      name="metric_data_to_metric_fk",
                      onupdate="CASCADE",
                      ondelete="CASCADE"),
           primary_key=True,
           nullable=False,
           server_default=""),
    Column("rowid",
           INTEGER(),
           primary_key=True,
           autoincrement=False,
           nullable=False,
           server_default="0"),
    Column("timestamp", DATETIME(), nullable=False),
    Column("metric_value", DOUBLE(asdecimal=False), nullable=False),
    Column("raw_anomaly_score", DOUBLE(asdecimal=False)),
    Column("anomaly_score", DOUBLE(asdecimal=False)),
    Column("display_value", INTEGER(), autoincrement=False),
    schema=None,
)

Index("timestamp_idx", metric_data.c.timestamp)
Index("anomaly_score_idx", metric_data.c.anomaly_score)

lock = Table("lock",
             metadata,
             Column("name",
                    VARCHAR(length=40),
                    primary_key=True,
                    nullable=False),
Example #21
class GDCmscontent(Base):
    __tablename__ = 'cms_content'  # table name; film and TV content table
    # code = Column(String(64), charset='utf8',
    id = Column(BIGINT, primary_key=True)
    code = Column(String(64), default=None, comment='content code')
    type_name = Column(String(20), default=None, comment='content category name')
    user_id = Column(Integer, default=None, comment='operating user id')
    code_cp_id = Column(String(100),
                        default=None,
                        comment='id of the platform the content belongs to (Nanchuan, BesTV, Guangxin)')
    series_flag = Column(Integer,
                         default=None,
                         comment='content type: 100 normal VOD, 110 series parent, 120 series episode')
    parent_code = Column(String(100),
                         default=None,
                         comment='series parent code; empty if not a series episode')
    director = Column(String(255), default=None, comment='director')
    name = Column(
        String(255),
        default=None,
    )
    actor = Column(String(455), default=None, comment='actors')
    image_v = Column(String(255), default=None, comment='vertical poster')
    image_h = Column(String(255), default=None, comment='horizontal poster')
    image_s = Column(String(255), default=None, comment='square poster')
    status = Column(Integer, default=None, comment='content status: 0 inactive, 1 active')
    alias = Column(String(200), default=None, comment='alias')
    small_type = Column(String(100), default=None, comment='sub-category')
    kind = Column(String(100), default=None, comment='kind')
    show_time = Column(DATE, default=None, comment='release time')
    create_time = Column(
        DATETIME,
        default=None,
    )
    update_time = Column(
        DATETIME,
        default=None,
    )
    language = Column(String(20), default=None, comment='language')
    region = Column(String(100), default=None, comment='region')
    duration = Column(String(20), default=None, comment='duration')
    all_episode = Column(BIGINT, default=None, comment='total number of episodes')
    summary = Column(MEDIUMTEXT, default=None, comment='summary')
    source = Column(String(30), default=None, comment='source')
    keyword = Column(String(255), default=None, comment='key words')
    score = Column(Integer, default=None, comment='score')
    attention_degree = Column(Integer, default=None, comment='attention level')
    search_volume = Column(BIGINT, default=0, comment='search volume')
    comment_volume = Column(BIGINT, default=0, comment='comment volume')
    playback_volume = Column(BIGINT, default=0, comment='playback volume')
    weight = Column(Integer, default=None, comment='weight')
    year = Column(String(10), default=None, comment='year')
    sequence = Column(Integer, default=None, comment='sequence')
    content_type = Column(Integer,
                          default=None,
                          comment='content type: 1 video, 2 image, 3 image-text, 4 live, 5 web')
    thumbnail = Column(String(30), default=None, comment='thumbnail')
    searchkey = Column(String(30), default=None, comment='search key')
    stills = Column(String(30), default=None, comment='stills')
    check_time = Column(DATETIME, default=None, comment='review time')
    expire_time = Column(DATETIME, default=None, comment='expiry time')
    preview_start_time = Column(DATETIME, default=None, comment='preview start time')
    preview_stop_time = Column(DATETIME, default=None, comment='preview end time')
    episode = Column(Integer, default=None, comment='episode number')
    screen_writer = Column(String(255), default=None, comment='screenwriter')
    tag = Column(String(255), default=None, comment='tags')
    biz_domain = Column(Integer, default=None, comment='domain: none, single or dual')
    definition = Column(String(128),
                        default=None,
                        comment='bitrate list: 1 HD, 2 SD (default), 11 mobile HD, 15 mobile clear, 20 mobile smooth')
    resolution = Column(String(128), default=None, comment='resolution list, multi-valued, comma-separated')
    carrier_code = Column(String(128), default=None, comment='business carrier CODE')
    start_date_time = Column(DATETIME, default=None, comment='content start time (online time)')
    end_date_time = Column(DATETIME, default=None, comment='content end time (offline time)')
    price = Column(DOUBLE(50, 0), default=None, comment='price')
    control_level = Column(Integer, default=None, comment='control level')
    recommend_level = Column(Integer,
                             default=None,
                             comment='recommendation level (0: not recommendable, 1: hot recommendation, 2: featured recommendation)')
    copyright = Column(String(128), default=None, comment='copyright number')
    copyright_end_time = Column(DATE, default=None, comment='copyright expiry time')
    genres = Column(String(384), default=None, comment='themes')
    styles = Column(String(500), default=None, comment='styles')
    country = Column(String(500), default=None, comment='country')
    area_ids = Column(String(500), default=None, comment='area id list')
    service_ids = Column(String(500), default=None, comment='service id list')
    media_id = Column(String(128), default=None, comment='associated media id, if this field is not needed')
    unknow_1 = Column(
        String(50),
        default=None,
    )
    unknow_2 = Column(
        String(50),
        default=None,
    )
    data_flag = Column(
        Integer,
        default=None,
    )
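
# Hedged usage sketch: querying active series parents. Per the column
# comments, series_flag == 110 marks a series parent and status == 1 an
# active item; the session/engine wiring here is hypothetical.
from sqlalchemy.orm import sessionmaker

Session = sessionmaker(bind=engine)  # 'engine' assumed to be defined elsewhere
session = Session()
series_parents = session.query(GDCmscontent).filter(
    GDCmscontent.series_flag == 110, GDCmscontent.status == 1).all()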
Example #22
class DBCache:

    fort_ids_within_range = []
    unknown_fort_id = None
    not_a_fort_id = None


if config.DB_ENGINE.startswith('mysql'):
    from sqlalchemy.dialects.mysql import TINYINT, MEDIUMINT, BIGINT, DOUBLE, LONGTEXT

    TINY_TYPE = TINYINT(unsigned=True)  # 0 to 255
    MEDIUM_TYPE = MEDIUMINT(unsigned=True)  # 0 to 16777215
    UNSIGNED_HUGE_TYPE = BIGINT(unsigned=True)  # 0 to 18446744073709551615
    HUGE_TYPE = BigInteger
    PRIMARY_HUGE_TYPE = HUGE_TYPE
    FLOAT_TYPE = DOUBLE(precision=18, scale=14, asdecimal=False)
    LONG_TEXT = LONGTEXT
elif config.DB_ENGINE.startswith('postgres'):
    from sqlalchemy.dialects.postgresql import DOUBLE_PRECISION, TEXT

    class NumInt(TypeDecorator):
        '''Modify Numeric type for integers'''
        impl = Numeric

        def process_bind_param(self, value, dialect):
            if value is None:
                return None
            return int(value)

        def process_result_value(self, value, dialect):
            if value is None:
                return None
            return int(value)
Example #23
class ScannedLocation(db.Model):
    __tablename__ = 'scannedlocation'

    cellid = db.Column(BIGINT(unsigned=True), primary_key=True)
    latitude = db.Column(DOUBLE(asdecimal=False), nullable=False)
    longitude = db.Column(DOUBLE(asdecimal=False), nullable=False)
    last_modified = db.Column(db.DateTime)

    __table_args__ = (
        Index('scannedlocation_last_modified', 'last_modified'),
        Index('scannedlocation_latitude_longitude', 'latitude', 'longitude'),
    )

    @staticmethod
    def get_recent(swLat,
                   swLng,
                   neLat,
                   neLng,
                   oSwLat=None,
                   oSwLng=None,
                   oNeLat=None,
                   oNeLng=None,
                   timestamp=0):
        query = db.session.query(ScannedLocation.cellid,
                                 ScannedLocation.latitude,
                                 ScannedLocation.longitude,
                                 ScannedLocation.last_modified)

        active_time = datetime.utcnow() - timedelta(minutes=15)

        if not (swLat and swLng and neLat and neLng):
            query = query.filter(ScannedLocation.last_modified >= active_time)
        elif timestamp > 0:
            query = query.filter(
                ScannedLocation.last_modified >= datetime.utcfromtimestamp(
                    timestamp / 1000), ScannedLocation.latitude >= swLat,
                ScannedLocation.longitude >= swLng,
                ScannedLocation.latitude <= neLat,
                ScannedLocation.longitude <= neLng)
        elif oSwLat and oSwLng and oNeLat and oNeLng:
            # Send scanned locations in view but exclude those within old
            # boundaries. Only send newly uncovered scanned locations.
            query = query.filter(
                ScannedLocation.last_modified >= active_time,
                ScannedLocation.latitude >= swLat,
                ScannedLocation.longitude >= swLng,
                ScannedLocation.latitude <= neLat,
                ScannedLocation.longitude <= neLng,
                ~and_(ScannedLocation.latitude >= oSwLat,
                      ScannedLocation.longitude >= oSwLng,
                      ScannedLocation.latitude <= oNeLat,
                      ScannedLocation.longitude <= oNeLng))
        else:
            query = query.filter(ScannedLocation.last_modified >= active_time,
                                 ScannedLocation.latitude >= swLat,
                                 ScannedLocation.longitude >= swLng,
                                 ScannedLocation.latitude <= neLat,
                                 ScannedLocation.longitude <= neLng)

        result = query.all()

        return [loc._asdict() for loc in result]
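
# Hedged usage sketch for this variant of get_recent, which takes no
# geofence arguments: without a timestamp it returns only locations
# scanned in the last 15 minutes. The coordinates are hypothetical.
recent = ScannedLocation.get_recent(40.70, -74.05, 40.80, -73.90)
print('%d locations scanned recently' % len(recent))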