class Symbols(Base):
    """ORM model for the ``symbols`` lookup table."""

    __tablename__ = "symbols"

    # Surrogate integer primary key.
    symbol_id = Column(Integer(), primary_key=True)
    # The symbol text itself (free-length).
    symbol = Column(Text())
def create_text_column(self):
    """Add a ``text`` column of SQL type Text to the ``tasks`` table."""
    text_column = Column('text', Text())
    self.op.add_column('tasks', text_column)
class DialectTypesTest(fixtures.TestBase, AssertsCompiledSQL):
    """Tests for Oracle type compilation and cx_Oracle DBAPI type adaptation."""

    __dialect__ = oracle.OracleDialect()

    def test_no_clobs_for_string_params(self):
        """test that simple string params get a DBAPI type of VARCHAR, not
        CLOB. This is to prevent setinputsizes from setting up
        cx_oracle.CLOBs on string-based bind params [ticket:793]."""

        class FakeDBAPI(object):
            # Echoes any attribute name back as a string, so get_dbapi_type()
            # returns the DBAPI type *name* it looked up.
            def __getattr__(self, attr):
                return attr

        dialect = oracle.OracleDialect()
        dbapi = FakeDBAPI()

        b = bindparam("foo", "hello world!")
        eq_(b.type.dialect_impl(dialect).get_dbapi_type(dbapi), "STRING")

        b = bindparam("foo", "hello world!")
        eq_(b.type.dialect_impl(dialect).get_dbapi_type(dbapi), "STRING")

    def test_long(self):
        self.assert_compile(oracle.LONG(), "LONG")

    # Each tuple is (generic type instance, expected dialect impl class)
    # under the default (non-NCHAR) Unicode configuration.
    @testing.combinations(
        (Date(), cx_oracle._OracleDate),
        (oracle.OracleRaw(), cx_oracle._OracleRaw),
        (String(), String),
        (VARCHAR(), cx_oracle._OracleString),
        (DATE(), cx_oracle._OracleDate),
        (oracle.DATE(), oracle.DATE),
        (String(50), cx_oracle._OracleString),
        (Unicode(), cx_oracle._OracleUnicodeStringCHAR),
        (Text(), cx_oracle._OracleText),
        (UnicodeText(), cx_oracle._OracleUnicodeTextCLOB),
        (CHAR(), cx_oracle._OracleChar),
        (NCHAR(), cx_oracle._OracleNChar),
        (NVARCHAR(), cx_oracle._OracleUnicodeStringNCHAR),
        (oracle.RAW(50), cx_oracle._OracleRaw),
    )
    def test_type_adapt(self, start, test):
        dialect = cx_oracle.dialect()
        assert isinstance(
            start.dialect_impl(dialect), test), "wanted %r got %r" % (
            test, start.dialect_impl(dialect))

    # Same mapping, but with use_nchar_for_unicode=True the Unicode types
    # adapt to the NCHAR/NCLOB impls instead.
    @testing.combinations(
        (String(), String),
        (VARCHAR(), cx_oracle._OracleString),
        (String(50), cx_oracle._OracleString),
        (Unicode(), cx_oracle._OracleUnicodeStringNCHAR),
        (Text(), cx_oracle._OracleText),
        (UnicodeText(), cx_oracle._OracleUnicodeTextNCLOB),
        (NCHAR(), cx_oracle._OracleNChar),
        (NVARCHAR(), cx_oracle._OracleUnicodeStringNCHAR),
    )
    def test_type_adapt_nchar(self, start, test):
        dialect = cx_oracle.dialect(use_nchar_for_unicode=True)
        assert isinstance(
            start.dialect_impl(dialect), test), "wanted %r got %r" % (
            test, start.dialect_impl(dialect))

    def test_raw_compile(self):
        self.assert_compile(oracle.RAW(), "RAW")
        self.assert_compile(oracle.RAW(35), "RAW(35)")

    def test_char_length(self):
        self.assert_compile(VARCHAR(50), "VARCHAR(50 CHAR)")

        # Oracle 8 does not support the "CHAR" length qualifier.
        oracle8dialect = oracle.dialect()
        oracle8dialect.server_version_info = (8, 0)
        self.assert_compile(VARCHAR(50), "VARCHAR(50)", dialect=oracle8dialect)

        self.assert_compile(NVARCHAR(50), "NVARCHAR2(50)")
        self.assert_compile(CHAR(50), "CHAR(50)")

    # (type instance, expected DDL string) under the default configuration.
    @testing.combinations(
        (String(50), "VARCHAR2(50 CHAR)"),
        (Unicode(50), "VARCHAR2(50 CHAR)"),
        (NVARCHAR(50), "NVARCHAR2(50)"),
        (VARCHAR(50), "VARCHAR(50 CHAR)"),
        (oracle.NVARCHAR2(50), "NVARCHAR2(50)"),
        (oracle.VARCHAR2(50), "VARCHAR2(50 CHAR)"),
        (String(), "VARCHAR2"),
        (Unicode(), "VARCHAR2"),
        (NVARCHAR(), "NVARCHAR2"),
        (VARCHAR(), "VARCHAR"),
        (oracle.NVARCHAR2(), "NVARCHAR2"),
        (oracle.VARCHAR2(), "VARCHAR2"),
    )
    def test_varchar_types(self, typ, exp):
        dialect = oracle.dialect()
        self.assert_compile(typ, exp, dialect=dialect)

    # Same, with use_nchar_for_unicode=True: Unicode renders as NVARCHAR2.
    @testing.combinations(
        (String(50), "VARCHAR2(50 CHAR)"),
        (Unicode(50), "NVARCHAR2(50)"),
        (NVARCHAR(50), "NVARCHAR2(50)"),
        (VARCHAR(50), "VARCHAR(50 CHAR)"),
        (oracle.NVARCHAR2(50), "NVARCHAR2(50)"),
        (oracle.VARCHAR2(50), "VARCHAR2(50 CHAR)"),
        (String(), "VARCHAR2"),
        (Unicode(), "NVARCHAR2"),
        (NVARCHAR(), "NVARCHAR2"),
        (VARCHAR(), "VARCHAR"),
        (oracle.NVARCHAR2(), "NVARCHAR2"),
        (oracle.VARCHAR2(), "VARCHAR2"),
    )
    def test_varchar_use_nchar_types(self, typ, exp):
        dialect = oracle.dialect(use_nchar_for_unicode=True)
        self.assert_compile(typ, exp, dialect=dialect)

    @testing.combinations(
        (oracle.INTERVAL(), "INTERVAL DAY TO SECOND"),
        (oracle.INTERVAL(day_precision=3), "INTERVAL DAY(3) TO SECOND"),
        (oracle.INTERVAL(second_precision=5), "INTERVAL DAY TO SECOND(5)"),
        (
            oracle.INTERVAL(day_precision=2, second_precision=5),
            "INTERVAL DAY(2) TO SECOND(5)",
        ),
    )
    def test_interval(self, type_, expected):
        self.assert_compile(type_, expected)
def MediumText():
    """Return a generic Text type that renders as MEDIUMTEXT on MySQL."""
    generic = Text()
    return generic.with_variant(MEDIUMTEXT(), 'mysql')
class States(Base):  # type: ignore
    """State change history."""

    __table_args__ = (
        # Used for fetching the state of entities at a specific time
        # (get_states in history.py)
        Index("ix_states_entity_id_last_updated", "entity_id", "last_updated"),
        {
            "mysql_default_charset": "utf8mb4",
            "mysql_collate": "utf8mb4_unicode_ci"
        },
    )
    __tablename__ = TABLE_STATES

    state_id = Column(Integer, Identity(), primary_key=True)
    domain = Column(String(MAX_LENGTH_STATE_DOMAIN))
    entity_id = Column(String(MAX_LENGTH_STATE_ENTITY_ID))
    state = Column(String(MAX_LENGTH_STATE_STATE))
    # JSON-serialized attribute dict; LONGTEXT on MySQL to avoid truncation.
    attributes = Column(Text().with_variant(mysql.LONGTEXT, "mysql"))
    event_id = Column(Integer,
                      ForeignKey("events.event_id", ondelete="CASCADE"),
                      index=True)
    last_changed = Column(DATETIME_TYPE, default=dt_util.utcnow)
    last_updated = Column(DATETIME_TYPE, default=dt_util.utcnow, index=True)
    created = Column(DATETIME_TYPE, default=dt_util.utcnow)
    # Self-referential link to the previous state row, if any.
    old_state_id = Column(Integer, ForeignKey("states.state_id"), index=True)
    event = relationship("Events", uselist=False)
    old_state = relationship("States", remote_side=[state_id])

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            f"<recorder.States("
            f"id={self.state_id}, domain='{self.domain}', entity_id='{self.entity_id}', "
            f"state='{self.state}', event_id='{self.event_id}', "
            f"last_updated='{self.last_updated.isoformat(sep=' ', timespec='seconds')}', "
            f"old_state_id={self.old_state_id}"
            f")>")

    @staticmethod
    def from_event(event):
        """Create object from a state_changed event."""
        entity_id = event.data["entity_id"]
        state = event.data.get("new_state")
        dbstate = States(entity_id=entity_id)

        # State got deleted
        if state is None:
            dbstate.state = ""
            dbstate.domain = split_entity_id(entity_id)[0]
            dbstate.attributes = "{}"
            dbstate.last_changed = event.time_fired
            dbstate.last_updated = event.time_fired
        else:
            dbstate.domain = state.domain
            dbstate.state = state.state
            # Compact separators keep the stored JSON small.
            dbstate.attributes = json.dumps(dict(state.attributes),
                                            cls=JSONEncoder,
                                            separators=(",", ":"))
            dbstate.last_changed = state.last_changed
            dbstate.last_updated = state.last_updated
        return dbstate

    def to_native(self, validate_entity_id=True):
        """Convert to an HA state object."""
        try:
            return State(
                self.entity_id,
                self.state,
                json.loads(self.attributes),
                process_timestamp(self.last_changed),
                process_timestamp(self.last_updated),
                # Join the events table on event_id to get the context instead
                # as it will always be there for state_changed events
                context=Context(id=None),
                validate_entity_id=validate_entity_id,
            )
        except ValueError:
            # When json.loads fails
            _LOGGER.exception("Error converting row to state: %s", self)
            return None
class Connection(Base, LoggingMixin):  # pylint: disable=too-many-instance-attributes
    """
    Placeholder to store information about different database instances
    connection information. The idea here is that scripts use references to
    database instances (conn_id) instead of hard coding hostname, logins and
    passwords when using operators or hooks.

    .. seealso::
        For more information on how to use this class, see: :doc:`/howto/connection`

    :param conn_id: The connection ID.
    :type conn_id: str
    :param conn_type: The connection type.
    :type conn_type: str
    :param description: The connection description.
    :type description: str
    :param host: The host.
    :type host: str
    :param login: The login.
    :type login: str
    :param password: The password.
    :type password: str
    :param schema: The schema.
    :type schema: str
    :param port: The port number.
    :type port: int
    :param extra: Extra metadata. Non-standard data such as private/SSH keys
        can be saved here. JSON encoded object.
    :type extra: str
    :param uri: URI address describing connection parameters.
    :type uri: str
    """

    __tablename__ = "connection"

    id = Column(Integer(), primary_key=True)
    conn_id = Column(String(ID_LEN), unique=True, nullable=False)
    conn_type = Column(String(500), nullable=False)
    description = Column(Text(5000))
    host = Column(String(500))
    schema = Column(String(500))
    login = Column(String(500))
    # Raw (possibly Fernet-encrypted) password; exposed via the ``password`` synonym.
    _password = Column('password', String(5000))
    port = Column(Integer())
    is_encrypted = Column(Boolean, unique=False, default=False)
    is_extra_encrypted = Column(Boolean, unique=False, default=False)
    # Raw (possibly Fernet-encrypted) extra JSON; exposed via the ``extra`` synonym.
    _extra = Column('extra', String(5000))

    def __init__(  # pylint: disable=too-many-arguments
        self,
        conn_id: Optional[str] = None,
        conn_type: Optional[str] = None,
        description: Optional[str] = None,
        host: Optional[str] = None,
        login: Optional[str] = None,
        password: Optional[str] = None,
        schema: Optional[str] = None,
        port: Optional[int] = None,
        extra: Optional[str] = None,
        uri: Optional[str] = None,
    ):
        super().__init__()
        self.conn_id = conn_id
        self.description = description
        # A URI is an alternative to the individual fields; mixing both is ambiguous.
        if uri and (  # pylint: disable=too-many-boolean-expressions
                conn_type or host or login or password or schema or port or extra):
            raise AirflowException(
                "You must create an object using the URI or individual values "
                "(conn_type, host, login, password, schema, port or extra)."
                "You can't mix these two ways to create this object.")
        if uri:
            self._parse_from_uri(uri)
        else:
            self.conn_type = conn_type
            self.host = host
            self.login = login
            self.password = password
            self.schema = schema
            self.port = port
            self.extra = extra

    def parse_from_uri(self, **uri):
        """This method is deprecated. Please use uri parameter in constructor."""
        warnings.warn(
            "This method is deprecated. Please use uri parameter in constructor.",
            DeprecationWarning)
        self._parse_from_uri(**uri)

    def _parse_from_uri(self, uri: str):
        """Populate the connection fields from a URI string."""
        uri_parts = urlparse(uri)
        conn_type = uri_parts.scheme
        # Normalize scheme spellings to Airflow conn_type conventions.
        if conn_type == 'postgresql':
            conn_type = 'postgres'
        elif '-' in conn_type:
            conn_type = conn_type.replace('-', '_')
        self.conn_type = conn_type
        self.host = _parse_netloc_to_hostname(uri_parts)
        quoted_schema = uri_parts.path[1:]
        self.schema = unquote(
            quoted_schema) if quoted_schema else quoted_schema
        self.login = unquote(
            uri_parts.username) if uri_parts.username else uri_parts.username
        self.password = unquote(
            uri_parts.password) if uri_parts.password else uri_parts.password
        self.port = uri_parts.port
        if uri_parts.query:
            # Query parameters become the JSON-encoded ``extra`` payload.
            self.extra = json.dumps(
                dict(parse_qsl(uri_parts.query, keep_blank_values=True)))

    def get_uri(self) -> str:
        """Return connection in URI format"""
        uri = '{}://'.format(str(self.conn_type).lower().replace('_', '-'))

        authority_block = ''
        if self.login is not None:
            authority_block += quote(self.login, safe='')
        if self.password is not None:
            authority_block += ':' + quote(self.password, safe='')
        if authority_block > '':
            authority_block += '@'
        uri += authority_block

        host_block = ''
        if self.host:
            host_block += quote(self.host, safe='')
        if self.port:
            # With no host an '@' placeholder keeps the port parseable.
            if host_block > '':
                host_block += f':{self.port}'
            else:
                host_block += f'@:{self.port}'
        if self.schema:
            host_block += '/{}'.format(quote(self.schema, safe=''))
        uri += host_block

        if self.extra_dejson:
            uri += '?{}'.format(urlencode(self.extra_dejson))

        return uri

    def get_password(self) -> Optional[str]:
        """Return encrypted password."""
        if self._password and self.is_encrypted:
            fernet = get_fernet()
            if not fernet.is_encrypted:
                raise AirflowException(
                    "Can't decrypt encrypted password for login={}, \
FERNET_KEY configuration is missing".format(self.login))
            return fernet.decrypt(bytes(self._password, 'utf-8')).decode()
        else:
            return self._password

    def set_password(self, value: Optional[str]):
        """Encrypt password and set in object attribute."""
        if value:
            fernet = get_fernet()
            self._password = fernet.encrypt(bytes(value, 'utf-8')).decode()
            self.is_encrypted = fernet.is_encrypted

    @declared_attr
    def password(cls):  # pylint: disable=no-self-argument
        """Password. The value is decrypted/encrypted when reading/setting the value."""
        return synonym('_password',
                       descriptor=property(cls.get_password, cls.set_password))

    def get_extra(self) -> Dict:
        """Return encrypted extra-data."""
        if self._extra and self.is_extra_encrypted:
            fernet = get_fernet()
            if not fernet.is_encrypted:
                raise AirflowException(
                    "Can't decrypt `extra` params for login={},\
FERNET_KEY configuration is missing".format(self.login))
            return fernet.decrypt(bytes(self._extra, 'utf-8')).decode()
        else:
            return self._extra

    def set_extra(self, value: str):
        """Encrypt extra-data and save in object attribute to object."""
        if value:
            fernet = get_fernet()
            self._extra = fernet.encrypt(bytes(value, 'utf-8')).decode()
            self.is_extra_encrypted = fernet.is_encrypted
        else:
            self._extra = value
            self.is_extra_encrypted = False

    @declared_attr
    def extra(cls):  # pylint: disable=no-self-argument
        """Extra data. The value is decrypted/encrypted when reading/setting the value."""
        return synonym('_extra',
                       descriptor=property(cls.get_extra, cls.set_extra))

    def rotate_fernet_key(self):
        """Encrypts data with a new key. See: :ref:`security/fernet`"""
        fernet = get_fernet()
        if self._password and self.is_encrypted:
            self._password = fernet.rotate(
                self._password.encode('utf-8')).decode()
        if self._extra and self.is_extra_encrypted:
            self._extra = fernet.rotate(self._extra.encode('utf-8')).decode()

    def get_hook(self):
        """Return hook based on conn_type."""
        hook_class_name, conn_id_param = ProvidersManager().hooks.get(
            self.conn_type, (None, None))
        if not hook_class_name:
            raise AirflowException(f'Unknown hook type "{self.conn_type}"')
        hook_class = import_string(hook_class_name)
        return hook_class(**{conn_id_param: self.conn_id})

    def __repr__(self):
        return self.conn_id

    def log_info(self):
        """
        This method is deprecated. You can read each field individually or use the
        default representation (`__repr__`).
        """
        warnings.warn(
            "This method is deprecated. You can read each field individually or "
            "use the default representation (__repr__).",
            DeprecationWarning,
            stacklevel=2,
        )
        return "id: {}. Host: {}, Port: {}, Schema: {}, Login: {}, Password: {}, extra: {}".format(
            self.conn_id,
            self.host,
            self.port,
            self.schema,
            self.login,
            "XXXXXXXX" if self.password else None,
            "XXXXXXXX" if self.extra_dejson else None,
        )

    def debug_info(self):
        """
        This method is deprecated. You can read each field individually or use the
        default representation (`__repr__`).
        """
        warnings.warn(
            "This method is deprecated. You can read each field individually or "
            "use the default representation (__repr__).",
            DeprecationWarning,
            stacklevel=2,
        )
        return "id: {}. Host: {}, Port: {}, Schema: {}, Login: {}, Password: {}, extra: {}".format(
            self.conn_id,
            self.host,
            self.port,
            self.schema,
            self.login,
            "XXXXXXXX" if self.password else None,
            self.extra_dejson,
        )

    @property
    def extra_dejson(self) -> Dict:
        """Returns the extra property by deserializing json."""
        obj = {}
        if self.extra:
            try:
                obj = json.loads(self.extra)
            except JSONDecodeError as e:
                self.log.exception(e)
                self.log.error("Failed parsing the json for conn_id %s",
                               self.conn_id)
        return obj

    @classmethod
    def get_connections_from_secrets(cls, conn_id: str) -> List['Connection']:
        """
        Get all connections as an iterable.

        :param conn_id: connection id
        :return: array of connections
        """
        for secrets_backend in ensure_secrets_loaded():
            conn_list = secrets_backend.get_connections(conn_id=conn_id)
            if conn_list:
                return list(conn_list)
        raise AirflowNotFoundException(
            f"The conn_id `{conn_id}` isn't defined")
class Connection(Base, LoggingMixin):
    """
    Placeholder to store information about different database instances
    connection information. The idea here is that scripts use references to
    database instances (conn_id) instead of hard coding hostname, logins and
    passwords when using operators or hooks.

    .. seealso::
        For more information on how to use this class, see: :doc:`/howto/connection`

    :param conn_id: The connection ID.
    :type conn_id: str
    :param conn_type: The connection type.
    :type conn_type: str
    :param description: The connection description.
    :type description: str
    :param host: The host.
    :type host: str
    :param login: The login.
    :type login: str
    :param password: The password.
    :type password: str
    :param schema: The schema.
    :type schema: str
    :param port: The port number.
    :type port: int
    :param extra: Extra metadata. Non-standard data such as private/SSH keys
        can be saved here. JSON encoded object.
    :type extra: str
    :param uri: URI address describing connection parameters.
    :type uri: str
    """

    # Query-string key under which an opaque ``extra`` payload is round-tripped
    # through get_uri()/_parse_from_uri() when it is not a flat string dict.
    EXTRA_KEY = '__extra__'

    __tablename__ = "connection"

    id = Column(Integer(), primary_key=True)
    conn_id = Column(String(ID_LEN), unique=True, nullable=False)
    conn_type = Column(String(500), nullable=False)
    description = Column(Text(5000))
    host = Column(String(500))
    schema = Column(String(500))
    login = Column(String(500))
    # Raw (possibly Fernet-encrypted) password; exposed via the ``password`` synonym.
    _password = Column('password', String(5000))
    port = Column(Integer())
    is_encrypted = Column(Boolean, unique=False, default=False)
    is_extra_encrypted = Column(Boolean, unique=False, default=False)
    # Raw (possibly Fernet-encrypted) extra JSON; exposed via the ``extra`` synonym.
    _extra = Column('extra', Text())

    def __init__(
        self,
        conn_id: Optional[str] = None,
        conn_type: Optional[str] = None,
        description: Optional[str] = None,
        host: Optional[str] = None,
        login: Optional[str] = None,
        password: Optional[str] = None,
        schema: Optional[str] = None,
        port: Optional[int] = None,
        extra: Optional[Union[str, dict]] = None,
        uri: Optional[str] = None,
    ):
        super().__init__()
        self.conn_id = conn_id
        self.description = description
        # A dict ``extra`` is serialized to JSON so storage is always a string.
        if extra and not isinstance(extra, str):
            extra = json.dumps(extra)
        # A URI is an alternative to the individual fields; mixing both is ambiguous.
        if uri and (conn_type or host or login or password or schema or port or extra):
            raise AirflowException(
                "You must create an object using the URI or individual values "
                "(conn_type, host, login, password, schema, port or extra)."
                "You can't mix these two ways to create this object.")
        if uri:
            self._parse_from_uri(uri)
        else:
            self.conn_type = conn_type
            self.host = host
            self.login = login
            self.password = password
            self.schema = schema
            self.port = port
            self.extra = extra
        if self.password:
            mask_secret(self.password)

    @reconstructor
    def on_db_load(self):
        """Re-register the password for secret masking after ORM load."""
        if self.password:
            mask_secret(self.password)

    def parse_from_uri(self, **uri):
        """This method is deprecated. Please use uri parameter in constructor."""
        warnings.warn(
            "This method is deprecated. Please use uri parameter in constructor.",
            DeprecationWarning)
        self._parse_from_uri(**uri)

    def _parse_from_uri(self, uri: str):
        """Populate the connection fields from a URI string."""
        uri_parts = urlparse(uri)
        conn_type = uri_parts.scheme
        # Normalize scheme spellings to Airflow conn_type conventions.
        if conn_type == 'postgresql':
            conn_type = 'postgres'
        elif '-' in conn_type:
            conn_type = conn_type.replace('-', '_')
        self.conn_type = conn_type
        self.host = _parse_netloc_to_hostname(uri_parts)
        quoted_schema = uri_parts.path[1:]
        self.schema = unquote(
            quoted_schema) if quoted_schema else quoted_schema
        self.login = unquote(
            uri_parts.username) if uri_parts.username else uri_parts.username
        self.password = unquote(
            uri_parts.password) if uri_parts.password else uri_parts.password
        self.port = uri_parts.port
        if uri_parts.query:
            query = dict(parse_qsl(uri_parts.query, keep_blank_values=True))
            if self.EXTRA_KEY in query:
                # Opaque payload stored under EXTRA_KEY by get_uri().
                self.extra = query[self.EXTRA_KEY]
            else:
                self.extra = json.dumps(query)

    def get_uri(self) -> str:
        """Return connection in URI format"""
        uri = f"{str(self.conn_type).lower().replace('_', '-')}://"

        authority_block = ''
        if self.login is not None:
            authority_block += quote(self.login, safe='')
        if self.password is not None:
            authority_block += ':' + quote(self.password, safe='')
        if authority_block:
            authority_block += '@'
        uri += authority_block

        host_block = ''
        if self.host:
            host_block += quote(self.host, safe='')
        if self.port:
            # With no host an '@' placeholder keeps the port parseable.
            if host_block:
                host_block += f':{self.port}'
            else:
                host_block += f'@:{self.port}'
        if self.schema:
            host_block += f"/{quote(self.schema, safe='')}"
        uri += host_block

        if self.extra:
            try:
                query: Optional[str] = urlencode(self.extra_dejson)
            except TypeError:
                query = None
            # Only encode extras as plain query params if that survives a
            # round-trip; otherwise stash the raw payload under EXTRA_KEY.
            if query and self.extra_dejson == dict(
                    parse_qsl(query, keep_blank_values=True)):
                uri += '?' + query
            else:
                uri += '?' + urlencode({self.EXTRA_KEY: self.extra})

        return uri

    def get_password(self) -> Optional[str]:
        """Return encrypted password."""
        if self._password and self.is_encrypted:
            fernet = get_fernet()
            if not fernet.is_encrypted:
                raise AirflowException(
                    f"Can't decrypt encrypted password for login={self.login} "
                    f"FERNET_KEY configuration is missing")
            return fernet.decrypt(bytes(self._password, 'utf-8')).decode()
        else:
            return self._password

    def set_password(self, value: Optional[str]):
        """Encrypt password and set in object attribute."""
        if value:
            fernet = get_fernet()
            self._password = fernet.encrypt(bytes(value, 'utf-8')).decode()
            self.is_encrypted = fernet.is_encrypted

    @declared_attr
    def password(cls):
        """Password. The value is decrypted/encrypted when reading/setting the value."""
        return synonym('_password',
                       descriptor=property(cls.get_password, cls.set_password))

    def get_extra(self) -> Dict:
        """Return encrypted extra-data."""
        if self._extra and self.is_extra_encrypted:
            fernet = get_fernet()
            if not fernet.is_encrypted:
                raise AirflowException(
                    f"Can't decrypt `extra` params for login={self.login}, "
                    f"FERNET_KEY configuration is missing")
            return fernet.decrypt(bytes(self._extra, 'utf-8')).decode()
        else:
            return self._extra

    def set_extra(self, value: str):
        """Encrypt extra-data and save in object attribute to object."""
        if value:
            fernet = get_fernet()
            self._extra = fernet.encrypt(bytes(value, 'utf-8')).decode()
            self.is_extra_encrypted = fernet.is_encrypted
        else:
            self._extra = value
            self.is_extra_encrypted = False

    @declared_attr
    def extra(cls):
        """Extra data. The value is decrypted/encrypted when reading/setting the value."""
        return synonym('_extra',
                       descriptor=property(cls.get_extra, cls.set_extra))

    def rotate_fernet_key(self):
        """Encrypts data with a new key. See: :ref:`security/fernet`"""
        fernet = get_fernet()
        if self._password and self.is_encrypted:
            self._password = fernet.rotate(
                self._password.encode('utf-8')).decode()
        if self._extra and self.is_extra_encrypted:
            self._extra = fernet.rotate(self._extra.encode('utf-8')).decode()

    def get_hook(self, *, hook_params=None):
        """Return hook based on conn_type"""
        hook = ProvidersManager().hooks.get(self.conn_type, None)
        if hook is None:
            raise AirflowException(f'Unknown hook type "{self.conn_type}"')
        try:
            hook_class = import_string(hook.hook_class_name)
        except ImportError:
            # FIX: warnings.warn() takes no lazy "%s" arguments -- the original
            # call passed the hook class name as the ``category`` argument,
            # raising "TypeError: category must be a Warning subclass" instead
            # of emitting the warning. Format the message up front.
            warnings.warn(
                f"Could not import {hook.hook_class_name} when discovering "
                f"{hook.hook_name} {hook.package_name}"
            )
            raise
        if hook_params is None:
            hook_params = {}
        return hook_class(**{hook.connection_id_attribute_name: self.conn_id},
                          **hook_params)

    def __repr__(self):
        return self.conn_id

    def log_info(self):
        """
        This method is deprecated. You can read each field individually or use the
        default representation (`__repr__`).
        """
        warnings.warn(
            "This method is deprecated. You can read each field individually or "
            "use the default representation (__repr__).",
            DeprecationWarning,
            stacklevel=2,
        )
        return (
            f"id: {self.conn_id}. Host: {self.host}, Port: {self.port}, Schema: {self.schema}, "
            f"Login: {self.login}, Password: {'XXXXXXXX' if self.password else None}, "
            f"extra: {'XXXXXXXX' if self.extra_dejson else None}")

    def debug_info(self):
        """
        This method is deprecated. You can read each field individually or use the
        default representation (`__repr__`).
        """
        warnings.warn(
            "This method is deprecated. You can read each field individually or "
            "use the default representation (__repr__).",
            DeprecationWarning,
            stacklevel=2,
        )
        return (
            f"id: {self.conn_id}. Host: {self.host}, Port: {self.port}, Schema: {self.schema}, "
            f"Login: {self.login}, Password: {'XXXXXXXX' if self.password else None}, "
            f"extra: {self.extra_dejson}")

    def test_connection(self):
        """Calls out get_hook method and executes test_connection method on that."""
        status, message = False, ''
        try:
            hook = self.get_hook()
            if getattr(hook, 'test_connection', False):
                status, message = hook.test_connection()
            else:
                message = (
                    f"Hook {hook.__class__.__name__} doesn't implement or inherit test_connection method"
                )
        except Exception as e:  # best-effort: report any failure as a message
            message = str(e)
        return status, message

    @property
    def extra_dejson(self) -> Dict:
        """Returns the extra property by deserializing json."""
        obj = {}
        if self.extra:
            try:
                obj = json.loads(self.extra)
            except JSONDecodeError:
                self.log.exception("Failed parsing the json for conn_id %s",
                                   self.conn_id)
        # Mask sensitive keys from this list
        mask_secret(obj)
        return obj

    @classmethod
    def get_connection_from_secrets(cls, conn_id: str) -> 'Connection':
        """
        Get connection by conn_id.

        :param conn_id: connection id
        :return: connection
        """
        for secrets_backend in ensure_secrets_loaded():
            try:
                conn = secrets_backend.get_connection(conn_id=conn_id)
                if conn:
                    return conn
            except Exception:  # broad by design: fall through to next backend
                log.exception(
                    'Unable to retrieve connection from secrets backend (%s). '
                    'Checking subsequent secrets backend.',
                    type(secrets_backend).__name__,
                )
        raise AirflowNotFoundException(
            f"The conn_id `{conn_id}` isn't defined")
class PendingCertificate(db.Model):
    """A certificate request that has been submitted but not yet issued."""

    __tablename__ = 'pending_certs'

    id = Column(Integer, primary_key=True)
    external_id = Column(String(128))
    owner = Column(String(128), nullable=False)
    name = Column(String(256), unique=True)
    description = Column(String(1024))
    notify = Column(Boolean, default=True)
    # How many issuance attempts have been made so far.
    number_attempts = Column(Integer)
    rename = Column(Boolean, default=True)
    cn = Column(String(128))
    csr = Column(Text(), nullable=False)
    chain = Column(Text())
    # Stored via the Vault type; excluded from serialization (see sensitive_fields).
    private_key = Column(Vault, nullable=True)
    date_created = Column(ArrowType, PassiveDefault(func.now()), nullable=False)
    dns_provider_id = Column(
        Integer, ForeignKey('dns_providers.id', ondelete="CASCADE"))
    status = Column(Text(), nullable=True)
    last_updated = Column(ArrowType,
                          PassiveDefault(func.now()),
                          onupdate=func.now(),
                          nullable=False)
    rotation = Column(Boolean, default=False)
    user_id = Column(Integer, ForeignKey('users.id'))
    authority_id = Column(Integer,
                          ForeignKey('authorities.id', ondelete="CASCADE"))
    root_authority_id = Column(
        Integer, ForeignKey('authorities.id', ondelete="CASCADE"))
    rotation_policy_id = Column(Integer, ForeignKey('rotation_policies.id'))

    notifications = relationship(
        'Notification',
        secondary=pending_cert_notification_associations,
        backref='pending_cert',
        passive_deletes=True)
    destinations = relationship(
        'Destination',
        secondary=pending_cert_destination_associations,
        backref='pending_cert',
        passive_deletes=True)
    sources = relationship('Source',
                           secondary=pending_cert_source_associations,
                           backref='pending_cert',
                           passive_deletes=True)
    roles = relationship('Role',
                         secondary=pending_cert_role_associations,
                         backref='pending_cert',
                         passive_deletes=True)
    replaces = relationship('Certificate',
                            secondary=pending_cert_replacement_associations,
                            backref='pending_cert',
                            passive_deletes=True)
    options = Column(JSONType)

    rotation_policy = relationship("RotationPolicy")

    # Fields that must never be serialized/logged.
    sensitive_fields = ('private_key', )

    def __init__(self, **kwargs):
        """Build a pending certificate from request kwargs.

        Requires ``csr`` and ``owner``; ``common_name``/``authority`` are
        required when no explicit ``name`` is given.
        """
        self.csr = kwargs.get('csr')
        self.private_key = kwargs.get('private_key', "")
        if self.private_key:
            # If the request does not send private key, the key exists but the value is None
            self.private_key = self.private_key.strip()
        self.external_id = kwargs.get('external_id')

        # when destinations are appended they require a valid name.
        if kwargs.get('name'):
            self.name = get_or_increase_name(
                defaults.text_to_slug(kwargs['name']), 0)
            self.rename = False
        else:
            # TODO: Fix auto-generated name, it should be renamed on creation
            self.name = get_or_increase_name(
                defaults.certificate_name(kwargs['common_name'],
                                          kwargs['authority'].name,
                                          dt.now(), dt.now(), False),
                self.external_id)
            self.rename = True

        self.cn = defaults.common_name(utils.parse_csr(self.csr))
        self.owner = kwargs['owner']
        self.number_attempts = 0

        if kwargs.get('chain'):
            self.chain = kwargs['chain'].strip()

        self.notify = kwargs.get('notify', True)
        self.destinations = kwargs.get('destinations', [])
        self.notifications = kwargs.get('notifications', [])
        self.description = kwargs.get('description')
        self.roles = list(set(kwargs.get('roles', [])))
        self.replaces = kwargs.get('replaces', [])
        self.rotation = kwargs.get('rotation')
        self.rotation_policy = kwargs.get('rotation_policy')
        try:
            self.dns_provider_id = kwargs.get('dns_provider').id
        # NOTE(review): listing Exception makes the preceding types redundant;
        # the intent appears to be "ignore any failure when no provider given".
        except (AttributeError, KeyError, TypeError, Exception):
            pass
class BotOption(Base):
    """Key/value table holding bot configuration options."""

    __tablename__ = "bot_options"

    # Option name; also serves as the primary key.
    option = Column(String(64), nullable=False, primary_key=True)
    # Option value; NULL is allowed.
    value = Column(Text(), nullable=True)
Column("relative", Integer(), nullable=False), ) cheesecake_subindices = Table( "cheesecake_subindices", db.metadata, Column( "main_index_id", Integer(), ForeignKey("cheesecake_main_indices.id"), primary_key=True, nullable=False, ), Column("name", Text(), primary_key=True, nullable=False), Column("value", Integer(), nullable=False), Column("details", Text(), nullable=False), ) comments = Table( "comments", db.metadata, Column("id", Integer(), primary_key=True, nullable=False), Column( "rating", Integer(), ForeignKey("ratings.id", ondelete="CASCADE"), ),
class ReportSchedule(Model, AuditMixinNullable):
    """
    Report Schedules, supports alerts and reports
    """

    __tablename__ = "report_schedule"
    __table_args__ = (UniqueConstraint("name", "type"),)

    id = Column(Integer, primary_key=True)
    type = Column(String(50), nullable=False)
    name = Column(String(150), nullable=False)
    description = Column(Text)
    context_markdown = Column(Text)
    active = Column(Boolean, default=True, index=True)
    # Schedule in crontab syntax.
    crontab = Column(String(1000), nullable=False)
    creation_method = Column(
        String(255), server_default=ReportCreationMethod.ALERTS_REPORTS
    )
    timezone = Column(String(100), default="UTC", nullable=False)
    report_format = Column(String(50), default=ReportDataFormat.VISUALIZATION)
    # (Alerts) SQL statement evaluated to produce the observed value.
    sql = Column(Text())
    # (Alerts/Reports) M-O to chart
    chart_id = Column(Integer, ForeignKey("slices.id"), nullable=True)
    chart = relationship(Slice, backref="report_schedules", foreign_keys=[chart_id])
    # (Alerts/Reports) M-O to dashboard
    dashboard_id = Column(Integer, ForeignKey("dashboards.id"), nullable=True)
    dashboard = relationship(
        Dashboard, backref="report_schedules", foreign_keys=[dashboard_id]
    )
    # (Alerts) M-O to database
    database_id = Column(Integer, ForeignKey("dbs.id"), nullable=True)
    database = relationship(Database, foreign_keys=[database_id])
    owners = relationship(security_manager.user_model, secondary=report_schedule_user)

    # (Alerts) Stamped last observations
    last_eval_dttm = Column(DateTime)
    last_state = Column(String(50), default=ReportState.NOOP)
    last_value = Column(Float)
    last_value_row_json = Column(Text)

    # (Alerts) Observed value validation related columns
    validator_type = Column(String(100))
    validator_config_json = Column(Text, default="{}")

    # Log retention
    log_retention = Column(Integer, default=90)
    # (Alerts) After a success how long to wait for a new trigger (seconds)
    grace_period = Column(Integer, default=60 * 60 * 4)
    # (Alerts/Reports) Unlock a possible stalled working state
    working_timeout = Column(Integer, default=60 * 60 * 1)

    # Store the selected dashboard tabs etc.
    extra = Column(Text, default="{}")

    # (Reports) When generating a screenshot, bypass the cache?
    force_screenshot = Column(Boolean, default=False)

    def __repr__(self) -> str:
        return str(self.name)

    @renders("crontab")
    def crontab_humanized(self) -> str:
        """Human-readable description of the crontab schedule."""
        return get_description(self.crontab)

    @validates("extra")
    # pylint: disable=unused-argument,no-self-use
    def validate_extra(self, key: str, value: Dict[Any, Any]) -> Optional[str]:
        """Serialize the ``extra`` dict to a JSON string before persisting."""
        if value is not None:
            return json.dumps(value)
        return None
Column('key', Integer, primary_key=True), Column('message_key', Integer, ForeignKey("message.key"), index=True), Column('file_key', Integer, ForeignKey("file.key"), index=True), Column('account_key', Integer, ForeignKey("account.key"), index=True), Column('id', Integer, index=True), #, unique=True, nullable=False), Column('in_reply_to', Integer, index=True), Column('created', DateTime, index=True, nullable=False), Column('updated', DateTime, nullable=False), Column('parent', Boolean, nullable=False), Column('commit_id', String(64), nullable=False), Column('original_commit_id', String(64), nullable=False), Column('line', Integer, index=True), Column('original_line', Integer), Column('message', Text, nullable=False), Column('draft', Boolean, index=True, nullable=False), Column('url', Text()), ) approval_table = Table( 'approval', metadata, Column('key', Integer, primary_key=True), Column('pr_key', Integer, ForeignKey("pull_request.key"), index=True), Column('account_key', Integer, ForeignKey("account.key"), index=True), Column('state', String(32), index=True, nullable=False), Column('sha', String(64), nullable=False), Column('draft', Boolean, index=True, nullable=False), UniqueConstraint('pr_key', 'account_key', 'sha', name='approval_pr_key_account_key_sha_const'), )
logger = logging.getLogger(__name__) def handle_potential_registration(model: dict): if "hid" not in model and "uid" in model: model["hid"] = generate_feeder_hid(model["uid"]) if "discoveredAt" not in model: model["discoveredAt"] = get_current_timestamp() return model gateways = Table( "kronos_gateway", metadata, Column("hid", Text(), primary_key=True, index=True), Column("name", Text(), nullable=True), Column("uid", Text(), nullable=True), Column("osName", Text(), nullable=True), Column("type", Text(), nullable=True), Column("softwareName", Text(), nullable=True), Column("softwareVersion", Text(), nullable=True), Column("sdkVersion", Text(), nullable=True), Column("discoveredAt", Integer(), nullable=False), Column("apiKey", Text(), nullable=False), ) class KronosGateways: @classmethod async def get(cls, gateway_hid=""):
class MediaItem(Base, TablePrefix, SurrogateKeyId):
    """ORM model for a stored media file, addressed by content hash."""

    # 32-byte binary digest (presumably SHA-256 given the length — TODO
    # confirm against the hashing code). Unique, so identical files
    # deduplicate to one row.
    hash = Column(Binary(32), unique=True, nullable=False)
    # Storage path of the media item; every row must have one.
    path = Column(Text(), nullable=False)
class StateMixin(Mixin):
    """Mixin adding persisted per-service state columns to a model."""

    #: the unique name of the service, e.g. recorder,factor,tag
    state_name = Column(String(length=128))
    #: json string holding the serialized service state
    state = Column(Text())
class ResultTest(Base):
    """Persisted outcome of one automated URA test call."""

    __tablename__ = "result_test"

    id = Column(String(50), primary_key=True)
    to_number = Column(String(20))          # dialed URA number
    label = Column(String(50))
    success = Column(BOOLEAN)
    call_status = Column(String(30))
    transcription = Column(String(30))
    transcripted_text = Column(Text())
    transcripted_quality = Column(Float())
    created_at = Column(DateTime())
    updated_at = Column(DateTime())
    start_at = Column(DateTime())
    end_at = Column(DateTime())
    call_duration = Column(Integer())
    recording_duration = Column(Integer())
    recording_sid = Column(Text())
    recording_url = Column(Text())
    error_code = Column(Integer())
    alarmed_at = Column(DateTime())         # when an alert was raised for this result

    def json(self):
        """Return the full record as a JSON-serializable dict.

        Datetime fields are stringified; keys use dashes (API style),
        except the pre-existing "alarmed_at" key, kept for compatibility.
        """
        return {
            "id": self.id,
            "to-number": self.to_number,
            "label": self.label,
            "success": self.success,
            "call-status": self.call_status,
            "transcription": self.transcription,
            "transcripted-text": self.transcripted_text,
            "transcripted-quality": self.transcripted_quality,
            "created-at": str(self.created_at),
            "updated-at": str(self.updated_at),
            "start-at": str(self.start_at),
            "end-at": str(self.end_at),
            "call-duration": self.call_duration,
            "recording-duration": self.recording_duration,
            "recording-sid": self.recording_sid,
            "recording-url": self.recording_url,
            "error-code": self.error_code,
            "alarmed_at": str(self.alarmed_at),
        }

    def minimal_json(self):
        """Return a reduced dict with only the headline fields."""
        return {
            "id": self.id,
            "to-number": self.to_number,
            "label": self.label,
            "success": self.success,
            "call-status": self.call_status,
            "transcription": self.transcription,
            "start-at": str(self.start_at),
            "recording-duration": self.recording_duration,
            "alarmed_at": str(self.alarmed_at),
        }

    @classmethod
    def find_by_id(cls, id):
        """Return the row with this primary key, or None."""
        return session.query(cls).filter_by(id=id).one_or_none()

    @classmethod
    def find_by_to_number(cls, number, limit=3):
        """Return up to `limit` most recent results for one number."""
        return (session.query(cls).filter_by(to_number=number).order_by(
            ResultTest.start_at.desc()).limit(limit).all())

    @classmethod
    def find_all(cls):
        """Return every stored result."""
        return session.query(cls).all()

    @classmethod
    def get_last_unlarmed_faileds(cls):
        """Map each URA number to its failed, not-yet-alarmed results."""
        unlarmeds = {}
        for ura in Ura.get_uras_numbers():
            results_obj = (session.query(cls).filter_by(
                to_number=ura, success=False,
                alarmed_at=None).order_by(ResultTest.start_at.desc()).all())
            unlarmeds.update({ura: results_obj})
        return unlarmeds

    @classmethod
    def _counts_per_day(cls, success_literal, from_date, to_date):
        """Shared helper for the per-day success/failure aggregations.

        Previously get_failures_per_day/get_successes_per_day carried
        two near-identical copies of this query; only the SQL boolean
        literal differs.

        NOTE(security): the dates are interpolated straight into raw
        SQL, as in the original code. Callers must pass trusted,
        validated dates; migrating to a parameterized query is the
        proper fix once sqlalchemy.text is available here.
        """
        query = (
            "SELECT COUNT(success), DATE(start_at) FROM result_test "
            f"WHERE success={success_literal} "
            f"AND DATE(start_at) >= DATE('{from_date}') "
            f"AND DATE(start_at) <= DATE('{to_date}') "
            "GROUP BY success, DATE(start_at) ORDER BY DATE(start_at) DESC"
        )
        res = []
        with engine.connect() as con:
            for row in con.execute(query):
                res.append(list(row))
        return res

    @classmethod
    def get_failures_per_day(cls, from_date, to_date):
        """Count failed calls per day in the inclusive date range."""
        return cls._counts_per_day("FALSE", from_date, to_date)

    @classmethod
    def get_successes_per_day(cls, from_date, to_date):
        """Count successful calls per day in the inclusive date range."""
        return cls._counts_per_day("TRUE", from_date, to_date)

    @classmethod
    def find_results(cls, uras=None, n_last_results=3):
        """Map each URA to the minimal JSON of its last results.

        BUG FIX: the default was the mutable `uras=list()`, a shared
        object across calls (classic Python pitfall). `None` preserves
        the observable behavior: any falsy value falls back to
        Ura.get_uras_numbers().
        """
        results = {}
        uras = uras if uras else Ura.get_uras_numbers()
        for ura in uras:
            results_obj = ResultTest.find_by_to_number(ura, limit=n_last_results)
            test_results = [r.minimal_json() for r in results_obj]
            results.update({ura: test_results})
        return results

    def save_to_db(self):
        """Persist this row and close the session."""
        session.add(self)
        session.commit()
        session.close()

    def delete_from_db(self):
        """Delete this row and close the session."""
        session.delete(self)
        session.commit()
        session.close()
def validates_requires_python(self, *args, **kwargs): raise RuntimeError("Cannot set File.requires_python") class Filename(db.ModelBase): __tablename__ = "file_registry" id = Column(Integer, primary_key=True, nullable=False) filename = Column(Text, unique=True, nullable=False) release_classifiers = Table( "release_classifiers", db.metadata, Column("name", Text()), Column("version", Text()), Column("trove_id", Integer(), ForeignKey("trove_classifiers.id")), ForeignKeyConstraint( ["name", "version"], ["releases.name", "releases.version"], onupdate="CASCADE", ), Index("rel_class_name_idx", "name"), Index("rel_class_name_version_idx", "name", "version"), Index("rel_class_trove_id_idx", "trove_id"), Index("rel_class_version_id_idx", "version"), ) class JournalEntry(db.ModelBase):
class Certificate(db.Model):
    """ORM model for an X.509 certificate plus Lemur bookkeeping.

    Most certificate-derived fields (issuer, cn, san, validity window,
    serial, ...) are extracted from the PEM `body` at construction time
    via `lemur.common.utils.parse_certificate` and the `defaults`
    helpers; several properties re-parse `body` on demand.
    """

    __tablename__ = 'certificates'
    id = Column(Integer, primary_key=True)
    owner = Column(String(128), nullable=False)
    name = Column(String(128), unique=True)
    description = Column(String(1024))
    # Whether expiry notifications should be sent for this certificate.
    notify = Column(Boolean, default=True)
    body = Column(Text(), nullable=False)       # PEM certificate body
    chain = Column(Text())                      # PEM intermediate chain
    private_key = Column(Vault)                 # encrypted-at-rest key
    issuer = Column(String(128))
    serial = Column(String(128))
    cn = Column(String(128))
    deleted = Column(Boolean, index=True)
    not_before = Column(ArrowType)
    not_after = Column(ArrowType)
    date_created = Column(ArrowType, PassiveDefault(func.now()), nullable=False)
    signing_algorithm = Column(String(128))
    status = Column(String(128))
    bits = Column(Integer())
    san = Column(String(1024))  # TODO this should be migrated to boolean
    rotation = Column(Boolean, default=False)
    user_id = Column(Integer, ForeignKey('users.id'))
    authority_id = Column(Integer, ForeignKey('authorities.id', ondelete="CASCADE"))
    root_authority_id = Column(Integer, ForeignKey('authorities.id', ondelete="CASCADE"))
    notifications = relationship('Notification', secondary=certificate_notification_associations, backref='certificate')
    destinations = relationship('Destination', secondary=certificate_destination_associations, backref='certificate')
    sources = relationship('Source', secondary=certificate_source_associations, backref='certificate')
    domains = relationship('Domain', secondary=certificate_associations, backref='certificate')
    roles = relationship('Role', secondary=roles_certificates, backref='certificate')
    # Self-referential many-to-many: which certificates this one replaces.
    replaces = relationship('Certificate',
                            secondary=certificate_replacement_associations,
                            primaryjoin=id == certificate_replacement_associations.c.certificate_id,  # noqa
                            secondaryjoin=id == certificate_replacement_associations.c.replaced_certificate_id,  # noqa
                            backref='replaced')
    logs = relationship('Log', backref='certificate')
    endpoints = relationship('Endpoint', backref='certificate')

    def __init__(self, **kwargs):
        """Build a row from kwargs; `body` and `owner` are required.

        Parses the PEM body once and derives issuer/cn/san/validity,
        signing algorithm, key size and serial from it.
        """
        cert = lemur.common.utils.parse_certificate(kwargs['body'])
        self.issuer = defaults.issuer(cert)
        self.cn = defaults.common_name(cert)
        self.san = defaults.san(cert)
        self.not_before = defaults.not_before(cert)
        self.not_after = defaults.not_after(cert)

        # when destinations are appended they require a valid name.
        if kwargs.get('name'):
            self.name = get_or_increase_name(kwargs['name'])
        else:
            self.name = get_or_increase_name(defaults.certificate_name(self.cn, self.issuer, self.not_before, self.not_after, self.san))

        self.owner = kwargs['owner']
        self.body = kwargs['body'].strip()

        if kwargs.get('private_key'):
            self.private_key = kwargs['private_key'].strip()

        if kwargs.get('chain'):
            self.chain = kwargs['chain'].strip()

        self.notify = kwargs.get('notify', True)
        self.destinations = kwargs.get('destinations', [])
        self.notifications = kwargs.get('notifications', [])
        self.description = kwargs.get('description')
        # de-duplicate roles while keeping the relationship a list
        self.roles = list(set(kwargs.get('roles', [])))
        self.replaces = kwargs.get('replaces', [])
        self.rotation = kwargs.get('rotation')
        self.signing_algorithm = defaults.signing_algorithm(cert)
        self.bits = defaults.bitstrength(cert)
        self.serial = defaults.serial(cert)

        for domain in defaults.domains(cert):
            self.domains.append(Domain(name=domain))

    @property
    def active(self):
        """Alias for `notify` (kept for API compatibility)."""
        return self.notify

    @property
    def organization(self):
        """Subject organization, re-parsed from the stored body."""
        cert = lemur.common.utils.parse_certificate(self.body)
        return defaults.organization(cert)

    @property
    def organizational_unit(self):
        """Subject OU, re-parsed from the stored body."""
        cert = lemur.common.utils.parse_certificate(self.body)
        return defaults.organizational_unit(cert)

    @property
    def country(self):
        """Subject country, re-parsed from the stored body."""
        cert = lemur.common.utils.parse_certificate(self.body)
        return defaults.country(cert)

    @property
    def state(self):
        """Subject state/province, re-parsed from the stored body."""
        cert = lemur.common.utils.parse_certificate(self.body)
        return defaults.state(cert)

    @property
    def location(self):
        """Subject locality, re-parsed from the stored body."""
        cert = lemur.common.utils.parse_certificate(self.body)
        return defaults.location(cert)

    @property
    def key_type(self):
        """Key type string like 'RSA2048'.

        NOTE(review): only RSA keys are handled; for any other key type
        this implicitly returns None — confirm that is intended.
        """
        cert = lemur.common.utils.parse_certificate(self.body)
        if isinstance(cert.public_key(), rsa.RSAPublicKey):
            return 'RSA{key_size}'.format(key_size=cert.public_key().key_size)

    @property
    def validity_remaining(self):
        """Time left (absolute timedelta) until `not_after`."""
        return abs(self.not_after - arrow.utcnow())

    @property
    def validity_range(self):
        """Total validity period as a timedelta."""
        return self.not_after - self.not_before

    @property
    def subject(self):
        """The x509 subject of the parsed body."""
        cert = lemur.common.utils.parse_certificate(self.body)
        return cert.subject

    @property
    def public_key(self):
        """The public key object of the parsed body."""
        cert = lemur.common.utils.parse_certificate(self.body)
        return cert.public_key()

    @hybrid_property
    def expired(self):
        # Python-side: True if past not_after (returns None otherwise).
        if self.not_after <= arrow.utcnow():
            return True

    @expired.expression
    def expired(cls):
        # SQL-side equivalent for use in queries.
        return case(
            [
                (cls.not_after <= arrow.utcnow(), True)
            ],
            else_=False
        )

    @hybrid_property
    def revoked(self):
        # Python-side: True when status is exactly 'revoked'.
        if 'revoked' == self.status:
            return True

    @revoked.expression
    def revoked(cls):
        # SQL-side equivalent for use in queries.
        return case(
            [
                (cls.status == 'revoked', True)
            ],
            else_=False
        )

    @property
    def extensions(self):
        """Parse the body's x509 extensions into a dict for cloning.

        Unknown/unsupported extensions are logged and skipped; parse
        failures yield the default structure rather than raising.
        """
        # setup default values
        return_extensions = {
            'sub_alt_names': {'names': []}
        }
        try:
            cert = lemur.common.utils.parse_certificate(self.body)
            for extension in cert.extensions:
                value = extension.value
                if isinstance(value, x509.BasicConstraints):
                    return_extensions['basic_constraints'] = value
                elif isinstance(value, x509.SubjectAlternativeName):
                    return_extensions['sub_alt_names']['names'] = value
                elif isinstance(value, x509.ExtendedKeyUsage):
                    return_extensions['extended_key_usage'] = value
                elif isinstance(value, x509.KeyUsage):
                    return_extensions['key_usage'] = value
                elif isinstance(value, x509.SubjectKeyIdentifier):
                    return_extensions['subject_key_identifier'] = {'include_ski': True}
                elif isinstance(value, x509.AuthorityInformationAccess):
                    return_extensions['certificate_info_access'] = {'include_aia': True}
                elif isinstance(value, x509.AuthorityKeyIdentifier):
                    aki = {
                        'use_key_identifier': False,
                        'use_authority_cert': False
                    }
                    if value.key_identifier:
                        aki['use_key_identifier'] = True
                    if value.authority_cert_issuer:
                        aki['use_authority_cert'] = True
                    return_extensions['authority_key_identifier'] = aki
                # TODO: Don't support CRLDistributionPoints yet https://github.com/Netflix/lemur/issues/662
                elif isinstance(value, x509.CRLDistributionPoints):
                    current_app.logger.warning('CRLDistributionPoints not yet supported for clone operation.')
                # TODO: Not supporting custom OIDs yet. https://github.com/Netflix/lemur/issues/665
                else:
                    current_app.logger.warning('Custom OIDs not yet supported for clone operation.')
        except InvalidCodepoint as e:
            # IDNA rejects underscores in dns names; best-effort skip.
            current_app.logger.warning('Unable to parse extensions due to underscore in dns name')
        except ValueError as e:
            current_app.logger.warning('Unable to parse')
            current_app.logger.exception(e)
        return return_extensions

    def get_arn(self, account_number):
        """
        Generate a valid AWS IAM arn

        :rtype : str
        :param account_number:
        :return:
        """
        return "arn:aws:iam::{}:server-certificate/{}".format(account_number, self.name)

    def __repr__(self):
        return "Certificate(name={name})".format(name=self.name)
class MyStory(Base):
    """A user-authored story text linked to a parent story row.

    NOTE(review): no primary key is visible in this snippet; declarative
    models require one — confirm it is inherited or defined elsewhere.
    """

    __tablename__ = "my_stories"

    # Story body text.
    text = Column(Text())
    # Parent story this entry belongs to.
    story_id = Column(Integer, ForeignKey("stories.id"))
class dialog(Base):
    """Per-user dialog transcript log, one row per user.

    NOTE(review): the lower-case class name violates PEP 8; kept as-is
    because renaming would break existing callers.
    """

    __tablename__ = 'dialog_log'

    # One row per user; the entire log is stored in that row.
    user_id = Column(Text(), primary_key=True)
    # MySQL LONGTEXT variant: dialog history can exceed TEXT limits.
    log = Column(LONGTEXT())
class Vulnerability(FaradayEntity, Base):
    """ORM model for a vulnerability imported from CouchDB documents.

    List-like document fields (refs, attachments, policyviolations) are
    stored JSON-encoded in Text columns.
    """

    # CouchDB document types this model is hydrated from.
    DOC_TYPE = ['Vulnerability', 'VulnerabilityWeb']

    # Table schema
    __tablename__ = 'vulnerability'
    id = Column(Integer, primary_key=True)
    name = Column(String(250), nullable=False)
    description = Column(Text(), nullable=False)
    confirmed = Column(Boolean)
    vuln_type = Column(String(250))
    data = Column(Text())
    easeofresolution = Column(String(50))
    refs = Column(Text())                   # JSON-encoded list
    resolution = Column(Text())
    severity = Column(String(50))
    owned = Column(Boolean)
    attachments = Column(Text(), nullable=True)   # JSON-encoded dict
    policyviolations = Column(Text())             # JSON-encoded list
    impact_accountability = Column(Boolean)
    impact_availability = Column(Boolean)
    impact_confidentiality = Column(Boolean)
    impact_integrity = Column(Boolean)
    # Web-vulnerability specific request/response details.
    method = Column(String(50))
    params = Column(String(500))
    path = Column(String(500))
    pname = Column(String(250))
    query = Column(Text())
    request = Column(Text())
    response = Column(Text())
    website = Column(String(250))
    status = Column(String(250))

    entity_metadata = relationship(EntityMetadata, uselist=False, cascade="all, delete-orphan", single_parent=True)
    entity_metadata_id = Column(Integer, ForeignKey(EntityMetadata.id), index=True)

    host_id = Column(Integer, ForeignKey(Host.id), index=True)
    host = relationship('Host', back_populates='vulnerabilities')

    service_id = Column(Integer, ForeignKey(Service.id), index=True)
    service = relationship('Service', back_populates='vulnerabilities')

    def update_from_document(self, document):
        """Copy scalar fields from a CouchDB document onto this row."""
        self.name = document.get('name')
        self.description = document.get('desc')
        self.confirmed = document.get('confirmed')
        self.vuln_type = document.get('type')
        self.data = document.get('data')
        self.easeofresolution = document.get('easeofresolution')
        # list/dict fields are serialized to JSON text columns
        self.refs = json.dumps(document.get('refs', []))
        self.resolution = document.get('resolution')
        self.severity = document.get('severity')
        self.owned = document.get('owned', False)
        self.attachments = json.dumps(document.get('_attachments', {}))
        self.policyviolations = json.dumps(document.get('policyviolations', []))
        self.impact_accountability = document.get('impact', {}).get('accountability')
        self.impact_availability = document.get('impact', {}).get('availability')
        self.impact_confidentiality = document.get('impact', {}).get('confidentiality')
        self.impact_integrity = document.get('impact', {}).get('integrity')
        self.method = document.get('method')
        self.path = document.get('path')
        self.pname = document.get('pname')
        self.query = document.get('query')
        self.request = document.get('request')
        self.response = document.get('response')
        self.website = document.get('website')
        self.status = document.get('status', 'opened')

        params = document.get('params', u'')
        if isinstance(params, (list, tuple)):
            self.params = (u' '.join(params)).strip()
        else:
            self.params = params if params is not None else u''

    def add_relationships_from_dict(self, entities):
        """Wire host/service relations from an in-memory entity map.

        The couchdb_id is dot-separated; the first segment is the host
        id and all but the last segment form the parent (service) id.
        Raises EntityNotFound when a referenced entity is missing.
        """
        couchdb_id = self.entity_metadata.couchdb_id
        host_id = couchdb_id.split('.')[0]
        if host_id not in entities:
            raise EntityNotFound(host_id)
        self.host = entities[host_id]

        parent_id = '.'.join(couchdb_id.split('.')[:-1])
        if parent_id != host_id:
            if parent_id not in entities:
                raise EntityNotFound(parent_id)
            self.service = entities[parent_id]

    def add_relationships_from_db(self, session):
        """Wire host/service relations by querying the database.

        Same couchdb_id convention as add_relationships_from_dict;
        query.one() raises if the referenced row is absent.
        """
        couchdb_id = self.entity_metadata.couchdb_id
        host_id = couchdb_id.split('.')[0]
        query = session.query(Host).join(EntityMetadata).filter(EntityMetadata.couchdb_id == host_id)
        self.host = query.one()

        parent_id = '.'.join(couchdb_id.split('.')[:-1])
        if parent_id != host_id:
            query = session.query(Service).join(EntityMetadata).filter(EntityMetadata.couchdb_id == parent_id)
            self.service = query.one()
class Involved(Base):
    """ORM model for a person involved in a road accident record.

    Linked to AccidentMarker by the composite key
    (accident_id, provider_code, accident_year).
    """

    __tablename__ = "involved"
    id = Column(BigInteger(), primary_key=True)
    provider_and_id = Column(BigInteger())
    provider_code = Column(Integer())
    file_type_police = Column(Integer())
    accident_id = Column(BigInteger())
    involved_type = Column(Integer())
    license_acquiring_date = Column(Integer())
    age_group = Column(Integer())
    sex = Column(Integer())
    vehicle_type = Column(Integer())
    safety_measures = Column(Integer())
    involve_yishuv_symbol = Column(Integer())
    involve_yishuv_name = Column(Text())
    injury_severity = Column(Integer())
    injured_type = Column(Integer())
    injured_position = Column(Integer())
    population_type = Column(Integer())
    home_region = Column(Integer())
    home_district = Column(Integer())
    home_natural_area = Column(Integer())
    home_municipal_status = Column(Integer())
    home_yishuv_shape = Column(Integer())
    hospital_time = Column(Integer())
    medical_type = Column(Integer())
    release_dest = Column(Integer())
    safety_measures_use = Column(Integer())
    late_deceased = Column(Integer())
    car_id = Column(Integer())
    involve_id = Column(Integer())
    accident_year = Column(Integer())
    accident_month = Column(Integer())
    injury_severity_mais = Column(Integer())

    # Composite FK to the accident marker; rows cascade-delete with it.
    __table_args__ = (ForeignKeyConstraint(
        [accident_id, provider_code, accident_year], [
            AccidentMarker.id, AccidentMarker.provider_code,
            AccidentMarker.accident_year
        ],
        ondelete="CASCADE"),
        Index('accident_id_idx_involved', 'accident_id', unique=False),
        Index('provider_and_id_idx_involved', 'provider_and_id', unique=False),
        {})

    def serialize(self):
        """Return a JSON-serializable dict of the commonly used fields.

        Note: intentionally omits several columns (e.g. car_id,
        involve_id, accident_year) — confirm before relying on it for
        full exports.
        """
        return {
            "id": self.id,
            "provider_code": self.provider_code,
            "accident_id": self.accident_id,
            "involved_type": self.involved_type,
            "license_acquiring_date": self.license_acquiring_date,
            "age_group": self.age_group,
            "sex": self.sex,
            "vehicle_type": self.vehicle_type,
            "safety_measures": self.safety_measures,
            "involve_yishuv_symbol": self.involve_yishuv_symbol,
            "injury_severity": self.injury_severity,
            "injured_type": self.injured_type,
            "injured_position": self.injured_position,
            "population_type": self.population_type,
            "home_region": self.home_region,
            "home_district": self.home_district,
            "home_natural_area": self.home_natural_area,
            "home_municipal_status": self.home_municipal_status,
            "home_yishuv_shape": self.home_yishuv_shape,
            "hospital_time": self.hospital_time,
            "medical_type": self.medical_type,
            "release_dest": self.release_dest,
            "safety_measures_use": self.safety_measures_use,
            "late_deceased": self.late_deceased
        }

    # Flask-Login integration
    def is_authenticated(self):
        return True

    def is_active(self):
        return True

    def is_anonymous(self):
        return False

    def get_id(self):
        return self.id
def MediumText() -> Variant:  # pylint:disable=invalid-name
    """Return a generic Text type that renders as MEDIUMTEXT on MySQL."""
    generic_text = Text()
    return generic_text.with_variant(MEDIUMTEXT(), "mysql")
class AccidentMarker(MarkerMixin, Base):
    """ORM model for a single accident marker plus its query helpers.

    Composite primary key: (id, provider_code, accident_year).
    """

    __tablename__ = "markers"
    __table_args__ = (
        Index('acc_long_lat_idx', 'latitude', 'longitude'),
        Index('id_idx_markers', 'id', unique=False),
        Index('provider_and_id_idx_markers', 'provider_and_id', unique=False),
        Index('idx_markers_geom', 'geom', unique=False),
    )
    __mapper_args__ = {'polymorphic_identity': CONST.MARKER_TYPE_ACCIDENT}

    id = Column(BigInteger(), primary_key=True)
    provider_and_id = Column(BigInteger())
    provider_code = Column(Integer(), primary_key=True)
    file_type_police = Column(Integer())
    description = Column(Text())
    accident_type = Column(Integer())
    accident_severity = Column(Integer())
    address = Column(Text())
    location_accuracy = Column(Integer())
    road_type = Column(Integer())
    road_shape = Column(Integer())
    day_type = Column(Integer())
    police_unit = Column(Integer())
    mainStreet = Column(Text())
    secondaryStreet = Column(Text())
    junction = Column(Text())
    one_lane = Column(Integer())
    multi_lane = Column(Integer())
    speed_limit = Column(Integer())
    road_intactness = Column(Integer())
    road_width = Column(Integer())
    road_sign = Column(Integer())
    road_light = Column(Integer())
    road_control = Column(Integer())
    weather = Column(Integer())
    road_surface = Column(Integer())
    road_object = Column(Integer())
    object_distance = Column(Integer())
    didnt_cross = Column(Integer())
    cross_mode = Column(Integer())
    cross_location = Column(Integer())
    cross_direction = Column(Integer())
    involved = relationship("Involved")
    vehicles = relationship("Vehicle")
    video_link = Column(Text())
    road1 = Column(Integer())
    road2 = Column(Integer())
    km = Column(Float())
    km_raw = Column(Text())
    km_accurate = Column(Boolean())
    yishuv_symbol = Column(Integer())
    yishuv_name = Column(Text())
    geo_area = Column(Integer())
    day_night = Column(Integer())
    day_in_week = Column(Integer())
    traffic_light = Column(Integer())
    region = Column(Integer())
    district = Column(Integer())
    natural_area = Column(Integer())
    municipal_status = Column(Integer())
    yishuv_shape = Column(Integer())
    street1 = Column(Integer())
    street1_hebrew = Column(Text())
    street2 = Column(Integer())
    street2_hebrew = Column(Text())
    house_number = Column(Integer())
    urban_intersection = Column(Integer())
    non_urban_intersection = Column(Integer())
    non_urban_intersection_hebrew = Column(Text())
    accident_year = Column(Integer(), primary_key=True)
    accident_month = Column(Integer())
    accident_day = Column(Integer())
    accident_hour_raw = Column(Integer())
    accident_hour = Column(Integer())
    accident_minute = Column(Integer())
    x = Column(Float())
    y = Column(Float())
    vehicle_type_rsa = Column(Text())
    violation_type_rsa = Column(Text())
    geom = Column(Geometry('POINT'))
    non_urban_intersection_by_junction_number = Column(Text())

    @staticmethod
    def json_to_description(msg):
        """Render a JSON description blob into a readable multi-line string.

        BUG FIX: json.loads no longer accepts an `encoding` argument
        (removed in Python 3.9; it was ignored before that), so the
        previous `json.loads(msg, encoding=db_encoding)` raised
        TypeError on modern interpreters.
        """
        description = json.loads(msg)
        return "\n".join([
            AccidentMarker.format_description(field, value)
            for field, value in iteritems(description)
        ])

    def serialize(self, is_thin=False):
        """Return a dict of this marker; `is_thin` skips heavy fields.

        Optional fields equal to 0 are treated as "not set" and omitted.
        """
        fields = {
            "id": str(self.id),
            "provider_code": self.provider_code,
            "accident_year": self.accident_year,
            "latitude": self.latitude,
            "longitude": self.longitude,
            "accident_severity": self.accident_severity,
            "location_accuracy": self.location_accuracy,
            "created": self.created.isoformat(),
        }
        if not is_thin:
            fields.update({
                "title": self.title,
                "address": self.address,
                "type": self.type,
                "accident_type": self.accident_type,
                "road_type": self.road_type,
                "road_shape": self.road_shape,
                "day_type": self.day_type,
                "police_unit": self.police_unit,
                "mainStreet": self.mainStreet,
                "secondaryStreet": self.secondaryStreet,
                "junction": self.junction,
            })
            # United Hatzala accidents description are not json:
            if self.provider_code == CONST.UNITED_HATZALA_CODE:
                fields.update({"description": self.description})
            else:
                fields.update({
                    "description": AccidentMarker.json_to_description(self.description)
                })
            optional = {
                "one_lane": self.one_lane,
                "multi_lane": self.multi_lane,
                "speed_limit": self.speed_limit,
                "road_intactness": self.road_intactness,
                "road_width": self.road_width,
                "road_sign": self.road_sign,
                "road_light": self.road_light,
                "road_control": self.road_control,
                "weather": self.weather,
                "road_surface": self.road_surface,
                "road_object": self.road_object,
                "object_distance": self.object_distance,
                "didnt_cross": self.didnt_cross,
                "cross_mode": self.cross_mode,
                "cross_location": self.cross_location,
                "cross_direction": self.cross_direction,
                "video_link": self.video_link,
                "road1": self.road1,
                "road2": self.road2,
                "km": self.km
            }
            for name, value in iteritems(optional):
                if value != 0:
                    fields[name] = value
        return fields

    def update(self, data, current_user):
        """Overwrite the editable fields from `data` and persist."""
        self.title = data["title"]
        self.description = data["description"]
        self.type = data["type"]
        self.latitude = data["latitude"]
        self.longitude = data["longitude"]
        self.put()

    @staticmethod
    def bounding_box_query(is_thin=False, yield_per=None,
                           involved_and_vehicles=False, **kwargs):
        """Query markers inside a lat/lng bounding box with many filters.

        Required kwargs: sw_lat/sw_lng/ne_lat/ne_lng, start_date/end_date,
        show_rsa, show_accidents. Many optional toggles narrow the result.
        Returns a MarkerResult.

        BUG FIX: four filters referenced the nonexistent attribute
        `AccidentMarker.roadType`; the mapped column is `road_type`,
        so those code paths raised AttributeError at runtime.
        """
        from anyway.apis.common.models import MarkerResult, Involved, Vehicle
        approx = kwargs.get('approx', True)
        accurate = kwargs.get('accurate', True)
        page = kwargs.get('page')
        per_page = kwargs.get('per_page')

        if not kwargs.get('show_markers', True):
            return MarkerResult(
                accident_markers=db.session.query(AccidentMarker).filter(
                    sql.false()),
                rsa_markers=db.session.query(AccidentMarker).filter(
                    sql.false()),
                total_records=0)

        sw_lat = float(kwargs['sw_lat'])
        sw_lng = float(kwargs['sw_lng'])
        ne_lat = float(kwargs['ne_lat'])
        ne_lng = float(kwargs['ne_lng'])
        polygon_str = 'POLYGON(({0} {1},{0} {3},{2} {3},{2} {1},{0} {1}))'.format(
            sw_lng, sw_lat, ne_lng, ne_lat)

        markers = db.session.query(AccidentMarker) \
            .filter(AccidentMarker.geom.intersects(polygon_str)) \
            .filter(AccidentMarker.created >= kwargs['start_date']) \
            .filter(AccidentMarker.created < kwargs['end_date']) \
            .filter(AccidentMarker.provider_code != CONST.RSA_PROVIDER_CODE) \
            .order_by(desc(AccidentMarker.created))

        rsa_markers = db.session.query(AccidentMarker) \
            .filter(AccidentMarker.geom.intersects(polygon_str)) \
            .filter(AccidentMarker.created >= kwargs['start_date']) \
            .filter(AccidentMarker.created < kwargs['end_date']) \
            .filter(AccidentMarker.provider_code == CONST.RSA_PROVIDER_CODE) \
            .order_by(desc(AccidentMarker.created))

        if not kwargs['show_rsa']:
            rsa_markers = db.session.query(AccidentMarker).filter(sql.false())

        if not kwargs['show_accidents']:
            markers = markers.filter(
                and_(
                    AccidentMarker.provider_code != CONST.CBS_ACCIDENT_TYPE_1_CODE,
                    AccidentMarker.provider_code != CONST.CBS_ACCIDENT_TYPE_3_CODE,
                    AccidentMarker.provider_code != CONST.UNITED_HATZALA_CODE))

        if yield_per:
            markers = markers.yield_per(yield_per)

        # location_accuracy == 1 means an accurately-placed marker.
        if accurate and not approx:
            markers = markers.filter(AccidentMarker.location_accuracy == 1)
        elif approx and not accurate:
            markers = markers.filter(AccidentMarker.location_accuracy != 1)
        elif not accurate and not approx:
            return MarkerResult(
                accident_markers=db.session.query(AccidentMarker).filter(
                    sql.false()),
                rsa_markers=db.session.query(AccidentMarker).filter(
                    sql.false()),
                total_records=0)

        if not kwargs.get('show_fatal', True):
            markers = markers.filter(AccidentMarker.accident_severity != 1)
        if not kwargs.get('show_severe', True):
            markers = markers.filter(AccidentMarker.accident_severity != 2)
        if not kwargs.get('show_light', True):
            markers = markers.filter(AccidentMarker.accident_severity != 3)

        if kwargs.get('show_urban', 3) != 3:
            if kwargs['show_urban'] == 2:
                # urban roads: road_type in [1, 2]
                markers = markers.filter(AccidentMarker.road_type >= 1).filter(
                    AccidentMarker.road_type <= 2)
            elif kwargs['show_urban'] == 1:
                # non-urban roads: road_type in [3, 4]
                markers = markers.filter(AccidentMarker.road_type >= 3).filter(
                    AccidentMarker.road_type <= 4)
            else:
                return MarkerResult(
                    accident_markers=db.session.query(AccidentMarker).filter(
                        sql.false()),
                    rsa_markers=rsa_markers,
                    total_records=rsa_markers.count())

        if kwargs.get('show_intersection', 3) != 3:
            if kwargs['show_intersection'] == 2:
                markers = markers.filter(AccidentMarker.road_type != 2).filter(
                    AccidentMarker.road_type != 4)
            elif kwargs['show_intersection'] == 1:
                markers = markers.filter(AccidentMarker.road_type != 1).filter(
                    AccidentMarker.road_type != 3)
            else:
                return MarkerResult(
                    accident_markers=db.session.query(AccidentMarker).filter(
                        sql.false()),
                    rsa_markers=rsa_markers,
                    total_records=rsa_markers.count())

        if kwargs.get('show_lane', 3) != 3:
            if kwargs['show_lane'] == 2:
                markers = markers.filter(AccidentMarker.one_lane >= 2).filter(
                    AccidentMarker.one_lane <= 3)
            elif kwargs['show_lane'] == 1:
                markers = markers.filter(AccidentMarker.one_lane == 1)
            else:
                return MarkerResult(
                    accident_markers=db.session.query(AccidentMarker).filter(
                        sql.false()),
                    rsa_markers=rsa_markers,
                    total_records=rsa_markers.count())

        if kwargs.get('show_day', 7) != 7:
            markers = markers.filter(
                func.extract("dow", AccidentMarker.created) == kwargs['show_day'])
        if kwargs.get('show_holiday', 0) != 0:
            markers = markers.filter(
                AccidentMarker.day_type == kwargs['show_holiday'])

        if kwargs.get('show_time', 24) != 24:
            if kwargs['show_time'] == 25:     # Daylight (6-18)
                markers = markers.filter(func.extract("hour", AccidentMarker.created) >= 6) \
                    .filter(func.extract("hour", AccidentMarker.created) < 18)
            elif kwargs['show_time'] == 26:   # Darktime (18-6)
                markers = markers.filter(
                    (func.extract("hour", AccidentMarker.created) >= 18) |
                    (func.extract("hour", AccidentMarker.created) < 6))
            else:
                # 6-hour window starting at show_time
                markers = markers.filter(func.extract("hour", AccidentMarker.created) >= kwargs['show_time']) \
                    .filter(func.extract("hour", AccidentMarker.created) < kwargs['show_time'] + 6)
        elif kwargs['start_time'] != 25 and kwargs['end_time'] != 25:
            markers = markers.filter(func.extract("hour", AccidentMarker.created) >= kwargs['start_time']) \
                .filter(func.extract("hour", AccidentMarker.created) < kwargs['end_time'])

        if kwargs.get('weather', 0) != 0:
            markers = markers.filter(
                AccidentMarker.weather == kwargs['weather'])
        if kwargs.get('road', 0) != 0:
            markers = markers.filter(
                AccidentMarker.road_shape == kwargs['road'])
        if kwargs.get('separation', 0) != 0:
            markers = markers.filter(
                AccidentMarker.multi_lane == kwargs['separation'])
        if kwargs.get('surface', 0) != 0:
            markers = markers.filter(
                AccidentMarker.road_surface == kwargs['surface'])
        if kwargs.get('acctype', 0) != 0:
            if kwargs['acctype'] <= 20:
                markers = markers.filter(
                    AccidentMarker.accident_type == kwargs['acctype'])
            elif kwargs['acctype'] == CONST.BIKE_ACCIDENTS:
                markers = markers.filter(
                    AccidentMarker.vehicles.any(
                        Vehicle.vehicle_type == CONST.VEHICLE_TYPE_BIKE))
        if kwargs.get('controlmeasure', 0) != 0:
            markers = markers.filter(
                AccidentMarker.road_control == kwargs['controlmeasure'])
        if kwargs.get('district', 0) != 0:
            markers = markers.filter(
                AccidentMarker.police_unit == kwargs['district'])
        if kwargs.get('case_type', 0) != 0:
            markers = markers.filter(
                AccidentMarker.provider_code == kwargs['case_type'])

        if is_thin:
            markers = markers.options(load_only("id", "longitude", "latitude"))

        # NOTE(review): when 'age_groups' is absent this empties the
        # whole marker set — verify that is the intended behavior.
        if kwargs.get('age_groups'):
            age_groups_list = kwargs.get('age_groups').split(',')
            markers = markers.filter(
                AccidentMarker.involved.any(
                    Involved.age_group.in_(age_groups_list)))
        else:
            markers = db.session.query(AccidentMarker).filter(sql.false())

        total_records = markers.count() + rsa_markers.count()

        if page and per_page:
            markers = markers.offset((page - 1) * per_page).limit(per_page)

        if involved_and_vehicles:
            fetch_markers = kwargs.get('fetch_markers', True)
            fetch_vehicles = kwargs.get('fetch_vehicles', True)
            fetch_involved = kwargs.get('fetch_involved', True)
            markers_ids = [marker.id for marker in markers]
            markers = None
            vehicles = None
            involved = None
            if fetch_markers:
                markers = db.session.query(AccidentMarker).filter(
                    AccidentMarker.id.in_(markers_ids))
            if fetch_vehicles:
                vehicles = db.session.query(Vehicle).filter(
                    Vehicle.accident_id.in_(markers_ids))
            if fetch_involved:
                involved = db.session.query(Involved).filter(
                    Involved.accident_id.in_(markers_ids))
            # NOTE(review): `result` is a 3-tuple (markers, vehicles,
            # involved), so total_records below is always len == 3.
            # Kept byte-equivalent to the original — confirm intent
            # before changing.
            result = markers.all() if markers is not None else [], vehicles.all() if vehicles is not None else [], \
                involved.all() if involved is not None else []
            return MarkerResult(
                accident_markers=result,
                rsa_markers=db.session.query(AccidentMarker).filter(
                    sql.false()),
                total_records=len(result))
        else:
            return MarkerResult(accident_markers=markers,
                                rsa_markers=rsa_markers,
                                total_records=total_records)

    @staticmethod
    def get_marker(marker_id):
        """Return a query for the marker(s) with this id."""
        return db.session.query(AccidentMarker).filter_by(id=marker_id)

    @classmethod
    def parse(cls, data):
        """Build an AccidentMarker from a plain dict of user fields."""
        return AccidentMarker(type=CONST.MARKER_TYPE_ACCIDENT,
                              title=data["title"],
                              description=data["description"],
                              latitude=data["latitude"],
                              longitude=data["longitude"])
class Events(Base):  # type: ignore
    """Event history data."""

    __table_args__ = (
        # Used for fetching events at a specific time
        # see logbook
        Index("ix_events_event_type_time_fired", "event_type", "time_fired"),
        {
            "mysql_default_charset": "utf8mb4",
            "mysql_collate": "utf8mb4_unicode_ci"
        },
    )
    __tablename__ = TABLE_EVENTS
    event_id = Column(Integer, Identity(), primary_key=True)
    event_type = Column(String(MAX_LENGTH_EVENT_EVENT_TYPE))
    # JSON payload; LONGTEXT on MySQL since event data can be large.
    event_data = Column(Text().with_variant(mysql.LONGTEXT, "mysql"))
    origin = Column(String(MAX_LENGTH_EVENT_ORIGIN))
    time_fired = Column(DATETIME_TYPE, index=True)
    created = Column(DATETIME_TYPE, default=dt_util.utcnow)
    context_id = Column(String(MAX_LENGTH_EVENT_CONTEXT_ID), index=True)
    context_user_id = Column(String(MAX_LENGTH_EVENT_CONTEXT_ID), index=True)
    context_parent_id = Column(String(MAX_LENGTH_EVENT_CONTEXT_ID), index=True)

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            f"<recorder.Events("
            f"id={self.event_id}, type='{self.event_type}', data='{self.event_data}', "
            f"origin='{self.origin}', time_fired='{self.time_fired}'"
            f")>")

    @staticmethod
    def from_event(event, event_data=None):
        """Create an event database object from a native event.

        If `event_data` is not supplied the event's data is serialized
        with compact separators to keep the stored payload small.
        """
        return Events(
            event_type=event.event_type,
            event_data=event_data or json.dumps(event.data, cls=JSONEncoder,
                                                separators=(",", ":")),
            origin=str(event.origin.value),
            time_fired=event.time_fired,
            context_id=event.context.id,
            context_user_id=event.context.user_id,
            context_parent_id=event.context.parent_id,
        )

    def to_native(self, validate_entity_id=True):
        """Convert to a native HA Event.

        Returns None (and logs) when the stored JSON cannot be decoded.
        """
        context = Context(
            id=self.context_id,
            user_id=self.context_user_id,
            parent_id=self.context_parent_id,
        )
        try:
            return Event(
                self.event_type,
                json.loads(self.event_data),
                EventOrigin(self.origin),
                process_timestamp(self.time_fired),
                context=context,
            )
        except ValueError:
            # When json.loads fails
            _LOGGER.exception("Error converting to event: %s", self)
            return None
class Command(db.Model):
    """
    The information of the script run on one node within a jobstep: the
    contents of the script are included, and later the command can be updated
    with status/return code. changes-client has no real magic beyond running
    commands, so the list of commands it ran basically tells you everything
    that happened.

    Looks like only mesos/lxc builds (DefaultBuildStep)
    """
    __tablename__ = 'command'
    __table_args__ = (
        UniqueConstraint('jobstep_id', 'order', name='unq_command_order'),
    )

    id = Column(GUID, primary_key=True, default=uuid.uuid4)
    jobstep_id = Column(GUID, ForeignKey('jobstep.id', ondelete="CASCADE"),
                        nullable=False)
    label = Column(String(128), nullable=False)
    status = Column(EnumType(Status), nullable=False, default=Status.unknown)
    return_code = Column(Integer, nullable=True)
    script = Column(Text(), nullable=False)
    env = Column(JSONEncodedDict, nullable=True)
    cwd = Column(String(256), nullable=True)
    artifacts = Column(ARRAY(String(256)), nullable=True)
    date_started = Column(DateTime)
    date_finished = Column(DateTime)
    date_created = Column(DateTime, default=datetime.utcnow)
    data = Column(JSONEncodedDict)
    order = Column(Integer, default=0, server_default='0', nullable=False)
    type = Column(EnumType(CommandType), nullable=False,
                  default=CommandType.default, server_default='0')

    jobstep = relationship('JobStep',
                           backref=backref('commands', order_by='Command.order'))

    __repr__ = model_repr('jobstep_id', 'script')

    def __init__(self, **kwargs):
        """Fill in Python-side fallbacks for fields the caller left unset."""
        super(Command, self).__init__(**kwargs)
        # Each factory runs only when the attribute is still None, so e.g.
        # uuid.uuid4() is not invoked for objects created with an explicit id.
        fallbacks = (
            ('id', uuid.uuid4),
            ('status', lambda: Status.unknown),
            ('date_created', datetime.utcnow),
            ('data', dict),
        )
        for attr, factory in fallbacks:
            if getattr(self, attr) is None:
                setattr(self, attr, factory())

    @property
    def duration(self):
        """
        Return the duration (in milliseconds) that this item was in-progress,
        or None when either timestamp is missing.
        """
        if not (self.date_started and self.date_finished):
            return None
        return (self.date_finished - self.date_started).total_seconds() * 1000
class Malware(Base):
    """A stored malware sample identified by its hashes.

    Samples form a tree via ``parent_id`` (e.g. a file extracted from an
    archive points back at the archive row).
    """
    __tablename__ = 'malware'

    id = Column(Integer(), primary_key=True)
    name = Column(String(255), nullable=True)
    size = Column(Integer(), nullable=False)
    type = Column(Text(), nullable=True)  # column name kept; shadows builtin intentionally
    mime = Column(String(255), nullable=True)
    md5 = Column(String(32), nullable=False, index=True)
    crc32 = Column(String(8), nullable=False)
    sha1 = Column(String(40), nullable=False)
    sha256 = Column(String(64), nullable=False, index=True)
    sha512 = Column(String(128), nullable=False)
    ssdeep = Column(String(255), nullable=True)
    # BUG FIX: was ``default=datetime.now()`` which is evaluated ONCE at
    # import time, stamping every row with the module-load timestamp.
    # Passing the callable makes SQLAlchemy call it per-INSERT.
    created_at = Column(DateTime(timezone=False), default=datetime.now,
                        nullable=False)
    parent_id = Column(Integer(), ForeignKey('malware.id'))
    parent = relationship('Malware', lazy='subquery', remote_side=[id])
    # NOTE(review): tag, note and analysis all join through the same
    # ``association_table`` — confirm this is intentional rather than a
    # copy/paste of three distinct association tables.
    tag = relationship('Tag', secondary=association_table,
                       backref=backref('malware'))
    note = relationship('Note', cascade='all, delete',
                        secondary=association_table,
                        backref=backref('malware'))
    analysis = relationship('Analysis', cascade='all, delete',
                            secondary=association_table,
                            backref=backref('malware'))
    __table_args__ = (Index('hash_index', 'md5', 'crc32', 'sha1', 'sha256',
                            'sha512', unique=True),)

    def to_dict(self):
        """Return a plain dict mapping each column name to its value."""
        return {column.name: getattr(self, column.name)
                for column in self.__table__.columns}

    def __repr__(self):
        return "<Malware('{0}','{1}')>".format(self.id, self.md5)

    def __init__(self, md5, crc32, sha1, sha256, sha512, size, type=None,
                 mime=None, ssdeep=None, name=None, parent=None):
        """Store the sample's hashes and metadata.

        Only the hash digests and ``size`` are required; everything else
        defaults to None.
        """
        self.md5 = md5
        self.sha1 = sha1
        self.crc32 = crc32
        self.sha256 = sha256
        self.sha512 = sha512
        self.size = size
        self.type = type
        self.mime = mime
        self.ssdeep = ssdeep
        self.name = name
        self.parent = parent
class Database(Base):
    """Flattened audit-item export row: one row per item of an audit."""
    # NOTE: ``table`` and ``merge`` are module-level variables read once when
    # this class object is created, so the table name and the primary-key
    # layout are fixed at import time.
    __tablename__ = table

    # Per-item fields.
    SortingIndex = Column(Integer)
    ItemType = Column(String(20))
    Label = Column(Text())
    Response = Column(Text())
    Comment = Column(Text())
    MediaHypertextReference = Column(Text())
    Latitude = Column(String(50))
    Longitude = Column(String(50))
    ItemScore = Column(Float)
    ItemMaxScore = Column(Float)
    ItemScorePercentage = Column(Float)
    Mandatory = Column(Boolean)
    FailedResponse = Column(Boolean)
    Inactive = Column(Boolean)
    # Composite primary key: (AuditID, ItemID), plus DatePK when not merging.
    AuditID = Column(String(100), primary_key=True, autoincrement=False)
    ItemID = Column(String(100), primary_key=True, autoincrement=False)
    if merge is False:
        # Date is part of the key, so repeated exports of the same audit
        # produce distinct rows rather than colliding.
        DatePK = Column(BigInteger, primary_key=True, autoincrement=False)
    else:
        # Merge mode: DatePK is plain data — presumably so a re-export
        # updates the existing (AuditID, ItemID) row; TODO confirm.
        DatePK = Column(BigInteger)
    ResponseID = Column(Text())
    ParentID = Column(String(100))
    # Audit-level metadata, duplicated onto every item row of that audit.
    AuditOwner = Column(Text())
    AuditAuthor = Column(Text())
    AuditOwnerID = Column(Text())
    AuditAuthorID = Column(String(100))
    AuditName = Column(Text())
    AuditScore = Column(Float)
    AuditMaxScore = Column(Float)
    AuditScorePercentage = Column(Float)
    AuditDuration = Column(Float)
    DateStarted = Column(DateTime)
    DateCompleted = Column(DateTime)
    DateModified = Column(DateTime)
    # Template the audit was conducted from.
    TemplateID = Column(String(100))
    TemplateName = Column(Text())
    TemplateAuthor = Column(Text())
    TemplateAuthorID = Column(String(100))
    ItemCategory = Column(Text())
    RepeatingSectionParentID = Column(String(100))
    # Header / site fields captured when the audit was conducted.
    DocumentNo = Column(Text())
    ConductedOn = Column(DateTime)
    PreparedBy = Column(Text())
    Location = Column(Text())
    Personnel = Column(Text())
    ClientSite = Column(Text())
    AuditSite = Column(Text())
    AuditArea = Column(Text())
    AuditRegion = Column(Text())
    Archived = Column(Boolean)
def MediumText() -> Variant:
    """Return a Text type that is rendered as MEDIUMTEXT on MySQL backends."""
    generic_text = Text()
    return generic_text.with_variant(MEDIUMTEXT(), "mysql")
class Users(Base):
    """Minimal user table mapping an external user id to a username."""
    __tablename__ = "users"
    # Surrogate primary key.
    id = Column(BigInteger(), primary_key=True)
    # External identifier — presumably the platform's user id; stored as
    # text rather than an integer. TODO confirm against callers.
    user_id = Column(Text())
    username = Column(Text())