class DistributedQueryResult(SurrogatePK, Model):
    """A single result row returned by a node for a distributed query."""

    # Raw result columns as returned by osquery for this row.
    columns = Column(JSONB)
    timestamp = Column(db.DateTime, default=dt.datetime.utcnow)

    distributed_query_task_id = reference_col('distributed_query_task', nullable=False)
    distributed_query_task = relationship(
        'DistributedQueryTask',
        backref=db.backref('results', cascade='all, delete-orphan', lazy='joined'),
    )

    distributed_query_id = reference_col('distributed_query', nullable=False)
    distributed_query = relationship(
        'DistributedQuery',
        backref=db.backref('results', cascade='all, delete-orphan', lazy='joined'),
    )

    def __init__(self, columns, distributed_query=None, distributed_query_task=None):
        """Store one result row, optionally linked to its query and task."""
        self.columns = columns
        self.distributed_query = distributed_query
        self.distributed_query_task = distributed_query_task

    def to_dict(self):
        """Return every mapped column as a plain dict."""
        # Renamed the local from 'dict' so the builtin is not shadowed.
        result = {c.name: getattr(self, c.name) for c in self.__table__.columns}
        return result

    def to_dict_obj(self):
        """Return only the raw result columns."""
        return {'columns': self.columns}
class AlertEmail(SurrogatePK, Model):
    """An email generated for an alert raised on a node."""

    alert_id = reference_col('alerts', nullable=False)
    alert = relationship(
        'Alerts',
        backref=db.backref('alert_email', lazy='dynamic'),
    )
    status = Column(db.String, nullable=True)

    # NOTE: a string column named 'node' was declared here in the original and
    # immediately shadowed by the relationship below; the declarative metaclass
    # only maps the final class attribute, so it was dead code and is removed.
    node_id = reference_col('node', nullable=False)
    node = relationship(
        'Node',
        backref=db.backref('alert_email', lazy='dynamic'),
    )

    body = Column(db.String, nullable=False)
    created_at = Column(db.DateTime, nullable=False, default=dt.datetime.utcnow)
    updated_at = Column(db.DateTime, nullable=False)

    def __init__(self, node=None, node_id=None, alert=None, alert_id=None,
                 body=None, status=None, updated_at=None):
        """Create an alert email; prefer objects over ids when both are given.

        Bug fix: 'updated_at' previously defaulted to dt.datetime.utcnow()
        evaluated once at import time, so every instance created without an
        explicit value shared the same stale timestamp.
        """
        if node:
            self.node = node
        elif node_id:
            self.node_id = node_id
        if alert:
            self.alert = alert
        elif alert_id:
            self.alert_id = alert_id
        self.updated_at = updated_at if updated_at is not None else dt.datetime.utcnow()
        self.body = body
        self.status = status
class DistributedQuery(SurrogatePK, Model):
    """An ad-hoc (distributed) query to be pushed out to nodes."""

    description = Column(db.String, nullable=True)
    sql = Column(db.String, nullable=False)
    timestamp = Column(db.DateTime, default=dt.datetime.utcnow)
    # Nodes should not run the query before this time.
    not_before = Column(db.DateTime, default=dt.datetime.utcnow)

    alert_id = reference_col('alerts', nullable=True)
    alert = relationship(
        'Alerts',
        backref=db.backref('distributed_query'),
    )

    def __init__(self, sql=None, description=None, not_before=None,
                 alert_id=None, **kwargs):
        """Create a distributed query.

        Bug fix: this was misspelled '__init___' (three trailing underscores),
        so it never ran and SQLAlchemy's default kwargs constructor was used
        instead.  '**kwargs' (applied via setattr) keeps any caller that relied
        on the default constructor's arbitrary column kwargs working.
        """
        self.sql = sql
        self.alert_id = alert_id
        self.description = description
        self.not_before = not_before
        for key, value in kwargs.items():
            setattr(self, key, value)

    def to_dict(self):
        """Return every mapped column as a plain dict."""
        # Renamed the local from 'dict' so the builtin is not shadowed.
        return {c.name: getattr(self, c.name) for c in self.__table__.columns}
class CarveSession(SurrogatePK, Model):
    """Tracks the state of a file-carve session for a node."""

    # StatusQueried for queried carves that did not hit nodes yet
    StatusQueried = "QUERIED"
    # StatusInitialized for initialized carves
    StatusInitialized = "INITIALIZED"
    # StatusInProgress for carves that are on-going
    StatusInProgress = "IN PROGRESS"
    # StatusCompleted for carves that finalized
    StatusCompleted = "COMPLETED"

    node_id = reference_col('node', nullable=False)
    session_id = Column(db.String, nullable=False)
    carve_guid = Column(db.String, nullable=False)
    carve_size = Column(db.Integer)
    block_size = Column(db.Integer)
    block_count = Column(db.Integer)
    completed_blocks = Column(db.Integer, default=0)
    archive = Column(db.String())
    request_id = Column(db.String, nullable=False)
    status = Column(db.String, nullable=False)
    created_at = Column(db.DateTime, nullable=False, default=dt.datetime.utcnow)
    updated_at = Column(db.DateTime, nullable=False)

    node = relationship('Node', backref=db.backref('carve_session', lazy='dynamic'))

    def __init__(self, node_id=None, session_id=None, carve_guid=None,
                 carve_size=0, block_size=0, block_count=0, archive=None,
                 request_id=None, **kwargs):
        """Create a carve session.

        Bug fix: this was misspelled '__init___' (three trailing underscores),
        so it never ran and SQLAlchemy's default kwargs constructor was used
        instead.  '**kwargs' (applied via setattr) keeps callers that passed
        other column kwargs (e.g. status) to the default constructor working.
        """
        self.node_id = node_id
        self.session_id = session_id
        self.carve_guid = carve_guid
        self.carve_size = carve_size
        self.block_size = block_size
        self.block_count = block_count
        self.archive = archive
        self.request_id = request_id
        for key, value in kwargs.items():
            setattr(self, key, value)

    def to_dict(self):
        """Return object data in easily serializeable format"""
        return {
            'id': self.id,
            'node_id': self.node_id,
            'session_id': self.session_id,
            'carve_guid': self.carve_guid,
            'carve_size': self.carve_size,
            'block_count': self.block_count,
            'archive': self.archive,
            'created_at': dump_datetime(self.created_at),
        }
class StatusLog(SurrogatePK, Model):
    """A single osquery status (glog) line reported by a node."""

    line = Column(db.Integer)
    message = Column(db.String)
    severity = Column(db.Integer)
    filename = Column(db.String)
    created = Column(db.DateTime, default=dt.datetime.utcnow)
    version = Column(db.String)
    node_id = reference_col('node', nullable=False)
    node = relationship(
        'Node',
        backref=db.backref('status_logs', lazy='dynamic')
    )

    def __init__(self, line=None, message=None, severity=None,
                 filename=None, created=None, node=None, node_id=None,
                 version=None, **kwargs):
        """Create a status-log entry; prefer the node object over the id.

        Bug fix: int(line) / int(severity) raised TypeError whenever the
        declared None defaults were used; convert only when a value is given.
        """
        self.line = int(line) if line is not None else None
        self.message = message
        self.severity = int(severity) if severity is not None else None
        self.filename = filename
        self.created = created
        self.version = version
        if node:
            self.node = node
        elif node_id:
            self.node_id = node_id

    @declared_attr
    def __table_args__(cls):
        # Descending (node_id, created) index for "latest logs per node" queries.
        return (
            Index('idx_%s_node_id_created_desc' % cls.__tablename__,
                  'node_id', cls.created.desc()),
        )
class AlertEmail(SurrogatePK, Model):
    """An email generated for an alert raised on a node (cascade-delete variant)."""

    alert_id = db.Column(db.Integer, db.ForeignKey('alerts.id', ondelete='CASCADE'))
    alert = relationship(
        'Alerts',
        backref=db.backref('alert_email', lazy='dynamic', passive_deletes=True),
    )
    status = Column(db.String, nullable=True)
    node_id = db.Column(db.Integer, db.ForeignKey('node.id', ondelete='CASCADE'))
    node = relationship('Node', backref=db.backref('alert_email', passive_deletes=True, lazy='dynamic'))
    body = Column(db.String, nullable=False)
    created_at = Column(db.DateTime, nullable=False, default=dt.datetime.utcnow)
    updated_at = Column(db.DateTime, nullable=False)

    def __init__(self, node=None, node_id=None, alert=None, alert_id=None,
                 body=None, status=None, updated_at=None):
        """Create an alert email; prefer objects over ids when both are given.

        Bug fix: 'updated_at' previously defaulted to dt.datetime.utcnow()
        evaluated once at import time, so every instance created without an
        explicit value shared the same stale timestamp.
        """
        if node:
            self.node = node
        elif node_id:
            self.node_id = node_id
        if alert:
            self.alert = alert
        elif alert_id:
            self.alert_id = alert_id
        self.updated_at = updated_at if updated_at is not None else dt.datetime.utcnow()
        self.body = body
        self.status = status
class ResultLog(SurrogatePK, Model):
    """One scheduled-query result line reported by a node."""

    # Processing states for a result-log entry.
    NEW = 0
    PENDING = 1
    COMPLETE = 2

    name = Column(db.String, nullable=False)
    timestamp = Column(db.DateTime, default=dt.datetime.utcnow)
    action = Column(db.String)
    columns = Column(JSONB)
    node_id = reference_col('node', nullable=False)
    node = relationship('Node', backref=db.backref('result_logs', lazy='dynamic'))
    uuid = Column(db.String, nullable=True)
    status = Column(db.Integer, default=NEW, nullable=False)
    task_id = Column(db.String, nullable=True)

    def __init__(self, name=None, action=None, columns=None, timestamp=None,
                 node=None, node_id=None, uuid=None, **kwargs):
        """Create a result-log entry; the node object wins over node_id."""
        if node:
            self.node = node
        elif node_id:
            self.node_id = node_id
        self.name = name
        self.action = action
        # Any falsy columns value is normalized to an empty dict.
        self.columns = columns if columns else {}
        self.timestamp = timestamp
        self.uuid = uuid

    def to_json(self):
        """Serialize the instance naively via its attribute dict."""
        def _encode(obj):
            return obj.__dict__
        return json.dumps(self, default=_encode, sort_keys=True, indent=4)

    def as_dict(self):
        """Return every mapped column as-is."""
        return {c.name: getattr(self, c.name) for c in self.__table__.columns}

    def to_dict(self):
        """Like as_dict(), but with 'timestamp' rendered as a string."""
        stamp_format = '%m/%d/%Y %H/%M/%S'
        return {
            col.name: (getattr(self, col.name).strftime(stamp_format)
                       if col.name == "timestamp"
                       else getattr(self, col.name))
            for col in self.__table__.columns
        }

    @declared_attr
    def __table_args__(cls):
        # Descending (node_id, timestamp) index for "latest results per node".
        return (Index('idx_%s_node_id_timestamp_desc' % cls.__tablename__,
                      'node_id', cls.timestamp.desc()),
                )
class DefaultFilters(SurrogatePK, Model):
    """Per-platform default osquery filters attached to a config."""

    ARCH_x86 = "x86"
    ARCH_x64 = "x86_64"

    filters = Column(JSONB)
    platform = Column(db.String, nullable=False)
    arch = Column(db.String)
    apply_by_default = Column(db.Boolean, nullable=False, default=False)
    created_at = Column(db.DateTime, nullable=False)
    updated_at = Column(db.DateTime, nullable=False, default=dt.datetime.utcnow)
    config_id = reference_col('config', nullable=False)
    config = relationship('Config', backref=db.backref('default_filters', lazy='dynamic'))

    def __init__(self, filters, platform, created_at, apply_by_default=False,
                 config_id=None, arch=ARCH_x64, **kwargs):
        """Create a filter set; updated_at is stamped at creation time."""
        self.filters = filters
        self.platform = platform
        self.apply_by_default = apply_by_default
        self.created_at = created_at
        self.updated_at = dt.datetime.utcnow()
        self.arch = arch
        self.config_id = config_id

    @property
    def serialize(self):
        """Return object data in easily serializeable format"""
        # NOTE(review): json.loads assumes self.filters holds a JSON string;
        # it would raise TypeError if a dict is stored in the JSONB column --
        # confirm against the writers of this model.
        return {
            'id': self.id,
            # Bug fix: this model declares no 'type' attribute, so 'self.type'
            # raised AttributeError; getattr keeps the key with a None fallback.
            'filters': json.loads(self.filters),
            'platform': self.platform,
            'arch': self.arch,
            'type': getattr(self, 'type', None),
            'created_at': dump_datetime(self.created_at),
            'updated_at': dump_datetime(self.updated_at)
        }
class NodeData(SurrogatePK, Model):
    """Arbitrary named JSON data captured for a node."""

    # Bug fix: 'default={}' made every row share one module-level mutable dict;
    # a callable default gives each row its own fresh dict.
    data = Column(JSONB, default=dict, nullable=False)
    name = Column(db.String, nullable=False)
    node_id = reference_col('node', nullable=False)
    node = relationship(
        'Node',
        backref=db.backref('node_data', cascade='all, delete-orphan', lazy='dynamic'),
    )
    created_at = Column(db.DateTime, nullable=False, default=dt.datetime.utcnow)
    updated_at = Column(db.DateTime, nullable=False)

    def to_dict(self):
        """Return the data serialized as a JSON string."""
        return {
            'name': self.name,
            'data': json.dumps(self.data),
            'updated_at': dump_datetime(self.updated_at)
        }

    def to_dict_obj(self):
        """Return the data as the stored Python object."""
        return {
            'name': self.name,
            'data': self.data,
            'updated_at': dump_datetime(self.updated_at)
        }

    def __init__(self, node=None, node_id=None, data=None, name=None, updated_at=None):
        """Create a node-data record; the node object wins over node_id.

        Bug fix: 'name' previously defaulted to False (a bool) although the
        column is a non-nullable string; None is the conventional sentinel and
        fails the NOT NULL constraint just the same if left unset.
        """
        if node:
            self.node = node
        elif node_id:
            self.node_id = node_id
        self.data = data
        self.name = name
        self.updated_at = updated_at
class NodeReconData(SurrogatePK, Model):
    """Recon (inventory) result columns linked to a NodeData record."""

    # Bug fix: 'default={}' made every row share one module-level mutable dict;
    # a callable default gives each row its own fresh dict.
    columns = Column(JSONB, default=dict, nullable=False)
    node_data_id = reference_col('node_data', nullable=False)
    node_data = relationship(
        'NodeData',
        backref=db.backref('node_recon_data', cascade='all, delete-orphan', lazy='dynamic'),
    )
    created_at = Column(db.DateTime, nullable=False, default=dt.datetime.utcnow)
    updated_at = Column(db.DateTime, nullable=False)

    def to_dict(self):
        """Return the columns serialized as a JSON string."""
        return {
            'columns': json.dumps(self.columns),
            'updated_at': dump_datetime(self.updated_at)
        }

    def to_dict_obj(self):
        """Return the columns as the stored Python object."""
        return {
            'columns': self.columns,
            'updated_at': dump_datetime(self.updated_at)
        }

    def __init__(self, node_data=None, node_data_id=None, columns=None, updated_at=None):
        """Create a recon record; the node_data object wins over node_data_id."""
        if node_data:
            self.node_data = node_data
        elif node_data_id:
            self.node_data_id = node_data_id
        self.columns = columns
        self.updated_at = updated_at
class DistributedQueryTask(SurrogatePK, Model):
    """Per-node delivery/state record for a DistributedQuery."""

    # Task lifecycle states.
    NEW = 0
    PENDING = 1
    COMPLETE = 2
    FAILED = 3
    NOT_SENT = 4

    # Task priorities.
    HIGH = 0
    LOW = 1

    save_results_in_db = Column(db.Boolean, nullable=False, default=False)
    guid = Column(db.String, nullable=False, unique=True)
    # Consistency fix: use the named constants declared above instead of
    # bare 0 literals (values are unchanged: NEW == 0, HIGH == 0).
    status = Column(db.Integer, default=NEW, nullable=False)
    timestamp = Column(db.DateTime)
    data = Column(JSONB)
    updated_at = Column(db.DateTime, nullable=True, default=None)
    viewed_at = Column(db.DateTime, nullable=True, default=None)
    priority = Column(db.Integer, default=HIGH, nullable=False)
    sql = Column(db.String, nullable=True)

    distributed_query_id = reference_col('distributed_query', nullable=False)
    distributed_query = relationship(
        'DistributedQuery',
        backref=db.backref('tasks', cascade='all, delete-orphan', lazy='dynamic'),
    )
    node_id = db.Column(db.Integer, db.ForeignKey('node.id', ondelete='CASCADE'))
    node = relationship('Node', backref=db.backref('distributed_queries', passive_deletes=True, lazy='dynamic'))

    def __init__(self, node=None, node_id=None, distributed_query=None,
                 save_results_in_db=False, distributed_query_id=None,
                 updated_at=None, priority=HIGH, viewed_at=None, data=None):
        """Create a task with a fresh unique guid; objects win over ids."""
        self.guid = str(uuid.uuid4())
        self.updated_at = updated_at
        self.viewed_at = viewed_at
        self.save_results_in_db = save_results_in_db
        self.data = data
        self.priority = priority
        if node:
            self.node = node
        elif node_id:
            self.node_id = node_id
        if distributed_query:
            self.distributed_query = distributed_query
        elif distributed_query_id:
            self.distributed_query_id = distributed_query_id

    @declared_attr
    def __table_args__(cls):
        # Composite index to find pending tasks for a node quickly.
        return (Index('idx_%s_node_id_status' % cls.__tablename__,
                      'node_id', 'status'),
                )

    def to_dict_obj(self):
        """Return the task plus its parent query text and any stored results."""
        return {
            'id': self.id,
            'distributed_query': {
                'description': self.distributed_query.description,
                'sql': self.distributed_query.sql
            },
            'results': self.data,
        }
class ResultLog(SurrogatePK, Model):
    # One scheduled-query result line reported by a node, with a set of
    # expression indexes over frequently-queried JSONB keys.

    # Processing states for a result-log entry.
    NEW = 0
    PENDING = 1
    COMPLETE = 2

    name = Column(db.String, nullable=False)
    timestamp = Column(db.DateTime, default=dt.datetime.utcnow)
    action = Column(db.String)
    # Raw result columns as returned by osquery.
    columns = Column(JSONB)
    node_id = reference_col('node', nullable=False)
    node = relationship('Node', backref=db.backref('result_logs', cascade='all, delete-orphan', lazy='dynamic'))
    uuid = Column(db.String, nullable=True)
    status = Column(db.Integer, default=NEW, nullable=False)
    task_id = Column(db.String, nullable=True)

    def __init__(self, name=None, action=None, columns=None, timestamp=None,
                 node=None, node_id=None, uuid=None, **kwargs):
        # Falsy columns values are normalized to an empty dict; the node
        # object, when given, wins over node_id.
        self.name = name
        self.action = action
        self.columns = columns or {}
        self.timestamp = timestamp
        self.uuid = uuid
        if node:
            self.node = node
        elif node_id:
            self.node_id = node_id

    def to_json(self):
        # Naive serialization via the instance attribute dict.
        return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True, indent=4)

    def as_dict(self):
        # Every mapped column, values as-is.
        return {c.name: getattr(self, c.name) for c in self.__table__.columns}

    def to_dict(self):
        # Like as_dict(), but 'timestamp' is rendered as a formatted string.
        dictionary = {}
        for c in self.__table__.columns:
            if not c.name == "timestamp":
                dictionary[c.name] = getattr(self, c.name)
            else:
                dictionary[c.name] = getattr(
                    self, c.name).strftime('%m/%d/%Y %H/%M/%S')
        return dictionary

    @declared_attr
    def __table_args__(cls):
        # A descending (node_id, timestamp) index for "latest results per
        # node", plus btree expression indexes over individual JSONB keys
        # that the application searches on (hashes, process/network fields).
        return (
            Index('idx_%s_node_id_timestamp_desc' % cls.__tablename__,
                  'node_id', cls.timestamp.desc()),
            Index('idx_%s_name' % cls.__tablename__, 'name'),
            Index('idx_%s_on_columns_md5' % cls.__tablename__,
                  sqlalchemy.text("(columns->'md5')"), postgresql_using='btree'),
            Index('idx_%s_on_columns_domain_name' % cls.__tablename__,
                  sqlalchemy.text("(columns->'domain_name')"), postgresql_using='btree'),
            Index('idx_%s_on_columns_ja3_md5' % cls.__tablename__,
                  sqlalchemy.text("(columns->'ja3_md5')"), postgresql_using='btree'),
            Index('idx_%s_on_columns_sha256' % cls.__tablename__,
                  sqlalchemy.text("(columns->'sha256')"), postgresql_using='btree'),
            Index('idx_%s_on_columns_process_guid' % cls.__tablename__,
                  sqlalchemy.text("(columns->'process_guid')"), postgresql_using='btree'),
            Index('idx_%s_on_columns_parent_process_guid' % cls.__tablename__,
                  sqlalchemy.text("(columns->'parent_process_guid')"), postgresql_using='btree'),
            Index('idx_%s_on_columns_time' % cls.__tablename__,
                  sqlalchemy.text("(columns->'time')"), postgresql_using='btree'),
            Index('idx_%s_on_columns_target_path' % cls.__tablename__,
                  sqlalchemy.text("(columns->'target_path')"), postgresql_using='btree'),
            Index('idx_%s_on_columns_target_name' % cls.__tablename__,
                  sqlalchemy.text("(columns->'target_name')"), postgresql_using='btree'),
            Index('idx_%s_on_columns_process_name' % cls.__tablename__,
                  sqlalchemy.text("(columns->'process_name')"), postgresql_using='btree'),
            Index('idx_%s_on_columns_remote_address' % cls.__tablename__,
                  sqlalchemy.text("(columns->'remote_address')"), postgresql_using='btree'),
        )
class DefaultQuery(SurrogatePK, Model):
    """A default scheduled query belonging to a config."""

    ARCH_x86 = "x86"
    ARCH_x64 = "x86_64"

    name = Column(db.String, nullable=False)
    sql = Column(db.String, nullable=False)
    interval = Column(db.Integer, default=3600)
    platform = Column(db.String)
    arch = Column(db.String)
    version = Column(db.String)
    description = Column(db.String)
    value = Column(db.String)
    removed = Column(db.Boolean, nullable=False, default=True)
    snapshot = Column(db.Boolean, nullable=False, default=False)
    shard = Column(db.Integer)
    status = Column(db.Boolean, nullable=False, default=False)

    config_id = reference_col('config', nullable=True)
    config = relationship('Config', backref=db.backref('default_query', lazy='dynamic'))

    def __init__(self, name, query=None, sql=None, interval=3600, platform=None,
                 version=None, description=None, value=None, removed=False,
                 config_id=None, shard=None, status=None, snapshot=False,
                 arch=ARCH_x64, **kwargs):
        """Create a query; the SQL text may arrive as 'query' or 'sql'."""
        self.name = name
        # 'query' takes precedence over 'sql' when both are supplied.
        self.sql = query or sql
        self.interval = int(interval)
        self.platform = platform
        self.arch = arch
        self.version = version
        self.description = description
        self.value = value
        self.removed = removed
        self.snapshot = snapshot
        self.shard = shard
        self.status = status
        self.config_id = config_id

    def __repr__(self):
        return f'<Query: {self.name}>'

    def to_dict(self):
        """Return the osquery-schedule representation of this query."""
        # NOTE(review): 'removed' is hard-coded to False here even though the
        # model has a 'removed' column -- presumably intentional for the
        # emitted schedule; confirm with callers before changing.
        payload = {
            'id': self.id,
            'query': self.sql,
            'interval': self.interval,
            'platform': self.platform,
            'version': self.version,
            'description': self.description,
            'value': self.value,
            'removed': False,
            'shard': self.shard,
            'snapshot': self.snapshot,
            'status': self.status
        }
        return payload
class Alerts(SurrogatePK, Model):
    """An alert raised for a node by a rule, threat-intel, or IOC source."""

    # Alert sources.
    RULE = "rule"
    THREAT_INTEL = "Threat Intel"
    SOURCE_IOC = "IOC"
    # Alert states.
    RESOLVED = "RESOLVED"
    OPEN = "OPEN"
    # Severities.
    CRITICAL = "CRITICAL"
    WARNING = "WARNING"
    LOW = "LOW"
    INFO = "INFO"

    query_name = Column(db.String, nullable=False)
    message = Column(JSONB)
    rule_id = reference_col('rule', nullable=True)
    rule = relationship(
        'Rule',
        backref=db.backref('alerts', lazy='dynamic'),
    )
    node_id = reference_col('node', nullable=False)
    node = relationship(
        'Node',
        backref=db.backref('alerts', cascade='all, delete-orphan', lazy='dynamic'),
    )
    severity = Column(db.String, nullable=True)
    type = Column(db.String, nullable=True)
    recon_queries = Column(JSONB)
    result_log_uid = Column(db.String)
    source = Column(db.String)
    source_data = Column(JSONB)
    status = Column(db.String, default=OPEN)
    created_at = Column(db.DateTime, nullable=False, default=dt.datetime.utcnow)

    def __init__(self, message, query_name, node_id, rule_id, recon_queries,
                 result_log_uid, type, source, source_data, severity):
        """Create an alert record from its raw field values."""
        self.node_id = node_id
        self.rule_id = rule_id
        self.message = message
        self.query_name = query_name
        self.recon_queries = recon_queries
        self.result_log_uid = result_log_uid
        self.type = type
        self.source = source
        self.source_data = source_data
        self.severity = severity

    def to_dict(self):
        """Return every mapped column as-is."""
        out = {}
        for column in self.__table__.columns:
            out[column.name] = getattr(self, column.name)
        return out

    def as_dict(self):
        """Like to_dict(), but with 'created_at' rendered as a string."""
        stamp_format = '%m/%d/%Y %H/%M/%S'
        return {
            col.name: (getattr(self, col.name).strftime(stamp_format)
                       if col.name == "created_at"
                       else getattr(self, col.name))
            for col in self.__table__.columns
        }

    @declared_attr
    def __table_args__(cls):
        # Indexes supporting "newest alerts first" and filtering by status.
        return (Index('idx_%s_created_at_desc' % cls.__tablename__,
                      'created_at', cls.created_at.desc()),
                Index('idx_%s_status' % cls.__tablename__, 'status'))
class Alerts(SurrogatePK, Model):
    """An alert raised for a node by a rule, threat-intel, or IOC source."""

    # Alert sources.
    RULE = "rule"
    THREAT_INTEL = "Threat Intel"
    SOURCE_IOC = "IOC"
    # Alert states.
    RESOLVED = "RESOLVED"
    OPEN = "OPEN"

    query_name = Column(db.String, nullable=False)
    message = Column(JSONB)
    node_id = reference_col('node', nullable=False)
    rule_id = reference_col('rule', nullable=False)
    rule = relationship(
        'Rule',
        backref=db.backref('alerts', lazy='dynamic'),
    )
    node = relationship(
        'Node',
        backref=db.backref('alerts', lazy='dynamic'),
    )
    created_at = Column(db.DateTime, nullable=False, default=dt.datetime.utcnow)
    # Bug fix: 'severity' and 'type' were each declared twice in the class
    # body; the declarative metaclass only maps the final binding, so the
    # earlier duplicates were dead code.  Kept the surviving (equivalent,
    # nullable-by-default) definitions.
    severity = Column(db.String)
    type = Column(db.String)
    recon_queries = Column(JSONB)
    result_log_uid = Column(db.String)
    source = Column(db.String)
    source_data = Column(JSONB)
    status = Column(db.String, default=OPEN)

    def __init__(self, message, query_name, node_id, rule_id, recon_queries,
                 result_log_uid, type, source, source_data, severity):
        """Create an alert record from its raw field values."""
        self.message = message
        self.query_name = query_name
        self.recon_queries = recon_queries
        self.node_id = node_id
        self.rule_id = rule_id
        self.type = type
        self.source = source
        self.source_data = source_data
        self.result_log_uid = result_log_uid
        self.severity = severity

    def to_dict(self):
        """Return every mapped column as-is."""
        return {c.name: getattr(self, c.name) for c in self.__table__.columns}

    def as_dict(self):
        """Like to_dict(), but with 'created_at' rendered as a string."""
        dictionary = {}
        for c in self.__table__.columns:
            if not c.name == "created_at":
                dictionary[c.name] = getattr(self, c.name)
            else:
                dictionary[c.name] = getattr(
                    self, c.name).strftime('%m/%d/%Y %H/%M/%S')
        return dictionary