def node_activity(node_id):
    """Render the activity page for a single node.

    Shows the node's recent result logs and its distributed query tasks
    within a time window. The window defaults to the last 7 days, or starts
    at an explicit ``?timestamp=<unix epoch float>`` query argument.

    :param node_id: primary key of the Node to display
    :returns: rendered ``activity.html`` template
    """
    # first_or_404() instead of first(): an unknown node id previously
    # returned None here and crashed with AttributeError (HTTP 500) on
    # node.result_logs below; now the client gets a proper 404.
    node = Node.query.filter_by(id=node_id) \
        .options(db.lazyload('*')).first_or_404()
    try:
        # request.args.get returns None when absent; float(None) raises,
        # so the except branch doubles as the "no timestamp given" default.
        timestamp = request.args.get('timestamp')
        timestamp = dt.datetime.fromtimestamp(float(timestamp))
    except Exception:
        timestamp = dt.datetime.utcnow()
        timestamp -= dt.timedelta(days=7)

    recent = node.result_logs.filter(ResultLog.timestamp > timestamp).all()

    # Eager-load the task's results, query, and node in one round trip;
    # lazyload('*') suppresses any other relationship loading.
    queries = db.session.query(DistributedQueryTask) \
        .join(DistributedQuery) \
        .join(DistributedQueryResult) \
        .join(Node) \
        .options(
            db.lazyload('*'),
            db.contains_eager(DistributedQueryTask.results),
            db.contains_eager(DistributedQueryTask.distributed_query),
            db.contains_eager(DistributedQueryTask.node)
        ) \
        .filter(
            DistributedQueryTask.node == node,
            or_(
                DistributedQuery.timestamp >= timestamp,
                DistributedQueryTask.timestamp >= timestamp,
            )
        ).all()

    return render_template('activity.html', node=node, recent=recent,
                           queries=queries)
def assemble_distributed_queries(node):
    """Retrieve all distributed queries assigned to a particular node in
    the NEW state.

    This function will change the state of the distributed query to
    PENDING, however will not commit the change. It is the responsibility
    of the caller to commit or rollback on the current database session.

    :param node: the Node whose NEW tasks should be collected
    :returns: dict mapping task guid -> SQL of its distributed query
    """
    now = dt.datetime.utcnow()
    new_tasks = (
        db.session.query(DistributedQueryTask)
        .join(DistributedQuery)
        .filter(
            DistributedQueryTask.node == node,
            DistributedQueryTask.status == DistributedQueryTask.NEW,
            DistributedQuery.not_before < now,
        )
        .options(
            db.lazyload('*'),
            db.contains_eager(DistributedQueryTask.distributed_query),
        )
    )

    assembled = {}
    for task in new_tasks:
        assembled[task.guid] = task.distributed_query.sql
        task.update(status=DistributedQueryTask.PENDING,
                    timestamp=now,
                    commit=False)
        # add this query to the session, but don't commit until we're
        # as sure as we possibly can be that it's been received by the
        # osqueryd client. unfortunately, there are no guarantees though.
        db.session.add(task)

    return assembled
def file_paths(self):
    """Return a query for the FilePath objects reachable from this node.

    FilePaths are associated indirectly: FilePath -> file_path_tags ->
    node_tags -> this node's id. Relationship loading is suppressed with
    lazyload('*').
    """
    session = db.session.object_session(self)
    query = session.query(FilePath)
    query = query.join(
        file_path_tags,
        file_path_tags.c['file_path.id'] == FilePath.id)
    query = query.join(
        node_tags,
        node_tags.c['tag.id'] == file_path_tags.c['tag.id'])
    query = query.filter(node_tags.c['node.id'] == self.id)
    return query.options(db.lazyload('*'))
def packs(self):
    """Return a query for the Packs reachable from this node.

    Packs are associated indirectly: Pack -> pack_tags -> node_tags ->
    this node's id. Relationship loading is suppressed with lazyload('*').
    """
    session = db.session.object_session(self)
    query = session.query(Pack)
    query = query.join(
        pack_tags,
        pack_tags.c['pack.id'] == Pack.id)
    query = query.join(
        node_tags,
        node_tags.c['tag.id'] == pack_tags.c['tag.id'])
    query = query.filter(node_tags.c['node.id'] == self.id)
    return query.options(db.lazyload('*'))
def queries(self):
    """Return a query for the Query objects reachable from this node.

    Queries are associated indirectly: Query -> query_tags -> node_tags ->
    this node's id. Relationship loading is suppressed with lazyload('*').
    """
    session = db.session.object_session(self)
    query = session.query(Query)
    query = query.join(
        query_tags,
        query_tags.c['query.id'] == Query.id)
    query = query.join(
        node_tags,
        node_tags.c['tag.id'] == query_tags.c['tag.id'])
    query = query.filter(node_tags.c['node.id'] == self.id)
    return query.options(db.lazyload('*'))
def queries():
    """Render the queries listing page.

    Only tagged queries are shown (the inner join on Query.tags filters
    out untagged ones); packs and tags are loaded eagerly for the template.
    """
    tagged_queries = (
        Query.query
        .options(
            db.lazyload('*'),
            db.joinedload(Query.packs),
            db.contains_eager(Query.tags),
        )
        .join(Query.tags)
        .all()
    )
    return render_template('queries.html', queries=tagged_queries)
def packs():
    """Render the packs listing page.

    Each pack's queries (and those queries' packs and tags) plus the
    pack's own tags are eager-loaded so the template triggers no further
    queries.
    """
    all_packs = (
        Pack.query
        .options(
            db.lazyload('*'),
            db.joinedload(Pack.queries, innerjoin=True)
              .joinedload(Query.packs, innerjoin=True),
            db.joinedload(Pack.queries, Query.tags, innerjoin=True),
            db.joinedload(Pack.tags, innerjoin=True),
        )
        .all()
    )
    return render_template('packs.html', packs=all_packs)
def decorated_function(*args, **kwargs):
    """Authenticate the incoming osquery request by its node_key.

    Rejects requests without valid JSON or an unknown/inactive node,
    otherwise stamps the node's check-in time and delegates to the
    wrapped view with ``node=`` injected.
    """
    # in v1.7.4, the Content-Encoding header is set when
    # --logger_tls_compress=true
    if request.headers.get('Content-Encoding') == 'gzip':
        request._cached_data = gzip.GzipFile(
            fileobj=BytesIO(request.get_data())).read()

    request_json = request.get_json()

    if not request_json or 'node_key' not in request_json:
        current_app.logger.error(
            "%s - Request did not contain valid JSON data. This could "
            "be an attempt to gather information about this endpoint "
            "or an automated scanner.",
            request.remote_addr
        )
        # Return nothing
        return ""

    node_key = request_json.get('node_key')
    node = Node.query.filter_by(node_key=node_key) \
        .options(db.lazyload('*')).first()

    if not node:
        current_app.logger.error(
            "%s - Could not find node with node_key %s",
            request.remote_addr, node_key
        )
        return jsonify(node_invalid=True)

    if not node.is_active:
        current_app.logger.error(
            "%s - Node %s came back from the dead!",
            request.remote_addr, node_key
        )
        return jsonify(node_invalid=True)

    # Record the check-in but leave the commit to the request teardown.
    node.update(
        last_checkin=dt.datetime.utcnow(),
        last_ip=request.remote_addr,
        commit=False
    )

    return f(node=node, *args, **kwargs)
def decorated_function(*args, **kwargs):
    """Validate the request's node_key and hand the Node to the wrapped view.

    Empty-body response for malformed JSON; ``node_invalid`` JSON for an
    unknown or inactive node; otherwise updates the node's last check-in
    (uncommitted) and calls the wrapped function with ``node=``.
    """
    # in v1.7.4, the Content-Encoding header is set when
    # --logger_tls_compress=true
    if request.headers.get('Content-Encoding') == 'gzip':
        request._cached_data = gzip.GzipFile(
            fileobj=BytesIO(request.get_data())).read()

    payload = request.get_json()

    if not payload or 'node_key' not in payload:
        current_app.logger.error(
            "%s - Request did not contain valid JSON data. This could "
            "be an attempt to gather information about this endpoint "
            "or an automated scanner.",
            request.remote_addr)
        # Return nothing
        return ""

    node_key = payload.get('node_key')
    node = Node.query.filter_by(node_key=node_key) \
        .options(db.lazyload('*')).first()

    if not node:
        current_app.logger.error(
            "%s - Could not find node with node_key %s",
            request.remote_addr, node_key)
        return jsonify(node_invalid=True)

    if not node.is_active:
        current_app.logger.error("%s - Node %s came back from the dead!",
                                 request.remote_addr, node_key)
        return jsonify(node_invalid=True)

    node.update(last_checkin=dt.datetime.utcnow(),
                last_ip=request.remote_addr,
                commit=False)

    return f(node=node, *args, **kwargs)
def assemble_file_paths(node):
    """Merge all of a node's file-path configurations into one dict.

    Iterates the node's FilePath rows (other relationships suppressed via
    lazyload('*')) and folds each ``to_dict()`` result into a single
    mapping. NOTE(review): on duplicate keys the last FilePath wins —
    presumably keys are category names; verify against FilePath.to_dict().
    """
    merged = {}
    for fp in node.file_paths.options(db.lazyload('*')):
        merged.update(fp.to_dict())
    return merged
def assemble_schedule(node):
    """Build the schedule mapping (query name -> query config dict) for a node.

    Same last-wins behavior as the original loop if two queries share a
    name; relationship loading is suppressed with lazyload('*').
    """
    return {
        query.name: query.to_dict()
        for query in node.queries.options(db.lazyload('*'))
    }