def build_osquery_conf(machine):
    """Build the osquery configuration for a machine.

    Starts from the default zentral inventory snapshot query, then merges in
    the scheduled queries and file paths of every OSQueryProbe that matches
    the machine. Raises ImproperlyConfigured on duplicate query keys or file
    path categories.
    """
    schedule = {
        DEFAULT_ZENTRAL_INVENTORY_QUERY: {
            'query': "SELECT 'os_version' as table_name, name, major, minor, "
                     "patch, build from os_version;"
                     "SELECT 'system_info' as table_name, "
                     "computer_name, hostname, hardware_model, hardware_serial, "
                     "cpu_type, cpu_subtype, cpu_brand, cpu_physical_cores, "
                     "cpu_logical_cores, physical_memory from system_info",
            'snapshot': True,
            'interval': 600
        }
    }
    file_paths = {}
    # ProbeList to avoid cache inconsistency
    matching_probes = ProbeList().class_filter(OSQueryProbe).machine_filtered(machine)
    for probe in matching_probes:
        for key, query in probe.iter_schedule_queries():
            if key in schedule:
                raise ImproperlyConfigured(
                    'Query key {} already in schedule'.format(key)
                )
            query = query.copy()
            query.pop('key', None)  # the key is the dict key, not part of the conf
            schedule[key] = query
        for category, paths in probe.file_paths.items():
            if category in file_paths:
                raise ImproperlyConfigured(
                    'File path category {} not unique'.format(category)
                )
            file_paths[category] = paths
    return {'schedule': schedule, 'file_paths': file_paths}
def get_context_data(self, **kwargs):
    """Add the osquery probe lists to the template context."""
    ctx = super(ProbesView, self).get_context_data(**kwargs)
    pl = ProbeList()  # not all_probes to avoid cache inconsistency
    ctx.update({
        'osquery': True,
        'probes': pl.class_filter(OSQueryProbe),
        'event_type_probes': pl.module_prefix_filter("osquery")
                               .exclude_class(OSQueryProbe),
    })
    return ctx
def build_santa_conf(machine):
    """Build the santa conf.

    The santa conf is the source of the json document that is sent to the
    santa client when it connects to zentral. It is a list of all the rules
    found in all the configured probes for that client.
    """
    # ProbeList to avoid cache inconsistency
    matching_probes = ProbeList().class_filter(SantaProbe).machine_filtered(machine)
    return {
        'rules': [policy
                  for probe in matching_probes
                  for policy in probe.policies]
    }
def new_queries_for_machine(self, machine):
    """Return the not-yet-distributed queries for a machine.

    Finds the distributed-query / file-carve probes matching the machine
    that have not been sent to it yet and are recent enough, records them
    as seen via get_or_create, and returns a name -> query mapping.
    """
    serial_number = machine.serial_number
    seen_probe_ids = {
        dqpm.probe_source_id
        for dqpm in self.filter(machine_serial_number=serial_number)
    }
    cutoff = timezone.now() - MAX_DISTRIBUTED_QUERY_AGE
    wanted_models = ('OsqueryDistributedQueryProbe', 'OsqueryFileCarveProbe')
    # TODO: slow
    # could filter the probes that are too old in the db
    candidates = (ProbeList()
                  .filter(lambda probe: probe.get_model() in wanted_models)
                  .machine_filtered(machine)
                  .filter(lambda probe: probe.pk not in seen_probe_ids)
                  .filter(lambda probe: probe.created_at > cutoff))
    queries = {}
    for probe in candidates:
        _, created = self.get_or_create(probe_source_id=probe.pk,
                                        machine_serial_number=serial_number)
        if created:
            queries[probe.distributed_query_name] = probe.distributed_query
    return queries
def build_osquery_conf(machine, enrollment):
    """Build the osquery configuration for a machine.

    Starts from the mandatory inventory snapshot query, then merges in the
    scheduled queries (either at the top level or grouped in packs), file
    paths and file accesses of every matching osquery-family probe. When an
    enrollment is given, its configuration's dynamic flags are included as
    osquery options.
    """
    schedule = {
        INVENTORY_QUERY_NAME: {
            'query': get_inventory_query_for_machine(machine),
            'snapshot': True,
            'interval': 1001
        }
    }
    packs = {}
    file_paths = {}
    file_accesses = set()  # categories whose file events include file access
    # ProbeList() to avoid cache inconsistency
    # TODO: check performances
    probes = (ProbeList()
              .model_filter("OsqueryProbe",
                            "OsqueryComplianceProbe",
                            "OsqueryFIMProbe")
              .machine_filtered(machine))
    for probe in probes:
        # packs or schedule
        if probe.pack_key:
            pack_conf = packs.setdefault(probe.pack_key,
                                         {"discovery": probe.pack_discovery_queries,
                                          "queries": {}})
            query_dict = pack_conf["queries"]
        else:
            query_dict = schedule
        # add probe queries to query_dict
        for osquery_query in probe.iter_scheduled_queries():
            if osquery_query.name in query_dict:
                logger.warning("Query %s skipped, already seen", osquery_query.name)
            else:
                query_dict[osquery_query.name] = osquery_query.to_configuration()
        # file paths / file accesses
        for file_path in getattr(probe, "file_paths", []):
            # NOTE(review): a later probe with the same category silently
            # overwrites the earlier file path — confirm this is intended.
            file_paths[file_path.category] = [file_path.file_path]
            if file_path.file_access:
                file_accesses.add(file_path.category)
    conf = {'decorators': DECORATORS, 'schedule': schedule}
    if enrollment:
        conf['options'] = enrollment.configuration.get_dynamic_flags()
    if packs:
        conf['packs'] = packs
    if file_paths:
        conf['file_paths'] = file_paths
    if file_accesses:
        # sorted for a deterministic configuration: the previous
        # list(set(...)) had arbitrary ordering, which caused spurious
        # configuration changes between otherwise identical builds
        conf['file_accesses'] = sorted(file_accesses)
    return conf
def build_santa_conf(machine):
    """Build the santa conf.

    The santa conf is the source of the json document that is sent to the
    santa client when it connects to zentral. It is a list of all the rules
    found in all the configured probes for that client.
    """
    # ProbeList to avoid cache inconsistency
    probes = ProbeList().model_filter("SantaProbe").machine_filtered(machine)
    rules = []
    # TODO test duplicated rules
    for probe in probes:
        for rule in probe.rules:
            rules.append(rule.to_configuration())
    return {'rules': rules}
def handle(self, **options):
    """Export all the probes as a feed to the given output file."""
    timestamp = timezone.now().strftime("%Y-%m-%d %H:%M:%S")
    feed_name = "Export {}".format(timestamp)
    output_path = options["output_file"][0]
    with open(output_path, "w", encoding="utf-8") as output:
        output.write(export_feed(feed_name, ProbeList()))