def do_node_post(self, data):
    # Store one uploaded carve block, then emit a "continue" event.
    data_data = data.pop("data")
    block_id = data["block_id"]
    cb = CarveBlock.objects.create(carve_session=self.carve_session,
                                   block_id=int(block_id))
    cb.file.save(str(block_id),
                 SimpleUploadedFile(str(block_id), b64decode(data_data)))
    # The session is finished once every expected block has been stored.
    session_finished = (CarveBlock.objects.filter(carve_session=self.carve_session).count()
                        == self.carve_session.block_count)
    probe_source = self.carve_session.probe_source
    post_file_carve_events(self.machine_serial_number, self.user_agent, self.ip,
                           [{"probe": {"id": probe_source.pk,
                                       "name": probe_source.name},
                             "action": "continue",
                             "block_id": block_id,
                             # size of the base64 payload, not of the decoded block
                             "block_size": len(data_data),
                             "session_finished": session_finished,
                             "session_id": self.session_id}])
    if session_finished:
        post_finished_file_carve_session(self.session_id)
    return {}
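# Illustrative only: the shape of the block upload body this handler reads,
# inferred from the keys it accesses. "session_id" is assumed to be resolved
# upstream into self.carve_session / self.session_id before this method runs.
EXAMPLE_CARVE_BLOCK_POST = {
    "session_id": "<64-char id returned by the carve start endpoint>",
    "block_id": "0",             # kept as a string in the event payload, int()ed for storage
    "data": "aGVsbG8gd29ybGQ=",  # base64-encoded block content
}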
def do_node_post(self):
    # Validate and store one uploaded carve block.
    try:
        block_id = int(self.data["block_id"])
    except KeyError:
        raise SuspiciousOperation("Missing block_id")
    except ValueError:
        raise SuspiciousOperation("Invalid block_id")
    else:
        block_filename = str(block_id)
    try:
        block_data = b64decode(self.data["data"])
    except KeyError:
        raise SuspiciousOperation("Missing block data")
    except Exception:
        raise SuspiciousOperation("Could not read block data")
    cb = FileCarvingBlock.objects.create(file_carving_session=self.session,
                                         block_id=block_id)
    cb.file.save(block_filename, SimpleUploadedFile(block_filename, block_data))
    # The session is finished once every expected block has been stored.
    session_finished = (FileCarvingBlock.objects.filter(file_carving_session=self.session).count()
                        == self.session.block_count)
    post_file_carve_events(self.machine.serial_number, self.user_agent, self.ip,
                           [{"action": "continue",
                             "block_id": block_id,
                             "session_finished": session_finished,
                             "session_id": str(self.session.pk)}])
    if session_finished:
        # Build the archive asynchronously, only once the enclosing transaction commits.
        transaction.on_commit(
            lambda: build_file_carving_session_archive.apply_async((str(self.session.pk),))
        )
    return {}
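# Minimal sketch of the reassembly implied by build_file_carving_session_archive
# (queued above, implementation not shown): concatenate the stored blocks in
# block_id order. The function name, signature, and file handling here are
# illustrative, not the actual task code, which presumably also wraps the
# result into the final archive file.
def assemble_file_carving_session_blocks(session, outfile):
    blocks = FileCarvingBlock.objects.filter(file_carving_session=session).order_by("block_id")
    for block in blocks:
        with block.file.open("rb") as f:
            outfile.write(f.read())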
def do_node_post(self, data):
    # Start a new carve session for the probe encoded in the request_id suffix.
    probe_source_id = int(data["request_id"].split("_")[-1])
    probe_source = ProbeSource.objects.get(pk=probe_source_id)
    session_id = get_random_string(64)
    CarveSession.objects.create(probe_source=probe_source,
                                machine_serial_number=self.machine_serial_number,
                                session_id=session_id,
                                carve_guid=data["carve_id"],
                                carve_size=int(data["carve_size"]),
                                block_size=int(data["block_size"]),
                                block_count=int(data["block_count"]))
    post_file_carve_events(self.machine_serial_number, self.user_agent, self.ip,
                           [{"probe": {"id": probe_source.pk,
                                       "name": probe_source.name},
                             "action": "start",
                             "session_id": session_id}])
    return {"session_id": session_id}
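# Illustrative only: a carve start body with the keys parsed above. The
# request_id suffix carries the ProbeSource pk; the numeric fields are int()ed
# by the handler whether they arrive as numbers or strings.
EXAMPLE_CARVE_START_POST = {
    "request_id": "<prefix>_17",  # trailing integer -> ProbeSource pk 17
    "carve_id": "f9b17cb0-...",   # carve GUID generated by osquery (truncated here)
    "carve_size": 16384,          # = block_size * block_count
    "block_size": 4096,
    "block_count": 4,
}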
def do_node_post(self):
    request_id = self.data.get("request_id")
    if not request_id:
        raise SuspiciousOperation("Missing request_id")
    # origin
    distributed_query = pack_query = None
    try:
        # distributed queries are sent with the distributed query machine pk as key
        dqm_pk = int(request_id)
    except ValueError:
        # pack query
        try:
            pack_query = PackQuery.objects.get_with_config_key(request_id)
        except ValueError:
            raise SuspiciousOperation("Unknown request_id format")
        except PackQuery.DoesNotExist:
            raise Http404("Unknown pack query")
    else:
        try:
            dqm = DistributedQueryMachine.objects.select_related("distributed_query").get(pk=dqm_pk)
        except DistributedQueryMachine.DoesNotExist:
            raise Http404("Unknown distributed query")
        distributed_query = dqm.distributed_query
    fcs = FileCarvingSession.objects.create(
        id=uuid.uuid4(),
        distributed_query=distributed_query,
        pack_query=pack_query,
        serial_number=self.machine.serial_number,
        carve_guid=self.data["carve_id"],
        carve_size=int(self.data["carve_size"]),
        block_size=int(self.data["block_size"]),
        block_count=int(self.data["block_count"])
    )
    session_id = str(fcs.pk)
    post_file_carve_events(self.machine.serial_number, self.user_agent, self.ip,
                           [{"action": "start", "session_id": session_id}])
    return {"session_id": session_id}
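# The request_id dispatch above, in short: an all-digits request_id is looked
# up as a DistributedQueryMachine pk; anything else is handed to
# PackQuery.objects.get_with_config_key, which raises ValueError on a
# malformed key (caught above). The exact pack query config key format is
# defined by get_with_config_key and not shown here. Illustrative values only:
#
#     "12345"          -> DistributedQueryMachine pk 12345
#     "<config key>"   -> pack query, via get_with_config_key
#     anything else    -> SuspiciousOperation / Http404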
def do_node_post(self, data):
    # Dispatch distributed query results to the inventory, to distributed
    # query probes, or to file carve probes, based on the query key.
    dq_payloads = []
    fc_payloads = []

    def get_probe_pk(key):
        return int(key.split('_')[-1])

    queries = data['queries']
    ps_d = {ps.id: ps
            for ps in ProbeSource.objects.filter(
                pk__in=[get_probe_pk(k)
                        for k in queries.keys()
                        if not k.startswith(INVENTORY_DISTRIBUTED_QUERY_PREFIX)]
            ).filter(Q(model='OsqueryDistributedQueryProbe')
                     | Q(model='OsqueryFileCarveProbe'))}
    inventory_snapshot = []
    for key, val in queries.items():
        try:
            status = int(data['statuses'][key])
        except KeyError:
            # osquery < 2.1.2 has no statuses
            status = 0
        if key.startswith(INVENTORY_DISTRIBUTED_QUERY_PREFIX):
            if status == 0 and val:
                inventory_snapshot.extend(val)
            else:
                logger.warning("Inventory distributed query write with status = %s and val = %s",
                               status, val)
        else:
            try:
                probe_source = ps_d[get_probe_pk(key)]
            except KeyError:
                logger.error("Unknown distributed query probe %s", key)
            else:
                payload = {'probe': {'id': probe_source.pk,
                                     'name': probe_source.name}}
                if status > 0:
                    # error
                    payload["error"] = True
                    payload["empty"] = True
                elif status == 0:
                    payload["error"] = False
                    if val:
                        payload["result"] = val
                        payload["empty"] = False
                    else:
                        payload["empty"] = True
                else:
                    raise ValueError("Unknown distributed query status '{}'".format(status))
                if probe_source.model == 'OsqueryDistributedQueryProbe':
                    dq_payloads.append(payload)
                else:
                    fc_payloads.append(payload)
    if dq_payloads:
        post_distributed_query_result(self.machine_serial_number, self.user_agent, self.ip,
                                      dq_payloads)
    if fc_payloads:
        post_file_carve_events(self.machine_serial_number, self.user_agent, self.ip,
                               fc_payloads)
    if inventory_snapshot:
        self.commit_inventory_query_result(inventory_snapshot)
    return {}
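# Illustrative only: a distributed write body for the handler above. The query
# key names are made up; the code only requires that non-inventory keys end
# with the probe source pk and that inventory keys start with
# INVENTORY_DISTRIBUTED_QUERY_PREFIX. osquery < 2.1.2 omits "statuses".
EXAMPLE_DISTRIBUTED_WRITE_POST = {
    "queries": {
        "q_42": [{"uid": "0", "username": "root"}],  # probe source pk 42, one result row
        "q_43": [],                                  # empty result -> payload["empty"] = True
    },
    "statuses": {
        "q_42": "0",  # 0 = success, > 0 = error
        "q_43": "0",
    },
}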