def store_sd(self, sd):
    """Persist one LDAP object's security descriptor into the DB session.

    `sd` is an LDAP entry exposing `objectClass`, `objectGUID`, `objectSid`
    and `nTSecurityDescriptor` attributes. Adds a JackDawSD row; commits on
    every 1000th descriptor, otherwise only flushes.
    """
    # Map selected LDAP objectClass values to JackDaw object-type labels.
    # 'user' and 'group' (and anything unlisted) keep their objectClass name.
    _TYPE_MAP = {
        'computer': 'machine',
        'groupPolicyContainer': 'gpo',
        'organizationalUnit': 'ou',
    }
    obj_class = sd.objectClass[-1]
    obj_type = _TYPE_MAP.get(obj_class, obj_class)

    jdsd = JackDawSD()
    jdsd.ad_id = self.ad_id
    jdsd.guid = str(sd.objectGUID)
    if sd.objectSid:
        jdsd.sid = str(sd.objectSid)
    jdsd.object_type = obj_type
    # .decode() so the column stores text — consistent with the async
    # store_sd implementations in this file (previously raw bytes were stored).
    jdsd.sd = base64.b64encode(sd.nTSecurityDescriptor).decode()

    self.session.add(jdsd)
    # NOTE(review): self.sd_ctr is read here but never incremented in this
    # method — confirm the caller advances it, otherwise this commits on
    # every call (counter stuck at its initial value).
    if self.sd_ctr % 1000 == 0:
        self.session.commit()
    else:
        self.session.flush()
async def store_sd(self, sd):
    """Serialize one security-descriptor record to the staging JSON-lines file.

    `sd` is a dict with keys 'adsec' (raw SD bytes), 'guid', 'sid' and
    'object_type'. Entries without a descriptor blob are skipped.
    """
    raw_sd = sd['adsec']
    if raw_sd is None:
        return

    entry = JackDawSD()
    entry.ad_id = self.ad_id
    entry.guid = sd['guid']
    entry.sid = sd['sid']
    entry.object_type = sd['object_type']
    entry.sd = base64.b64encode(raw_sd).decode()
    entry.sd_hash = sha1(raw_sd).hexdigest()

    self.sd_file.write(entry.to_json().encode() + b'\r\n')
async def store_sd(self, sd):
    """Buffer a single SD entry as one JSON line in the on-disk staging file.

    Expects `sd` to be a dict carrying 'adsec' (raw descriptor bytes),
    'guid', 'sid' and 'object_type'; a missing blob means nothing to store.
    """
    if sd['adsec'] is None:
        return

    blob = sd['adsec']
    record = JackDawSD()
    record.ad_id = self.ad_id
    record.guid = sd['guid']
    record.sid = sd['sid']
    record.object_type = sd['object_type']
    record.sd = base64.b64encode(blob).decode()
    record.sd_hash = sha1(blob).hexdigest()

    self.sd_file.write(record.to_json().encode() + b'\r\n')
async def stop_sds_collection(self, sds_p):
    """Finish SD collection: silence the given progress bar, then bulk-load
    the gzip-compressed staging file into the DB and delete it.

    Best-effort: any failure is logged, never raised to the caller.
    """
    sds_p.disable = True
    try:
        self.sd_file.close()
        uploaded = 0
        with gzip.GzipFile(self.sd_file_path, 'r') as gzf:
            # NOTE(review): total uses spn_finish_ctr — presumably the SD
            # record count; confirm it isn't the SPN counter.
            for raw_line in tqdm(gzf, desc='Uploading security descriptors to DB', total=self.spn_finish_ctr):
                self.session.add(JackDawSD.from_json(raw_line.strip()))
                uploaded += 1
                # commit in chunks to bound transaction size
                if uploaded % 10000 == 0:
                    self.session.commit()
        self.session.commit()
        os.remove(self.sd_file_path)
    except Exception as e:
        logger.exception('Error while uploading sds from file to DB')
async def calc_sds_mp(self):
    """Calculate SD-derived graph edges for this AD and load them into the DB.

    Two phases:
      1. stream every JackDawSD row for this ad_id in windows, hand each
         batch to self.calc_sds_batch, which writes edges to a staging CSV
      2. read the CSV back and bulk-insert Edge rows via the engine

    Emits GathererProgress messages on self.progress_queue throughout.
    Returns (True, None) on success, (False, exception) on failure.
    """
    await self.log_msg('Calculating SD edges')
    logger.debug('starting calc_sds_mp')
    try:
        cnt = 0
        # total SD count — used only for progress reporting
        total = self.session.query(func.count(JackDawSD.id)).filter(JackDawSD.ad_id == self.ad_id).scalar()
        logger.debug('calc_sds_mp total SDs %s' % str(total))
        q = self.session.query(JackDawSD).filter_by(ad_id = self.ad_id)

        # announce phase 1 start
        if self.progress_queue is not None:
            msg = GathererProgress()
            msg.type = GathererProgressType.SDCALC
            msg.msg_type = MSGTYPE.STARTED
            msg.adid = self.ad_id
            msg.domain_name = self.domain_name
            await self.progress_queue.put(msg)

        sdcalc_pbar = None
        if self.show_progress is True:
            sdcalc_pbar = tqdm(desc ='Writing SD edges to file', total=total, disable=self.disable_tqdm)

        # staging CSV that will hold the calculated edges
        sdfilename = 'sdcalc.csv'
        if self.work_dir is not None:
            sdfilename = str(self.work_dir.joinpath('sdcalc.csv'))
        testfile = open(sdfilename, 'w+', newline = '') #tempfile.TemporaryFile('w+', newline = '')
        buffer = []

        # lazily create a multiprocessing pool for the batch calculation
        if self.mp_pool is None:
            try:
                self.mp_pool = mp.Pool()
            except ImportError:
                self.mp_pool = None

        logger.debug('calc_sds_mp starting calc')
        tf = 0              # total SDs processed so far
        last_stat_cnt = 0   # processed count at the last progress message
        try:
            # windowed_query pages through the table in buffer_size chunks
            for adsd in windowed_query(q, JackDawSD.id, self.buffer_size):
                tf += 1
                # detach from the ORM session by round-tripping through a plain dict
                adsd = JackDawSD.from_dict(adsd.to_dict())
                buffer.append(adsd)
                if len(buffer) == self.buffer_size:
                    self.calc_sds_batch(buffer, testfile)
                    buffer = []
                    if sdcalc_pbar is not None:
                        sdcalc_pbar.update(self.buffer_size)

                # periodic progress message every progress_step_size items
                if self.progress_queue is not None and tf % self.progress_step_size == 0:
                    last_stat_cnt += self.progress_step_size
                    now = datetime.datetime.utcnow()
                    td = (now - self.progress_last_updated).total_seconds()
                    self.progress_last_updated = now
                    msg = GathererProgress()
                    msg.type = GathererProgressType.SDCALC
                    msg.msg_type = MSGTYPE.PROGRESS
                    msg.adid = self.ad_id
                    msg.domain_name = self.domain_name
                    msg.total = total
                    msg.total_finished = tf
                    if td > 0:
                        msg.speed = str(self.progress_step_size // td)
                    msg.step_size = self.progress_step_size
                    await self.progress_queue.put(msg)
                    await asyncio.sleep(0)

            # flush the final partial batch
            if len(buffer) > 0:
                self.calc_sds_batch(buffer, testfile)
                if self.progress_queue is not None:
                    now = datetime.datetime.utcnow()
                    td = (now - self.progress_last_updated).total_seconds()
                    self.progress_last_updated = now
                    msg = GathererProgress()
                    msg.type = GathererProgressType.SDCALC
                    msg.msg_type = MSGTYPE.PROGRESS
                    msg.adid = self.ad_id
                    msg.domain_name = self.domain_name
                    msg.total = total
                    msg.total_finished = tf
                    if td > 0:
                        msg.speed = str(len(buffer) // td)
                    msg.step_size = tf - last_stat_cnt
                    await self.progress_queue.put(msg)
                    await asyncio.sleep(0)
                buffer = []

            # phase 1 finished
            if self.progress_queue is not None:
                msg = GathererProgress()
                msg.type = GathererProgressType.SDCALC
                msg.msg_type = MSGTYPE.FINISHED
                msg.adid = self.ad_id
                msg.domain_name = self.domain_name
                await self.progress_queue.put(msg)
            if self.show_progress is True and sdcalc_pbar is not None:
                sdcalc_pbar.refresh()
                sdcalc_pbar.disable = True
        except Exception as e:
            logger.exception('SD calc exception!')
            raise e
        finally:
            # only close the pool if we own it (not handed in from outside)
            if self.foreign_pool is False and self.mp_pool is not None:
                self.mp_pool.close()

        # ---- phase 2: upload the staged CSV edge rows to the DB ----
        if self.progress_queue is not None:
            msg = GathererProgress()
            msg.type = GathererProgressType.SDCALCUPLOAD
            msg.msg_type = MSGTYPE.STARTED
            msg.adid = self.ad_id
            msg.domain_name = self.domain_name
            await self.progress_queue.put(msg)

        logger.debug('Writing SD edge file contents to DB')
        await self.log_msg('Writing SD edge file contents to DB')
        sdcalcupload_pbar = None
        if self.show_progress is True:
            # NOTE(review): cnt is still 0 here, so the pbar total is 0 —
            # presumably the edge count was intended; confirm.
            sdcalcupload_pbar = tqdm(desc = 'Writing SD edge file contents to DB', total = cnt, disable=self.disable_tqdm)

        engine = self.session.get_bind()
        print(engine)
        testfile.seek(0,0)  # rewind the staging CSV for reading
        last_stat_cnt = 0
        i = 0
        insert_buffer = []
        for line in testfile:
            i += 1
            line = line.strip()
            # CSV row format produced by calc_sds_batch: src,dst,label,<ignored>
            src_id, dst_id, label, _ = line.split(',')
            insert_buffer.append( { "ad_id": self.ad_id, 'graph_id' : self.graph_id, 'src' : int(src_id), 'dst' : int(dst_id), 'label' : label } )
            # bulk-insert every buffer_size*100 rows to keep memory bounded
            if i % (self.buffer_size*100) == 0:
                engine.execute(Edge.__table__.insert(), insert_buffer)
                if self.show_progress is True:
                    sdcalcupload_pbar.update(self.buffer_size*100)
                insert_buffer = []
            # periodic progress message for the upload phase
            if self.progress_queue is not None and i % self.progress_step_size == 0:
                last_stat_cnt += self.progress_step_size
                now = datetime.datetime.utcnow()
                td = (now - self.progress_last_updated).total_seconds()
                self.progress_last_updated = now
                msg = GathererProgress()
                msg.type = GathererProgressType.SDCALCUPLOAD
                msg.msg_type = MSGTYPE.PROGRESS
                msg.adid = self.ad_id
                msg.domain_name = self.domain_name
                msg.total = self.sd_edges_written
                msg.total_finished = i
                if td > 0:
                    msg.speed = str(self.progress_step_size // td)
                msg.step_size = self.progress_step_size
                await self.progress_queue.put(msg)
                await asyncio.sleep(0)

        # flush remaining edge rows
        if len(insert_buffer) > 0:
            engine.execute(Edge.__table__.insert(), insert_buffer)
            if self.show_progress is True:
                sdcalcupload_pbar.update(len(insert_buffer))
            insert_buffer = []

        # phase 2 finished
        if self.progress_queue is not None:
            msg = GathererProgress()
            msg.type = GathererProgressType.SDCALCUPLOAD
            msg.msg_type = MSGTYPE.FINISHED
            msg.adid = self.ad_id
            msg.domain_name = self.domain_name
            await self.progress_queue.put(msg)
        self.session.commit()
        return True, None
    except Exception as e:
        logger.exception('sdcalc!')
        return False, e
    finally:
        # always remove the staging CSV, success or failure
        os.remove(sdfilename)
async def store_file_data(self):
    """Upload the gzip-compressed SD staging file into the DB.

    Reads JSON-lines JackDawSD records from self.sd_file_path, bulk-saves
    them in chunks of 100, commits once at the end, and emits
    GathererProgress (SDUPLOAD) messages throughout. The staging file is
    removed in all cases; errors are logged and reported via the queue,
    never raised.
    """
    try:
        self.progress_last_updated = datetime.datetime.utcnow()
        # announce upload start
        if self.progress_queue is not None:
            msg = GathererProgress()
            msg.type = GathererProgressType.SDUPLOAD
            msg.msg_type = MSGTYPE.STARTED
            msg.adid = self.ad_id
            msg.domain_name = self.domain_name
            await self.progress_queue.put(msg)
        if self.show_progress is True:
            self.sd_upload_pbar = tqdm(desc='uploading SD to DB', total=self.total_targets)

        # close the staging file before re-opening it for reading
        if self.sd_file is not None:
            self.sd_file.close()

        cnt = 0             # records read so far
        buffer = []         # pending ORM objects for the next bulk save
        last_stat_cnt = 0   # record count at the last progress message
        with gzip.GzipFile(self.sd_file_path, 'r') as f:
            for line in f:
                buffer.append(JackDawSD.from_json(line.strip()))
                await asyncio.sleep(0)  # yield to the event loop each record
                cnt += 1
                # bulk-save every 100 records
                if cnt % 100 == 0:
                    self.session.bulk_save_objects(buffer)
                    buffer = []
                    if self.show_progress is True:
                        self.sd_upload_pbar.update()
                # periodic progress message
                if self.progress_queue is not None and cnt % self.progress_step_size == 0:
                    last_stat_cnt += self.progress_step_size
                    now = datetime.datetime.utcnow()
                    td = (now - self.progress_last_updated).total_seconds()
                    self.progress_last_updated = now
                    msg = GathererProgress()
                    msg.type = GathererProgressType.SDUPLOAD
                    msg.msg_type = MSGTYPE.PROGRESS
                    msg.adid = self.ad_id
                    msg.domain_name = self.domain_name
                    msg.total = self.total_targets
                    msg.total_finished = cnt
                    if td > 0:
                        msg.speed = str(self.progress_step_size // td)
                    msg.step_size = self.progress_step_size
                    await self.progress_queue.put(msg)
                    await asyncio.sleep(0)

        # flush the final partial buffer, then commit everything at once
        if len(buffer) > 0:
            self.session.bulk_save_objects(buffer)
            buffer = []
        self.session.commit()

        # final progress message covering the tail since the last update
        if self.progress_queue is not None:
            now = datetime.datetime.utcnow()
            td = (now - self.progress_last_updated).total_seconds()
            self.progress_last_updated = now
            msg = GathererProgress()
            msg.type = GathererProgressType.SDUPLOAD
            msg.msg_type = MSGTYPE.PROGRESS
            msg.adid = self.ad_id
            msg.domain_name = self.domain_name
            msg.total = self.total_targets
            msg.total_finished = cnt
            if td > 0:
                msg.speed = str( (self.total_targets - last_stat_cnt) // td)
            msg.step_size = self.total_targets - last_stat_cnt
            await self.progress_queue.put(msg)
            await asyncio.sleep(0)
    except Exception as e:
        # best-effort: log and report the error on the queue, do not raise
        logger.exception('Error while uploading sds from file to DB')
        if self.progress_queue is not None:
            msg = GathererProgress()
            msg.type = GathererProgressType.SDUPLOAD
            msg.msg_type = MSGTYPE.ERROR
            msg.adid = self.ad_id
            msg.domain_name = self.domain_name
            msg.error = e
            await self.progress_queue.put(msg)
    finally:
        # always try to remove the staging file; ignore if already gone
        try:
            os.remove(self.sd_file_path)
        except:
            pass
        if self.show_progress is True and self.sd_upload_pbar is not None:
            self.sd_upload_pbar.refresh()
            self.sd_upload_pbar.disable = True
        # FINISHED is always sent, even after an error
        if self.progress_queue is not None:
            msg = GathererProgress()
            msg.type = GathererProgressType.SDUPLOAD
            msg.msg_type = MSGTYPE.FINISHED
            msg.adid = self.ad_id
            msg.domain_name = self.domain_name
            await self.progress_queue.put(msg)