def export_detection_openioc(family, tlp_level):
    """
    Exports the detection OPENIOC items.

    Builds one complete OpenIOC document per eligible detection item and
    concatenates them; items whose TLP sensibility exceeds `tlp_level`
    are skipped.  Returns "" when no item qualifies.
    TODO: move openioc generation to a new file.
    """
    generated_output = ""
    for item in family.detection_items:
        if item.TLP_sensibility <= tlp_level:
            if item.item_type == DetectionType.OPENIOC:
                # BUG FIX: use "+=" here — the original assigned with "="
                # on every iteration, so only the LAST OpenIOC item
                # survived and all earlier ones were silently dropped.
                generated_output += "<?xml version=\"1.0\" encoding=\"us-ascii\"?>\n"
                generated_output += '<ioc xmlns:xsd="http://www.w3.org/2001/XMLSchema" id="polichombr-' + \
                    str(family.id) + '-' + str(item.id) + '">\n'
                generated_output += '<short_description>' + family.name + \
                    ' custom IOC #' + str(item.id) + \
                    '</short_description>\n'
                # BUG FIX: "Cutom" -> "Custom" in the emitted description.
                generated_output += '<description>Custom IOC for ' + \
                    family.name + ' samples family</description>\n'
                # BUG FIX: terminate the tlp_sensibility element with a
                # newline like every other line of the document.
                generated_output += '<tlp_sensibility>' + \
                    TLPLevel.tostring(
                        item.TLP_sensibility) + '</tlp_sensibility>\n'
                generated_output += '<authored_date>Polichombr</authored_date>\n'
                generated_output += '<links />\n'
                generated_output += '<definition>\n'
                generated_output += item.abstract + '\n'
                generated_output += '</definition>\n</ioc>\n\n\n'
    return generated_output
def generate_samples_zip_file(family, tlp_level):
    """
    Generates a sample archive (gzipped tar, despite the "zip" name).

    We actually store it in the storage under a unique filename:
    family-tlp_level-sha256(samples sha256).  By doing this we may avoid
    losing time generating already generated files.
    Returns the archive path, or None for an unknown TLP level.
    TODO: move to temp folder to avoid disk storage overload.
    """
    if TLPLevel.tostring(int(tlp_level)) == "":
        return None
    # Concatenated hashes of the eligible samples act as a cache key.
    zipname = ""
    for sample in family.samples:
        if sample.TLP_sensibility <= tlp_level:
            zipname += sample.sha256
    # BUG FIX: hashlib requires bytes on Python 3; hex digests are pure
    # ASCII so .encode('ascii') is a no-op on Python 2.
    zip_fname = family.name + "-" + \
        str(tlp_level) + "-" + sha256(zipname.encode('ascii')).hexdigest()
    zip_fname += ".tar.gz"
    zip_path = os.path.join(app.config['STORAGE_PATH'], zip_fname)
    if os.path.exists(zip_path):
        # Already generated for this exact sample set: reuse it.
        return zip_path
    # BUG FIX: context manager so the archive is closed (and flushed)
    # even if tarf.add() raises; the original leaked the handle on error.
    with tarfile.open(zip_path, "w:gz") as tarf:
        for sample in family.samples:
            if sample.TLP_sensibility <= tlp_level:
                if os.path.exists(sample.storage_file):
                    tarf.add(sample.storage_file, arcname=sample.sha256)
    return zip_path
def set_tlp_level(self, family, tlp_level, no_propagation=False):
    """
    Change a family's TLP level and propagate it to related items.

    Refuses an unknown level, or a level looser than the parent's.
    Unless no_propagation is set, samples, associated files, detection
    items and subfamilies are raised to at least the new level.
    """
    if TLPLevel.tostring(tlp_level) == "":
        return False
    # A child family may never be more permissive than its parent.
    if family.parents and family.parents.TLP_sensibility > tlp_level:
        return False
    family.TLP_sensibility = tlp_level
    if not no_propagation:
        def raise_floor(items):
            # Bump every item that is currently less restricted.
            for elem in items:
                if elem.TLP_sensibility < tlp_level:
                    elem.TLP_sensibility = tlp_level
        raise_floor(family.samples)
        raise_floor(family.associated_files)
        raise_floor(family.detection_items)
        for child in family.subfamilies:
            if child.TLP_sensibility < tlp_level:
                self.set_tlp_level(child, tlp_level, no_propagation)
    db.session.commit()
    return True
def set_tlp_level(tlp_level, yar):
    """
    Change a yara rule's TLP level.

    Returns True on success, False when the level is unknown.
    """
    valid = TLPLevel.tostring(tlp_level) is not None
    if valid:
        yar.TLP_sensibility = tlp_level
        db.session.commit()
    return valid
def create_sample_from_file(self, file_data, orig_filename="", user=None,
                            tlp_level=TLPLevel.TLPWHITE):
    """
    Creates a sample from file data. Updates metadata, etc.

    If a Sample with the same SHA-256 already exists AND its stored file
    is still present on disk, that existing Sample is returned unchanged.
    Otherwise the binary is (re)written to storage and the hashes, size,
    MIME type and analysis status are (re)computed.

    Returns the Sample, or None for an unknown TLP level.
    """
    if TLPLevel.tostring(tlp_level) == "":
        return None
    sha_256 = sha256(file_data).hexdigest()
    sample = None
    # check if we already had the file or not
    # If not, we will just update some information
    if Sample.query.filter_by(sha256=sha_256).count() != 0:
        sample = Sample.query.filter_by(sha256=sha_256).first()
        # Known sample with its binary intact: nothing to do.
        # NOTE(review): in this early-return path orig_filename/user are
        # NOT attached — confirm this is intended.
        if sample.storage_file is not None and sample.storage_file != "" and os.path.exists(
                sample.storage_file):
            return sample
    # Create if needed
    if sample is None:
        sample = Sample()
        db.session.add(sample)
        sample.TLP_sensibility = tlp_level
        sample.family_id = None
        sample.file_date = datetime.datetime.now()
    elif sample.file_date is None:
        # Existing record missing its timestamp: backfill it.
        sample.file_date = datetime.datetime.now()
    # Drop file to disk
    filename = sha_256 + ".bin"
    file_path = os.path.join(app.config['STORAGE_PATH'], filename)
    with open(file_path, 'wb') as myfile:
        myfile.write(file_data)
    # Generic data
    sample.analysis_status = AnalysisStatus.TOSTART
    sample.storage_file = file_path
    # do_sample_type_detect returns (mime_type, full_mime_type).
    mime_type = self.do_sample_type_detect(file_path)
    sample.mime_type = mime_type[0]
    sample.full_mime_type = mime_type[1]
    sample.md5 = md5(file_data).hexdigest()
    sample.sha1 = sha1(file_data).hexdigest()
    sample.sha256 = sha_256
    sample.size = len(file_data)
    # Specific metadata, resulting from Tasks
    sample.import_hash = ""
    sample.machoc_hash = ""
    db.session.commit()
    if orig_filename != "":
        self.add_filename(sample, orig_filename)
    if user is not None:
        self.add_user(user, sample)
    return sample
def set_tlp_level(sample, tlp_level):
    """
    Change file's TLP level.

    Rejected when the level is unknown, or looser than the level of any
    family the sample belongs to.
    """
    if TLPLevel.tostring(tlp_level) == "":
        return False
    # A sample may never be more permissive than its families.
    if any(fam.TLP_sensibility > tlp_level for fam in sample.families):
        return False
    sample.TLP_sensibility = tlp_level
    db.session.commit()
    return True
def export_detection_snort(family, tlp_level):
    """
    Exports the yara detection SNORT rules.

    Only items at or below the requested TLP level are included.
    """
    chunks = ["# SNORT ruleset for family " + family.name + "\n\n"]
    for item in family.detection_items:
        if item.TLP_sensibility <= tlp_level and \
                item.item_type == DetectionType.SNORT:
            chunks.append("# rule internal name: " + item.name + "\n")
            chunks.append("# rule TLP sensibility: " +
                          TLPLevel.tostring(item.TLP_sensibility) + "\n")
            chunks.append(item.abstract + "\n\n")
    return "".join(chunks)
def export_detection_custom(family, tlp_level):
    """
    Exports the yara detection CUSTOM items.

    Only items at or below the requested TLP level are included.
    """
    chunks = ["Custom detection items for family " + family.name + "\n\n"]
    for item in family.detection_items:
        if item.TLP_sensibility <= tlp_level and \
                item.item_type == DetectionType.CUSTOM:
            chunks.append("Name: " + item.name + "\n")
            chunks.append("TLP sensibility: " +
                          TLPLevel.tostring(item.TLP_sensibility) + "\n")
            chunks.append("Content:\n" + item.abstract + "\n\n")
    return "".join(chunks)
def export_yara_ruleset(family, tlp_level):
    """
    Exports the yara rules.

    Only rules at or below the requested TLP level are included.
    """
    chunks = ["/* Polichombr ruleset export */\n/* Family: " +
              family.name + " */\n\n"]
    for rule in family.yaras:
        if rule.TLP_sensibility <= tlp_level:
            chunks.append("/* Internal name: " + rule.name + " */\n")
            chunks.append("/* TLP level: " +
                          TLPLevel.tostring(rule.TLP_sensibility) + " */\n")
            chunks.append("/* Creation date: " +
                          str(rule.creation_date) + " */\n")
            chunks.append(rule.raw_rule + "\n\n")
    return "".join(chunks)
def add_file(filedata, filename, description, tlp_level, family):
    """
    Creates an attached file.

    Stores `filedata` under a time-derived name and records a
    FamilyDataFile for it.  Returns False for an unknown TLP level.
    """
    if TLPLevel.tostring(tlp_level) is None:
        return False
    # BUG FIX: hashlib requires bytes on Python 3 (ASCII digits, so the
    # encode is a no-op on Python 2).
    # NOTE(review): two uploads in the same second collide on this name
    # and the second overwrites the first — consider hashing filedata.
    storage_file_name = md5(
        str(int(time.time())).encode('ascii')).hexdigest() + ".bin"
    stored_path = os.path.join(app.config['STORAGE_PATH'], storage_file_name)
    # BUG FIX: the original `open(...).write(...)` leaked the handle.
    with open(stored_path, 'wb') as out:
        out.write(filedata)
    x = FamilyDataFile()
    x.filepath = stored_path
    x.filename = filename
    x.description = description
    x.TLP_sensibility = tlp_level
    family.associated_files.append(x)
    db.session.add(x)
    db.session.commit()
    return True
def add_file(filedata, filename, description, tlp_level, family):
    """
    Creates an attached file.

    Stores `filedata` under a time-derived name and records a
    FamilyDataFile for it.  Returns False for an unknown TLP level.
    """
    if TLPLevel.tostring(tlp_level) is None:
        return False
    # BUG FIX: hashlib requires bytes on Python 3 (ASCII digits, so the
    # encode is a no-op on Python 2).
    # NOTE(review): two uploads in the same second collide on this name
    # and the second overwrites the first — consider hashing filedata.
    storage_file_name = md5(
        str(int(time.time())).encode('ascii')).hexdigest() + ".bin"
    stored_path = os.path.join(
        app.config['STORAGE_PATH'], storage_file_name)
    # BUG FIX: the original `open(...).write(...)` leaked the handle.
    with open(stored_path, 'wb') as out:
        out.write(filedata)
    x = FamilyDataFile()
    x.filepath = stored_path
    x.filename = filename
    x.description = description
    x.TLP_sensibility = tlp_level
    family.associated_files.append(x)
    db.session.add(x)
    db.session.commit()
    return True
def create_detection_item(abstract, name, tlp_level, item_type, family):
    """
    Creates a detection item.

    The item's TLP level is clamped up to the family's level, so an
    item can never be more permissive than its family.
    """
    if DetectionType.tostring(item_type) == "":
        return False
    if TLPLevel.tostring(tlp_level) is None:
        return False
    # Floor the sensibility at the family's own level.
    effective_tlp = max(tlp_level, family.TLP_sensibility)
    element = DetectionElement()
    element.abstract = abstract
    element.name = name
    element.TLP_sensibility = effective_tlp
    element.item_type = item_type
    family.detection_items.append(element)
    db.session.add(element)
    db.session.commit()
    return True
def create(self, name, raw_data, tlp_level):
    """
    Creates a new rule.

    The rule is syntax-checked with yara.compile before insertion, then
    executed against every sample already in the database.
    Returns the new rule, None on duplicate name or compile failure,
    and False on an unknown TLP level (NOTE: inconsistent with None).
    """
    if TLPLevel.tostring(tlp_level) is None:
        return False
    if YaraRule.query.filter_by(name=name).count() != 0:
        # A rule with this name already exists.
        return None
    try:
        yara.compile(source=raw_data)
    except Exception as err:
        # Broken rule: log the compile error and refuse it.
        app.logger.exception(err)
        return None
    rule = YaraRule(name, raw_data, tlp_level)
    db.session.add(rule)
    db.session.commit()
    # Retro-hunt: match the fresh rule against the existing corpus.
    for existing_sample in Sample.query.all():
        self.execute_on_sample(existing_sample, rule)
    return rule