Code Example #1
    def run(self, obj, config):
        start_offset = config['start']
        end_offset = config['end']
        user = self.current_task.user

        if not user.has_access_to(SampleACL.WRITE):
            self._info("User does not have permission to add Samples to CRITs")
            self._add_result(
                "Service Canceled",
                "User does not have permission to add Samples to CRITs")
            return

        # Start must be 0 or higher. If end is greater than zero it must
        # also be greater than start_offset.
        if start_offset < 0 or (end_offset > 0 and start_offset > end_offset):
            self._error("Invalid offsets.")
            return

        data = obj.filedata.read()[start_offset:end_offset]
        if not data:
            self._error("No data.")
        else:
            filename = hashlib.md5(data).hexdigest()
            handle_file(filename,
                        data,
                        obj.source,
                        related_id=str(obj.id),
                        related_type=str(obj._meta['crits_type']),
                        campaign=obj.campaign,
                        source_method=self.name,
                        relationship=RelationshipTypes.CONTAINS,
                        user=self.current_task.user)
            # Filename is just the md5 of the data...
            self._add_result("file_added", filename, {'md5': filename})
        return
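
The carve pattern above slices the raw sample bytes and hands the selected region to handle_file() as a new Sample. A minimal sketch of just the offset handling, using plain Python slice semantics (carve() below is illustrative, not part of CRITs):

def carve(filedata, start_offset, end_offset):
    # Start must be 0 or higher; a positive end must lie beyond start.
    if start_offset < 0 or (end_offset > 0 and start_offset > end_offset):
        raise ValueError("Invalid offsets.")
    # Ordinary slice semantics apply: a negative end counts back from the end
    # of the file, and an end of 0 yields an empty slice (the "No data." case).
    return filedata[start_offset:end_offset]

assert carve(b"ABCDEF", 2, 4) == b"CD"
assert carve(b"ABCDEF", 1, -1) == b"BCDE"
assert carve(b"ABCDEF", 1, 0) == b""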
Code Example #2
File: __init__.py Project: TheDr1ver/crits_services
    def run(self, obj, config):
        try:
            self._debug("Version: %s" % pefile.__version__ )
            pe = pefile.PE(data=obj.filedata.read())
        except pefile.PEFormatError as e:
            self._error("A PEFormatError occurred: %s" % e)
            return
        self._get_sections(pe)
        self._get_pehash(pe)

        if hasattr(pe, 'DIRECTORY_ENTRY_RESOURCE'):
            self._dump_resource_data("ROOT",
                                     pe.DIRECTORY_ENTRY_RESOURCE,
                                     pe,
                                     config['resource'])
            for f in self.added_files:
                handle_file(f[0], f[1], obj.source,
                            related_id=str(obj.id),
                            related_type=str(obj._meta['crits_type']),
                            campaign=obj.campaign,
                            method=self.name,
                            relationship=RelationshipTypes.CONTAINED_WITHIN,
                            user=self.current_task.username)
                rsrc_md5 = hashlib.md5(f[1]).hexdigest()
                self._add_result("file_added", f[0], {'md5': rsrc_md5})
        else:
            self._debug("No resources")

        if hasattr(pe, 'DIRECTORY_ENTRY_IMPORT'):
            self._get_imports(pe)
        else:
            self._debug("No imports")

        if hasattr(pe, 'DIRECTORY_ENTRY_EXPORT'):
            self._get_exports(pe)
        else:
            self._debug("No exports")

        if hasattr(pe, 'VS_VERSIONINFO'):
            self._get_version_info(pe)
        else:
            self._debug("No Version information")

        if hasattr(pe, 'DIRECTORY_ENTRY_DEBUG'):
            self._get_debug_info(pe)
        else:
            self._debug("No debug info")

        if hasattr(pe, 'DIRECTORY_ENTRY_TLS'):
            self._get_tls_info(pe)
        else:
            self._debug("No TLS info")

        if callable(getattr(pe, 'get_imphash', None)):
            self._get_imphash(pe)
        else:
            self._debug("pefile does not support get_imphash, upgrade to 1.2.10-139")

        self._get_timestamp(pe)
        self._get_rich_header(pe)
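
The PE metadata service probes pefile's optional attributes before using them: directory attributes only appear on the PE object when the binary actually contains those tables, and get_imphash() only exists in pefile 1.2.10-139 or later (as the snippet's own debug message notes). A small self-contained sketch of that guard pattern (summarize_pe() is illustrative):

import pefile

def summarize_pe(data):
    pe = pefile.PE(data=data)
    info = {}
    # Directory attributes are absent when the table is missing from the binary.
    if hasattr(pe, 'DIRECTORY_ENTRY_IMPORT'):
        info['import_dlls'] = [entry.dll for entry in pe.DIRECTORY_ENTRY_IMPORT]
    # Probe for get_imphash() rather than assuming a new enough pefile.
    if callable(getattr(pe, 'get_imphash', None)):
        info['imphash'] = pe.get_imphash()
    return info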
Code Example #3
File: __init__.py Project: apolkosnik/crits_services
    def run(self, obj, config):
        start_offset = config['start']
        end_offset = config['end']
        user = self.current_task.user

        if not user.has_access_to(SampleACL.WRITE):
            self._info("User does not have permission to add Samples to CRITs")
            self._add_result("Service Canceled", "User does not have permission to add Samples to CRITs")
            return

        # Start must be 0 or higher. If end is greater than zero it must
        # also be greater than start_offset.
        if start_offset < 0 or (end_offset > 0 and start_offset > end_offset):
            self._error("Invalid offsets.")
            return

        data = obj.filedata.read()[start_offset:end_offset]
        if not data:
            self._error("No data.")
        else:
            filename = hashlib.md5(data).hexdigest()
            handle_file(filename, data, obj.source,
                        related_id=str(obj.id),
                        related_type=str(obj._meta['crits_type']),
                        campaign=obj.campaign,
                        source_method=self.name,
                        relationship=RelationshipTypes.CONTAINS,
                        user=self.current_task.user)
            # Filename is just the md5 of the data...
            self._add_result("file_added", filename, {'md5': filename})
        return
Code Example #4
    def run(self, obj, config):
        start_offset = config['start']
        end_offset = config['end']
        # Start must be 0 or higher. If end is greater than zero it must
        # also be greater than start_offset.
        if start_offset < 0 or (end_offset > 0 and start_offset > end_offset):
            self._error("Invalid offsets.")
            return

        data = obj.filedata.read()[start_offset:end_offset]
        if not data:
            self._error("No data.")
        else:
            filename = hashlib.md5(data).hexdigest()
            handle_file(filename,
                        data,
                        obj.source,
                        related_id=str(obj.id),
                        related_type=str(obj._meta['crits_type']),
                        campaign=obj.campaign,
                        method=self.name,
                        relationship=RelationshipTypes.CONTAINS,
                        user=self.current_task.username)
            # Filename is just the md5 of the data...
            self._add_result("file_added", filename, {'md5': filename})
        return
Code Example #5
    def run(self, obj, config):
        self.config = config
        self.obj = obj
        data = io.BytesIO(obj.filedata.read())
        swf = bytearray()
        try:
            comp = data.read(3)
            header = data.read(5)
            if comp == 'CWS':
                swf = 'FWS' + header + zlib.decompress(data.read())
            if comp == 'ZWS':
                data.seek(12) # seek to LZMA props
                swf = 'FWS' + header + pylzma.decompress(data.read())
        except Exception as exc:
            self._error("unswf: (%s)." % exc)
            return

        if swf:
            h = md5(str(swf)).hexdigest()
            name = h
            self._info("New file: %s (%d bytes, %s)" % (name, len(swf), h))
            handle_file(name, swf, self.obj.source,
                related_id=str(self.obj.id),
                campaign=self.obj.campaign,
                method=self.name,
                relationship='Related_To',
                user=self.current_task.username)
            self._add_result("file_added", name, {'md5': h})
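
In an SWF file the first three bytes are the signature (FWS uncompressed, CWS zlib-compressed, ZWS LZMA-compressed) and the next five bytes (version plus uncompressed length) are copied through unchanged, so the snippet rebuilds an FWS file by prepending 'FWS' and the header to the decompressed body. The comparisons against 'CWS'/'ZWS' assume Python 2 strings; a rough Python 3 flavored sketch of the zlib case (inflate_swf() is illustrative, LZMA omitted):

import zlib

def inflate_swf(raw):
    # Illustrative only: rebuild an uncompressed (FWS) SWF from a CWS one.
    sig, header, body = raw[:3], raw[3:8], raw[8:]
    if sig == b'FWS':
        return raw                              # already uncompressed
    if sig == b'CWS':
        return b'FWS' + header + zlib.decompress(body)
    raise ValueError("Unrecognised SWF signature: %r" % sig)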
Code Example #6
    def _process_dropped(self, dropped):
        # Dropped is a byte string of the .tar.bz2 file
        self._debug("Processing dropped files.")
        self._notify()

        # TODO: Error handling
        t = tarfile.open(mode='r:bz2', fileobj=BytesIO(dropped))

        ignored = self.config.get('ignored_files', '').split('\r\n')
        for f in t.getmembers():
            if not f.isfile():
                continue

            data = t.extractfile(f).read()
            name = os.path.basename(f.name)
            if any([fnmatch.fnmatch(name, x) for x in ignored]):
                self._debug("Ignoring file: %s" % name)
                continue

            h = md5(data).hexdigest()
            self._info("New file: %s (%d bytes, %s)" % (name, len(data), h))
            handle_file(name,
                        data,
                        self.obj.source,
                        related_id=str(self.obj.id),
                        related_type=str(self.obj._meta['crits_type']),
                        campaign=self.obj.campaign,
                        method=self.name,
                        relationship=RelationshipTypes.RELATED_TO,
                        user=self.current_task.username)
            self._add_result("file_added", name, {'md5': h})

        t.close()
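
The ignored_files setting is treated as a CRLF-separated list of shell-style patterns matched with fnmatch against each archive member's basename. A small helper showing the same filter in isolation (is_ignored() is illustrative, not part of the service):

import fnmatch
import os

def is_ignored(member_name, ignored_config):
    # ignored_config is the raw config value, e.g. "*.tmp\r\ndesktop.ini"
    patterns = ignored_config.split('\r\n')
    name = os.path.basename(member_name)
    return any(fnmatch.fnmatch(name, pattern) for pattern in patterns)

assert is_ignored("dropped/desktop.ini", "*.tmp\r\ndesktop.ini")
assert not is_ignored("dropped/payload.exe", "*.tmp\r\ndesktop.ini")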
Code Example #7
    def run(self, obj, config):
        self.config = config
        self.obj = obj
        user = self.current_task.user
        data = io.BytesIO(obj.filedata.read())
        swf = bytearray()
        try:
            comp = data.read(3)
            header = data.read(5)
            if comp == 'CWS':
                swf = 'FWS' + header + zlib.decompress(data.read())
            if comp == 'ZWS':
                data.seek(12) # seek to LZMA props
                swf = 'FWS' + header + pylzma.decompress(data.read())
        except Exception as exc:
            self._error("unswf: (%s)." % exc)
            return

        if swf:
            h = md5(str(swf)).hexdigest()
            name = h
            if not user.has_access_to(SampleACL.WRITE):
                self._info("User does not have permission to add Samples to CRITs")
                self._add_result("Extract Canceled", "User does not have permission to add Samples to CRITs")
                return
                
            self._info("New file: %s (%d bytes, %s)" % (name, len(swf), h))
            handle_file(name, swf, self.obj.source,
                related_id=str(self.obj.id),
                related_type=str(self.obj._meta['crits_type']),
                campaign=self.obj.campaign,
                method=self.name,
                relationship=RelationshipTypes.RELATED_TO,
                user=self.current_task.user)
            self._add_result("file_added", name, {'md5': h})
Code Example #8
File: __init__.py Project: TheDr1ver/crits_services
    def run(self, obj, config):
        self.config = config
        self.obj = obj
        data = io.BytesIO(obj.filedata.read())
        swf = bytearray()
        try:
            comp = data.read(3)
            header = data.read(5)
            if comp == 'CWS':
                swf = 'FWS' + header + zlib.decompress(data.read())
            if comp == 'ZWS':
                data.seek(12) # seek to LZMA props
                swf = 'FWS' + header + pylzma.decompress(data.read())
        except Exception as exc:
            self._error("unswf: (%s)." % exc)
            return

        if swf:
            h = md5(str(swf)).hexdigest()
            name = h
            self._info("New file: %s (%d bytes, %s)" % (name, len(swf), h))
            handle_file(name, swf, self.obj.source,
                related_id=str(self.obj.id),
                related_type=str(self.obj._meta['crits_type']),
                campaign=self.obj.campaign,
                method=self.name,
                relationship=RelationshipTypes.RELATED_TO,
                user=self.current_task.username)
            self._add_result("file_added", name, {'md5': h})
Code Example #9
File: __init__.py Project: optionstvm/crits_services
    def _process_dropped(self, dropped):
        # Dropped is a byte string of the .tar.bz2 file
        self._debug("Processing dropped files.")
        self._notify()

        # TODO: Error handling
        t = tarfile.open(mode='r:bz2', fileobj=StringIO(dropped))

        ignored = self.config.get('ignored_files', '').split('\r\n')
        for f in t.getmembers():
            if not f.isfile():
                continue

            data = t.extractfile(f).read()
            name = os.path.basename(f.name)
            if any([fnmatch.fnmatch(name, x) for x in ignored]):
                self._debug("Ignoring file: %s" % name)
                continue

            h = md5(data).hexdigest()
            self._info("New file: %s (%d bytes, %s)" % (name, len(data), h))
            handle_file(name, data, self.obj.source,
                        parent_id=str(self.obj.id),
                        campaign=self.obj.campaign,
                        method=self.name,
                        relationship='Related_To',
                        user=self.current_task.username)
            self._add_result("file_added", name, {'md5': h})

        t.close()
Code Example #10
    def run(self, obj, config):
        try:
            pe = pefile.PE(data=obj.filedata.read())
        except pefile.PEFormatError as e:
            self._error("A PEFormatError occurred: %s" % e)
            return
        self._get_sections(pe)
        self._get_pehash(pe)

        if hasattr(pe, 'DIRECTORY_ENTRY_RESOURCE'):
            self._dump_resource_data("ROOT", pe.DIRECTORY_ENTRY_RESOURCE, pe,
                                     config['resource'])
            for f in self.added_files:
                handle_file(f[0],
                            f[1],
                            obj.source,
                            related_id=str(obj.id),
                            campaign=obj.campaign,
                            method=self.name,
                            relationship='Extracted_From',
                            user=self.current_task.username)
                rsrc_md5 = hashlib.md5(f[1]).hexdigest()
                self._add_result("file_added", f[0], {'md5': rsrc_md5})
        else:
            self._debug("No resources")

        if hasattr(pe, 'DIRECTORY_ENTRY_IMPORT'):
            self._get_imports(pe)
        else:
            self._debug("No imports")

        if hasattr(pe, 'DIRECTORY_ENTRY_EXPORT'):
            self._get_exports(pe)
        else:
            self._debug("No exports")

        if hasattr(pe, 'VS_VERSIONINFO'):
            self._get_version_info(pe)
        else:
            self._debug("No Version information")

        if hasattr(pe, 'DIRECTORY_ENTRY_DEBUG'):
            self._get_debug_info(pe)
        else:
            self._debug("No debug info")

        if hasattr(pe, 'DIRECTORY_ENTRY_TLS'):
            self._get_tls_info(pe)
        else:
            self._debug("No TLS info")

        if callable(getattr(pe, 'get_imphash', None)):
            self._get_imphash(pe)
        else:
            self._debug(
                "pefile does not support get_imphash, upgrade to 1.2.10-139")

        self._get_timestamp(pe)
        self._get_rich_header(pe)
Code Example #11
File: __init__.py Project: apolkosnik/crits_services
    def run(self, obj, config):
        """
        Begin plugin processing
        """
        #Load data from file as libchm will only accept a filename
        with self._write_to_file() as chm_file:
            try:
                self.chmparse.LoadCHM(chm_file)
            except Exception as e:
                raise e

        #Conduct analysis
        result = self.analyze()

        #Handle output of results
        if 'obj_items_summary' in result.keys():
            obj_items_summary = result.pop('obj_items_summary')

        #General CHM info
        for key, value in result.items():
            self._add_result('chm_overview', key, {'value': value})

        if config['chm_items']:
            #Data and details of each object/page in the CHM
            user = self.current_task.user
            if user.has_access_to(SampleACL.WRITE):
                for f in self.added_files:
                    handle_file(f[0], f[3], obj.source,
                                related_id=str(obj.id),
                                related_type=str(obj._meta['crits_type']),
                                campaign=obj.campaign,
                                source_method=self.name,
                                relationship=RelationshipTypes.CONTAINED_WITHIN,
                                user=self.current_task.user)
                    self._add_result("chm_items_added", f[0], {'size': f[1],'md5': f[2]})
            else:
                self._info("User does not have permission to add samples to CRITs.")
                self._add_result("chm_items_added","Items found but user does not have permission to add Samples to CRITs.")
        else:
            #Details of each object/page in the CHM
            for object_item in obj_items_summary:
                self._add_result('chm_items', object_item.get('name'),
                            {'size': object_item.get('size'),
                            'md5': object_item.get('md5')})

        #Detection results from CHM analysis
        for object_item in obj_items_summary:
            if object_item.get('detection'):
                for detection in object_item.get('detection'):
                    self._add_result('chm_detection', detection, {'chm_item': object_item.get('name')})

        #URLs and IPs found in CHM
        for object_item in obj_items_summary:
            if object_item.get('urls'):
                for url in object_item.get('urls'):
                    self._add_result('chm_urls', url, {'chm_item': object_item.get('name')})

        #Close file in memory
        self.chmparse.CloseCHM()
Code Example #12
File: __init__.py Project: svr6/crits_services
    def run(self, obj, config):
        self.config = config
        self.obj = obj
        # self._doit(obj.filedata.read()[start_offset:end_offset], filters, analyze )
        # self._add_result('SEPLQ', "" % output, {'Value': output})
        datacq = bytearray(obj.filedata.read())
        (metaoutcsv, data) = ExtractPayloads(datacq)
        h = md5(data).hexdigest()
        name = h
        metaout = metaoutcsv.split(",")
        name = ntpath.basename(str(metaout[0]))
        fields = (
            "Filename",
            "Num Failed Remediations",
            "Num Legacy Infections",
            "Num Remediations",
            "Num Snapshots",
            "Record Type",
            "Remediations Type",
            "Restore To Orig Locn Unavailable",
            "Session Item Count",
            "Session Item Index",
            "Structure Version",
            "Extra Info (QF Time)",
            "Extra Info (SND Time)",
            "Extra Info (Unique ID",
            "Extra Info (VBin Time)",
            "Flags",
            "Full Path and LFN",
            "Log Line",
            "Record ID",
            "Size",
            "Storage Instance ID",
            "Storage Key",
            "Storage Name",
            "WDescription",
            "Timestamp (in local time)",
        )

        self._info("name: %s" % name)
        n = 0
        for i in metaout:
            if i and i != 0 and i != "0" and i != "":
                self._info("meta: %s" % str(i))
                self._add_result("SEPLQ", str(i))
            n += 1
        self._info("New file: %s (%d bytes, %s)" % (name, len(data), h))
        handle_file(
            name,
            io.BytesIO(data).read(),
            self.obj.source,
            related_id=str(obj.id),
            campaign=obj.campaign,
            method=self.name,
            relationship=RelationshipTypes.RELATED_TO,
            user=self.current_task.username,
        )
        self._add_result("file_added", name, {"md5": h})
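
The fields tuple above lists the column names for the SEPLQ metadata CSV but is never referenced afterwards; the loop only reports the bare values. A short sketch of how the names could be paired with the parsed values (label_metadata() is hypothetical, not in the service):

def label_metadata(metaoutcsv, fields):
    # Pair each CSV value with its field name, skipping empty/zero entries.
    values = metaoutcsv.split(",")
    return {name: value for name, value in zip(fields, values)
            if value and value != "0"}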
Code Example #13
    def run(self, obj, config):
        self.config = config
        self.obj = obj
        #self._doit(obj.filedata.read()[start_offset:end_offset], filters, analyze )
        #self._add_result('SEPLQ', "" % output, {'Value': output})
        datacq = bytearray(obj.filedata.read())
        (metaoutcsv, data) = ExtractPayloads(datacq)
        h = md5(data).hexdigest()
        name = h
        metaout = metaoutcsv.split(",")
        name = ntpath.basename(str(metaout[0]))
        fields = (
            "Filename",
            "Num Failed Remediations",
            "Num Legacy Infections",
            "Num Remediations",
            "Num Snapshots",
            "Record Type",
            "Remediations Type",
            "Restore To Orig Locn Unavailable",
            "Session Item Count",
            "Session Item Index",
            "Structure Version",
            "Extra Info (QF Time)",
            "Extra Info (SND Time)",
            "Extra Info (Unique ID",
            "Extra Info (VBin Time)",
            "Flags",
            "Full Path and LFN",
            "Log Line",
            "Record ID",
            "Size",
            "Storage Instance ID",
            "Storage Key",
            "Storage Name",
            "WDescription",
            "Timestamp (in local time)",
        )

        self._info("name: %s" % name)
        n = 0
        for i in metaout:
            if i and i != 0 and i != "0" and i != "":
                self._info("meta: %s" % str(i))
                self._add_result('SEPLQ', str(i))
            n += 1
        self._info("New file: %s (%d bytes, %s)" % (name, len(data), h))
        handle_file(name, io.BytesIO(data).read(), self.obj.source,
                related_id=str(obj.id),
                related_type=str(obj._meta['crits_type']),
                campaign=obj.campaign,
                method=self.name,
                relationship=RelationshipTypes.RELATED_TO,
                user=self.current_task.username)
        self._add_result("file_added", name, {'md5': h})
Code Example #14
File: __init__.py Project: sysnap2012/crits_services
    def run(self, obj, config):
        upx_path = config.get("upx_path", "")

        user = self.current_task.user
        if not user.has_access_to(SampleACL.WRITE):
            self._info("User does not have permission to add Samples to CRITs")
            self._add_result(
                "Unpacking Canceled",
                "User does not have permission to add Samples to CRITs")
            return

        # _write_to_file() will delete this file at the end of the "with" block.
        with self._write_to_file() as tmp_file:
            (working_dir, filename) = os.path.split(tmp_file)
            args = [upx_path, "-q", "-d", filename]

            # UPX does not generate a lot of output, so we should not have to
            # worry about this hanging because the buffer is full
            proc = subprocess.Popen(args,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT,
                                    cwd=working_dir)

            # Note that we are redirecting STDERR to STDOUT, so we can ignore
            # the second element of the tuple returned by communicate().
            output = proc.communicate()[0]
            self._debug(output)

            if proc.returncode:
                # UPX return code of 1 indicates an error.
                # UPX return code of 2 indicates a warning (usually, the
                # file was not packed by UPX).
                msg = ("UPX could not unpack the file.")
                self._warning(msg)
                return

            with open(tmp_file, "rb") as newfile:
                data = newfile.read()

            #TODO: check to make sure file was modified (new MD5), indicating
            # it was actually unpacked
            md5 = hashlib.md5(data).hexdigest()
            filename = md5 + ".upx"
            handle_file(filename,
                        data,
                        obj.source,
                        related_id=str(obj.id),
                        related_type=str(obj._meta['crits_type']),
                        campaign=obj.campaign,
                        method=self.name,
                        relationship=RelationshipTypes.PACKED_FROM,
                        user=self.current_task.user)
            # Filename is just the md5 of the data...
            self._add_result("file_added", filename, {'md5': filename})
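
The UPX service relies on the framework's self._write_to_file() context manager, which puts the sample on disk for the duration of the with block and removes it afterwards (as the snippet's own comment notes). As a rough idea of what such a helper involves, a hypothetical sketch, not the CRITs implementation:

import os
import tempfile
from contextlib import contextmanager

@contextmanager
def write_to_file(data):
    # Hypothetical stand-in: persist the bytes to a temporary file, hand back
    # its path, and always clean up when the with block exits.
    fd, path = tempfile.mkstemp()
    try:
        with os.fdopen(fd, 'wb') as f:
            f.write(data)
        yield path
    finally:
        os.remove(path)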
Code Example #15
File: __init__.py Project: sysnap2012/crits_services
 def run(self, obj, config):
     oparser = OfficeParser(obj.filedata.read())
     oparser.parse_office_doc()
     added_files = []
     user = self.current_task.user
     if not oparser.office_header.get('maj_ver'):
         self._error("Could not parse file as an office document")
         return
     self._add_result(
         'office_header', '%d.%d' % (oparser.office_header.get('maj_ver'),
                                     oparser.office_header.get('min_ver')))
     for curr_dir in oparser.directory:
         result = {
             'md5':
             curr_dir.get('md5', ''),
             'size':
             curr_dir.get('stream_size', 0),
             'mod_time':
             oparser.timestamp_string(curr_dir['modify_time'])[1],
             'create_time':
             oparser.timestamp_string(curr_dir['create_time'])[1],
         }
         name = curr_dir['norm_name'].decode('ascii', errors='ignore')
         self._add_result('directory', name, result)
         if user.has_access_to(SampleACL.WRITE) and config.get(
                 'save_streams', 0) == 1 and 'data' in curr_dir:
             handle_file(name,
                         curr_dir['data'],
                         obj.source,
                         related_id=str(obj.id),
                         related_type=str(obj._meta['crits_type']),
                         campaign=obj.campaign,
                         source_method=self.name,
                         relationship=RelationshipTypes.CONTAINED_WITHIN,
                         user=self.current_task.user)
             stream_md5 = hashlib.md5(curr_dir['data']).hexdigest()
             added_files.append((name, stream_md5))
     for prop_list in oparser.properties:
         for prop in prop_list['property_list']:
             prop_summary = oparser.summary_mapping.get(
                 binascii.unhexlify(prop['clsid']), None)
             prop_name = prop_summary.get('name', 'Unknown')
             for item in prop['properties']['properties']:
                 result = {
                     'name': item.get('name', 'Unknown'),
                     'value': item.get('date', item['value']),
                     'result': item.get('result', ''),
                 }
                 self._add_result('doc_meta', prop_name, result)
     for f in added_files:
         self._add_result("file_added", f[0], {'md5': f[1]})
Code Example #16
    def process_dropped_binaries(self, dropped_binaries, incident_report):
        archive = zipfile.ZipFile(io.BytesIO(dropped_binaries))
        archive.setpassword("infected")
        names = archive.namelist()

        for name in names:
            binary = archive.read(name)
            ret = handle_file(name, binary, self.obj.source,
                related_id=str(self.obj.id),
                related_type=str(self.obj._meta['crits_type']),
                campaign=self.obj.campaign,
                source_method=self.name,
                relationship=RelationshipTypes.DROPPED_BY,
                user=self.current_task.user,
                is_return_only_md5=False)

            if ret['success']:
                md5 = hashlib.md5(binary).hexdigest()
                files_in_report = [f for f in incident_report.findall("./dropped/file/md5/..")
                                              if f.find('md5').text == md5.upper()]

                for report_file in files_in_report:
                    data = {
                        'md5': md5,
                        'malicious': getattr(report_file.find('malicious'), 'text', 'unknown'),
                    }
                    filepath = getattr(report_file.find('name'), 'text', name)

                    self._add_result("Dropped Files", filepath, data)
            else:
                self._warning(ret["message"])

        self._notify()
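
process_dropped_binaries() opens the dropped-files archive with the conventional "infected" password before registering each member via handle_file(). Note that the snippet passes the password as a str, which is Python 2 style; on Python 3, ZipFile.setpassword() expects bytes, roughly as in this illustrative sketch:

import io
import zipfile

def extract_members(blob, password=b"infected"):
    # Illustrative: read every member of a password-protected ZIP held in memory.
    archive = zipfile.ZipFile(io.BytesIO(blob))
    archive.setpassword(password)  # bytes on Python 3
    return {name: archive.read(name) for name in archive.namelist()}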
Code Example #17
File: handlers.py Project: theevilbit/crits_services
def import_object(request, type_, id_):
    setup_access()
    if type_ == "Threat Descriptors":
        obj = ThreatDescriptor(id=id_)
        obj.details(
            fields=[f for f in ThreatDescriptor._default_fields if f not in
                    (td.PRIVACY_MEMBERS, td.SUBMITTER_COUNT, td.METADATA)]
        )
        itype = getattr(IndicatorTypes, obj.get(td.TYPE))
        ithreat_type = getattr(IndicatorThreatTypes, obj.get(td.THREAT_TYPE))
        results = handle_indicator_ind(
            obj.get(td.RAW_INDICATOR),
            "ThreatExchange",
            itype,
            ithreat_type,
            None,
            request.user.username,
            method="ThreatExchange Service",
            reference="id: %s, owner: %s, share_level: %s" % (obj.get(td.ID),
                                                              obj.get(td.OWNER)['name'],
                                                              obj.get(td.SHARE_LEVEL)),
            add_domain=True,
            add_relationship=True,
            confidence=build_ci(obj.get(td.CONFIDENCE)),
            description=obj.get(td.DESCRIPTION)
        )
        return results
    elif type_ == "Malware Analyses":
        obj = Malware(id=id_)
        obj.details(
            fields=[f for f in Malware._fields if f not in
                    (td.PRIVACY_MEMBERS, td.METADATA)]
        )
        filename = obj.get(m.MD5)
        try:
            data = obj.rf
        except:
            data = None
        results = handle_file(
            filename,
            data,
            "ThreatExchange",
            method="ThreatExchange Service",
            reference="id: %s, share_level: %s" % (obj.get(td.ID),
                                                   obj.get(td.SHARE_LEVEL)),
            user=request.user.username,
            md5_digest=obj.get(m.MD5),
            sha1_digest=obj.get(m.SHA1),
            sha256_digest=obj.get(m.SHA256),
            size=obj.get(m.SAMPLE_SIZE),
            mimetype=obj.get(m.SAMPLE_TYPE),
        )
        return {'success': True,
                'md5': results}
    else:
        return {'success': False,
                'message': "Invalid Type"}
    return {'success': True}
Code Example #18
def import_object(request, type_, id_):
    setup_access()
    if type_ == "Threat Descriptors":
        obj = ThreatDescriptor(id=id_)
        obj.details(fields=[
            f for f in ThreatDescriptor._default_fields
            if f not in (td.PRIVACY_MEMBERS, td.METADATA)
        ])
        itype = get_mapped_itype(obj.get(td.TYPE))
        if itype is None:
            return {
                'success': False,
                'message': "Descriptor type is not supported by CRITs"
            }
        ithreat_type = getattr(IndicatorThreatTypes, obj.get(td.THREAT_TYPE))
        results = handle_indicator_ind(
            obj.get(td.RAW_INDICATOR),
            "ThreatExchange",
            itype,
            ithreat_type,
            None,
            request.user.username,
            method="ThreatExchange Service",
            reference="id: %s, owner: %s, share_level: %s" % (obj.get(
                td.ID), obj.get(td.OWNER)['name'], obj.get(td.SHARE_LEVEL)),
            add_domain=True,
            add_relationship=True,
            confidence=build_ci(obj.get(td.CONFIDENCE)),
            description=obj.get(td.DESCRIPTION))
        return results
    elif type_ == "Malware Analyses":
        obj = Malware(id=id_)
        obj.details(
            fields=[f for f in Malware._fields if f not in (m.METADATA,)])
        filename = obj.get(m.MD5)
        try:
            data = obj.rf
        except:
            data = None
        results = handle_file(
            filename,
            data,
            "ThreatExchange",
            method="ThreatExchange Service",
            reference="id: %s, share_level: %s" %
            (obj.get(td.ID), obj.get(td.SHARE_LEVEL)),
            user=request.user.username,
            md5_digest=obj.get(m.MD5),
            sha1_digest=obj.get(m.SHA1),
            sha256_digest=obj.get(m.SHA256),
            size=obj.get(m.SAMPLE_SIZE),
            mimetype=obj.get(m.SAMPLE_TYPE),
        )
        return {'success': True, 'md5': results}
    else:
        return {'success': False, 'message': "Invalid Type"}
    return {'success': True}
Code Example #19
File: __init__.py Project: apolkosnik/crits_services
 def run(self, obj, config):
     oparser = OfficeParser(obj.filedata.read())
     oparser.parse_office_doc()
     added_files = []
     user = self.current_task.user
     if not oparser.office_header.get('maj_ver'):
         self._error("Could not parse file as an office document")
         return
     self._add_result('office_header', '%d.%d' %
         (oparser.office_header.get('maj_ver'), oparser.office_header.get('min_ver')))
     for curr_dir in oparser.directory:
         result = {
             'md5':          curr_dir.get('md5', ''),
             'size':         curr_dir.get('stream_size', 0),
             'mod_time':     oparser.timestamp_string(curr_dir['modify_time'])[1],
             'create_time':  oparser.timestamp_string(curr_dir['create_time'])[1],
         }
         name = curr_dir['norm_name'].decode('ascii', errors='ignore')
         self._add_result('directory', name, result)
         if user.has_access_to(SampleACL.WRITE) and config.get('save_streams', 0) == 1 and 'data' in curr_dir:
             handle_file(name, curr_dir['data'], obj.source,
                         related_id=str(obj.id),
                         related_type=str(obj._meta['crits_type']),
                         campaign=obj.campaign,
                         source_method=self.name,
                         relationship=RelationshipTypes.CONTAINED_WITHIN,
                         user=self.current_task.user)
             stream_md5 = hashlib.md5(curr_dir['data']).hexdigest()
             added_files.append((name, stream_md5))
     for prop_list in oparser.properties:
         for prop in prop_list['property_list']:
             prop_summary = oparser.summary_mapping.get(binascii.unhexlify(prop['clsid']), None)
             prop_name = prop_summary.get('name', 'Unknown')
             for item in prop['properties']['properties']:
                 result = {
                     'name':             item.get('name', 'Unknown'),
                     'value':            item.get('date', item['value']),
                     'result':           item.get('result', ''),
                 }
                 self._add_result('doc_meta', prop_name, result)
     for f in added_files:
         self._add_result("file_added", f[0], {'md5': f[1]})
Code Example #20
File: __init__.py Project: TheDr1ver/crits_services
    def run(self, obj, config):
        upx_path = config.get("upx_path", "")

        # _write_to_file() will delete this file at the end of the "with" block.
        with self._write_to_file() as tmp_file:
            (working_dir, filename) = os.path.split(tmp_file)
            args = [upx_path, "-q", "-d", filename]

            # UPX does not generate a lot of output, so we should not have to
            # worry about this hanging because the buffer is full
            proc = subprocess.Popen(args, stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT, cwd=working_dir)

            # Note that we are redirecting STDERR to STDOUT, so we can ignore
            # the second element of the tuple returned by communicate().
            output = proc.communicate()[0]
            self._debug(output)

            if proc.returncode:
                # UPX return code of 1 indicates an error.
                # UPX return code of 2 indicates a warning (usually, the
                # file was not packed by UPX).
                msg = ("UPX could not unpack the file.")
                self._warning(msg)
                return

            with open(tmp_file, "rb") as newfile:
                data = newfile.read()

            #TODO: check to make sure file was modified (new MD5), indicating
            # it was actually unpacked
            md5 = hashlib.md5(data).hexdigest()
            filename = md5 + ".upx"
            handle_file(filename, data, obj.source,
                        related_id=str(obj.id),
                        related_type=str(obj._meta['crits_type']),
                        campaign=obj.campaign,
                        method=self.name,
                        relationship=RelationshipTypes.PACKED_FROM,
                        user=self.current_task.username)
            # Filename is just the md5 of the data...
            self._add_result("file_added", filename, {'md5': filename})
Code Example #21
File: __init__.py Project: apolkosnik/crits_services
    def _process_dropped(self, dropped):
        # Dropped is a byte string of the .tar.bz2 file
        self._debug("Processing dropped files.")
        self._notify()
        user = get_user_info(str(self.current_task.user))
        if not user.has_access_to(SampleACL.WRITE):
            self._info("User does not have permission to add samples to CRITs")
            self._add_result("Processing Dropped Files Cancelled", "User does not have permission to add Samples to CRITs")
            return

        # TODO: Error handling
        t = tarfile.open(mode='r:bz2', fileobj=BytesIO(dropped))

        ignored = self.config.get('ignored_files', '').split('\r\n')
        for f in t.getmembers():
            if not f.isfile():
                continue

            data = t.extractfile(f).read()
            name = os.path.basename(f.name)
            if any([fnmatch.fnmatch(name, x) for x in ignored]):
                self._debug("Ignoring file: %s" % name)
                continue

            h = md5(data).hexdigest()
            self._info("New file: %s (%d bytes, %s)" % (name, len(data), h))

            handle_file(name, data, self.obj.source,
                        related_id=str(self.obj.id),
                        related_type=str(self.obj._meta['crits_type']),
                        campaign=self.obj.campaign,
                        source_method=self.name,
                        relationship=RelationshipTypes.RELATED_TO,
                        user=self.current_task.user)
            self._add_result("file_added", name, {'md5': h})

        t.close()
Code Example #22
File: __init__.py Project: bushalo/crits_services
    def run(self, obj, config):
        start_offset = config['start']
        end_offset = config['end']
        # Start must be 0 or higher. If end is greater than zero it must
        # also be greater than start_offset.
        if start_offset < 0 or (end_offset > 0 and start_offset > end_offset):
            self._error("Invalid offsets.")
            return

        data = obj.filedata.read()[start_offset:end_offset]
        if not data:
            self._error("No data.")
        else:
            filename = hashlib.md5(data).hexdigest()
            handle_file(filename, data, obj.source,
                        related_id=str(obj.id),
                        campaign=obj.campaign,
                        method=self.name,
                        relationship=RelationshipTypes.CONTAINS,
                        user=self.current_task.username)
            # Filename is just the md5 of the data...
            self._add_result("file_added", filename, {'md5': filename})
        return
Code Example #23
File: parsers.py Project: bushalo/crits_services
    def parse_cybox_object(self, cbx_obj, description='', ind_id=None):
        """
        Parse a CybOX object from a STIX doc. An object can contain
        multiple related_objects, which in turn can have their own
        related_objects, so this handles those recursively.

        :param cbx_obj: The CybOX object to parse.
        :type cbx_obj: A CybOX object.
        :param description: Parent-level (e.g. Observable) description.
        :type description: str
        :param ind_id: The ID of a parent STIX Indicator.
        :type ind_id: str
        """

        # check for missing attributes
        if not cbx_obj or not cbx_obj.properties:
            if cbx_obj.idref: # just a reference, so nothing to parse
                return
            else:
                cbx_id = getattr(cbx_obj, 'id_', 'None')
                self.failed.append(("No valid object_properties was found!",
                                    "Observable (%s)" % cbx_id,
                                    cbx_id)) # note for display in UI
                return

        # Don't parse if already been parsed
        # This is for artifacts that are related to CybOX File Objects
        if cbx_obj.id_ in self.parsed:
            return

        try: # try to create CRITs object from Cybox Object
            analyst = self.source_instance.analyst
            item = cbx_obj.properties
            val = cbx_obj.id_
            if isinstance(item, Address) and not ind_id:
                if item.category in ('cidr', 'ipv4-addr', 'ipv4-net',
                                     'ipv4-netmask', 'ipv6-addr',
                                     'ipv6-net', 'ipv6-netmask'):
                    imp_type = "IP"
                    for value in item.address_value.values:
                        val = str(value).strip()
                        if self.preview:
                            res = None
                        else:
                            iptype = get_crits_ip_type(item.category)
                            if iptype:
                                res = ip_add_update(val,
                                                    iptype,
                                                    [self.source],
                                                    analyst=analyst,
                                                    is_add_indicator=True)
                            else:
                                res = {'success': False, 'reason': 'No IP Type'}
                        self.parse_res(imp_type, val, cbx_obj, res, ind_id)
            if (not ind_id and (isinstance(item, DomainName) or
                (isinstance(item, URI) and item.type_ == 'Domain Name'))):
                imp_type = "Domain"
                for val in item.value.values:
                    if self.preview:
                        res = None
                    else:
                        res = upsert_domain(str(val),
                                            [self.source],
                                            username=analyst)
                    self.parse_res(imp_type, str(val), cbx_obj, res, ind_id)

            elif isinstance(item, HTTPSession):
                imp_type = "RawData"
                val = cbx_obj.id_
                try:
                    c_req = item.http_request_response[0].http_client_request
                    hdr = c_req.http_request_header
                    if hdr.raw_header:
                        data = hdr.raw_header.value
                        title = "HTTP Header from STIX: %s" % self.package.id_
                        method = self.source_instance.method
                        ref = self.source_instance.reference
                        if self.preview:
                            res = None
                            val = title
                        else:
                            res = handle_raw_data_file(data,
                                                    self.source.name,
                                                    user=analyst,
                                                    description=description,
                                                    title=title,
                                                    data_type="HTTP Header",
                                                    tool_name="STIX",
                                                    tool_version=None,
                                                    method=method,
                                                    reference=ref)
                    else:
                        imp_type = "Indicator"
                        ind_type = "HTTP Request Header Fields - User-Agent"
                        val = hdr.parsed_header.user_agent.value
                        val = ','.join(val) if isinstance(val, list) else val
                        if self.preview:
                            res = None
                        else:
                            res = handle_indicator_ind(val,
                                                    self.source,
                                                    ind_type,
                                                    IndicatorThreatTypes.UNKNOWN,
                                                    IndicatorAttackTypes.UNKNOWN,
                                                    analyst,
                                                    add_relationship=True,
                                                    description=description)
                except:
                    msg = "Unsupported use of 'HTTPSession' object."
                    res = {'success': False, 'reason': msg}

                self.parse_res(imp_type, val, cbx_obj, res, ind_id)
            elif isinstance(item, WhoisEntry):
                # Not sure where else to put this
                imp_type = "RawData"
                val = cbx_obj.id_
                if item.remarks:
                    data = item.remarks.value
                    title = "WHOIS Entry from STIX: %s" % self.package.id_
                    if self.preview:
                        res = None
                        val = title
                    else:
                        res = handle_raw_data_file(data,
                                                self.source.name,
                                                user=analyst,
                                                description=description,
                                                title=title,
                                                data_type="Text",
                                                tool_name="WHOIS",
                                                tool_version=None,
                                                method=self.source_instance.method,
                                                reference=self.source_instance.reference)
                else:
                    msg = "Unsupported use of 'WhoisEntry' object."
                    res = {'success': False, 'reason': msg}

                self.parse_res(imp_type, val, cbx_obj, res, ind_id)
            elif isinstance(item, Artifact):
                # Not sure if this is right, and I believe these can be
                # encoded in a couple different ways.
                imp_type = "RawData"
                val = cbx_obj.id_
                rawdata = item.data.decode('utf-8')
                # TODO: find out proper ways to determine title, datatype,
                #       tool_name, tool_version
                title = "Artifact for Event: STIX Document %s" % self.package.id_
                if self.preview:
                    res = None
                    val = title
                else:
                    res = handle_raw_data_file(rawdata,
                                            self.source.name,
                                            user=analyst,
                                            description=description,
                                            title=title,
                                            data_type="Text",
                                            tool_name="STIX",
                                            tool_version=None,
                                            method=self.source_instance.method,
                                            reference=self.source_instance.reference)
                self.parse_res(imp_type, val, cbx_obj, res, ind_id)
            elif (isinstance(item, File) and
                  item.custom_properties and
                  item.custom_properties[0].name == "crits_type" and
                  item.custom_properties[0]._value == "Certificate"):
                imp_type = "Certificate"
                val = str(item.file_name)
                data = None
                if self.preview:
                    res = None
                else:
                    for rel_obj in item.parent.related_objects:
                        if isinstance(rel_obj.properties, Artifact):
                            data = rel_obj.properties.data
                            self.parsed.append(rel_obj.id_)
                    res = handle_cert_file(val,
                                           data,
                                           self.source,
                                           user=analyst,
                                           description=description)
                self.parse_res(imp_type, val, cbx_obj, res, ind_id)
            elif isinstance(item, File) and self.has_network_artifact(item):
                imp_type = "PCAP"
                val = str(item.file_name)
                data = None
                if self.preview:
                    res = None
                else:
                    for rel_obj in item.parent.related_objects:
                        if (isinstance(rel_obj.properties, Artifact) and
                            rel_obj.properties.type_ == Artifact.TYPE_NETWORK):
                            data = rel_obj.properties.data
                            self.parsed.append(rel_obj.id_)
                    res = handle_pcap_file(val,
                                           data,
                                           self.source,
                                           user=analyst,
                                           description=description)
                self.parse_res(imp_type, val, cbx_obj, res, ind_id)
            elif isinstance(item, File):
                imp_type = "Sample"
                md5 = item.md5
                if md5:
                    md5 = md5.lower()
                val = str(item.file_name or md5)
                # add sha1/sha256/ssdeep once handle_file supports it
                size = item.size_in_bytes
                data = None
                if item.file_path:
                    path = "File Path: " + str(item.file_path)
                    description += "\n" + path
                for rel_obj in item.parent.related_objects:
                    if (isinstance(rel_obj.properties, Artifact) and
                        rel_obj.properties.type_ == Artifact.TYPE_FILE):
                        data = rel_obj.properties.data
                        self.parsed.append(rel_obj.id_)
                if not md5 and not data and val and val != "None":
                    imp_type = "Indicator"
                    if self.preview:
                        res = None
                    else:
                        res = handle_indicator_ind(val,
                                                   self.source,
                                                   "Win File",
                                                   IndicatorThreatTypes.UNKNOWN,
                                                   IndicatorAttackTypes.UNKNOWN,
                                                   analyst,
                                                   add_domain=True,
                                                   add_relationship=True,
                                                   description=description)
                elif md5 or data:
                    if self.preview:
                        res = None
                    else:
                        res = handle_file(val,
                                          data,
                                          self.source,
                                          user=analyst,
                                          md5_digest=md5,
                                          is_return_only_md5=False,
                                          size=size,
                                          description=description)
                else:
                    val = cbx_obj.id_
                    msg = "CybOX 'File' object has no MD5, data, or filename"
                    res = {'success': False, 'reason': msg}
                self.parse_res(imp_type, val, cbx_obj, res, ind_id)
            elif isinstance(item, EmailMessage):
                imp_type = 'Email'
                id_list = []
                data = {}
                val = cbx_obj.id_
                get_attach = False
                data['raw_body'] = str(item.raw_body)
                data['raw_header'] = str(item.raw_header)
                data['helo'] = str(item.email_server)
                if item.header:
                    data['subject'] = str(item.header.subject)
                    if item.header.date:
                        data['date'] = item.header.date.value
                    val = "Date: %s, Subject: %s" % (data.get('date', 'None'),
                                                     data['subject'])
                    data['message_id'] = str(item.header.message_id)
                    data['sender'] = str(item.header.sender)
                    data['reply_to'] = str(item.header.reply_to)
                    data['x_originating_ip'] = str(item.header.x_originating_ip)
                    data['x_mailer'] = str(item.header.x_mailer)
                    data['boundary'] = str(item.header.boundary)
                    data['from_address'] = str(item.header.from_)
                    if item.header.to:
                        data['to'] = [str(r) for r in item.header.to.to_list()]

                if data.get('date'): # Email TLOs must have a date
                    data['source'] = self.source.name
                    data['source_method'] = self.source_instance.method
                    data['source_reference'] = self.source_instance.reference
                    if self.preview:
                        res = None
                    else:
                        res = handle_email_fields(data,
                                                  analyst,
                                                  "STIX")
                    self.parse_res(imp_type, val, cbx_obj, res, ind_id)
                    if not self.preview and res.get('status'):
                        id_list.append(cbx_obj.id_) # save ID for atchmnt rels
                        get_attach = True
                else: # Can't be an Email TLO, so save fields
                    for x, key in enumerate(data):
                        if data[key] and data[key] != "None":
                            if key in ('raw_header', 'raw_body'):
                                if key == 'raw_header':
                                    title = "Email Header from STIX Email: %s"
                                    d_type = "Email Header"
                                else:
                                    title = "Email Body from STIX Email: %s"
                                    d_type = "Email Body"
                                imp_type = 'RawData'
                                title = title % cbx_obj.id_
                                if self.preview:
                                    res = None
                                else:
                                    res = handle_raw_data_file(data[key],
                                                               self.source,
                                                               analyst,
                                                               description,
                                                               title,
                                                               d_type,
                                                               "STIX",
                                                               self.stix_version)
                                self.parse_res(imp_type, title, cbx_obj,
                                               res, ind_id)
                            elif key == 'to':
                                imp_type = 'Target'
                                for y, addr in enumerate(data[key]):
                                    tgt_dict = {'email_address': addr}
                                    if self.preview:
                                        res = None
                                    else:
                                        res = upsert_target(tgt_dict, analyst)
                                        if res['success']:
                                            get_attach = True
                                    tmp_obj = copy(cbx_obj)
                                    tmp_obj.id_ = '%s-%s-%s' % (cbx_obj.id_,
                                                                x, y)
                                    self.parse_res(imp_type, addr, tmp_obj,
                                                   res, ind_id)
                                    self.ind2obj.setdefault(cbx_obj.id_,
                                                            []).append(tmp_obj.id_)
                                    id_list.append(tmp_obj.id_)
                            else:
                                imp_type = 'Indicator'
                                if key in ('sender', 'reply_to', 'from_address'):
                                    ind_type = "Address - e-mail"
                                elif 'ip' in key:
                                    ind_type = "Address - ipv4-addr"
                                elif key == 'raw_body':
                                    ind_type = "Email Message"
                                else:
                                    ind_type = "String"
                                if self.preview:
                                    res = None
                                else:
                                    res = handle_indicator_ind(data[key],
                                                          self.source,
                                                          ind_type,
                                                          IndicatorThreatTypes.UNKNOWN,
                                                          IndicatorAttackTypes.UNKNOWN,
                                                          analyst,
                                                          add_domain=True,
                                                          add_relationship=True,
                                                          description=description)
                                    if res['success']:
                                        get_attach = True
                                tmp_obj = copy(cbx_obj)
                                tmp_obj.id_ = '%s-%s' % (cbx_obj.id_, x)
                                self.parse_res(imp_type, data[key], tmp_obj,
                                               res, ind_id)
                                self.ind2obj.setdefault(cbx_obj.id_,
                                                        []).append(tmp_obj.id_)
                                id_list.append(tmp_obj.id_)

                if not self.preview:
                    # Setup relationships between all Email attributes
                    for oid in id_list:
                        for oid2 in id_list:
                            if oid != oid2:
                                self.relationships.append((oid,
                                                           RelationshipTypes.RELATED_TO,
                                                           oid2, "High"))

                    # Should check for attachments and add them here.
                    if get_attach and item.attachments:
                        for attach in item.attachments:
                            rel_id = attach.to_dict()['object_reference']
                            for oid in id_list:
                                self.relationships.append((oid,
                                                           RelationshipTypes.CONTAINS,
                                                           rel_id, "High"))

            else: # try to parse all other possibilities as Indicator
                imp_type = "Indicator"
                val = cbx_obj.id_
                c_obj = make_crits_object(item)

                # Ignore what was already caught above
                if (ind_id or c_obj.object_type not in IPTypes.values()):
                    ind_type = c_obj.object_type
                    for val in [str(v).strip() for v in c_obj.value if v]:
                        if ind_type:
                            # handle domains mislabeled as URLs
                            if c_obj.object_type == 'URI' and '/' not in val:
                                ind_type = "Domain"

                            if self.preview:
                                res = None
                            else:
                                res = handle_indicator_ind(val,
                                                        self.source,
                                                        ind_type,
                                                        IndicatorThreatTypes.UNKNOWN,
                                                        IndicatorAttackTypes.UNKNOWN,
                                                        analyst,
                                                        add_domain=True,
                                                        add_relationship=True,
                                                        description=description)
                            self.parse_res(imp_type, val, cbx_obj, res, ind_id)

        except Exception, e: # probably caused by cybox object we don't handle
            self.failed.append((e.message,
                                "%s (%s)" % (imp_type, val),
                                cbx_obj.id_)) # note for display in UI
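The nested loop near the end of the excerpt relates every stored email-attribute ID to every other, emitting one (source, relationship type, target, confidence) tuple per ordered pair. A minimal standalone sketch of that pattern, with placeholder IDs and a plain-string stand-in for RelationshipTypes.RELATED_TO:

from itertools import permutations

RELATED_TO = "Related To"              # stand-in for RelationshipTypes.RELATED_TO
id_list = ["obs-1", "obs-2", "obs-3"]  # placeholder object IDs

# one relationship per ordered pair, mirroring the nested for-loops above
relationships = [(a, RELATED_TO, b, "High") for a, b in permutations(id_list, 2)]
print(relationships)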
コード例 #24
0
def chopshop_carver(pcap_md5, options, analyst):
    # Make sure we can find ChopShop
    sc = get_config('ChopShop')
    if not sc:
        return {
            'success': False,
            'message': 'Could not find ChopShop service.'
        }

    shop_path = "%s/shop" % str(sc['basedir'])
    if not os.path.exists(shop_path):
        return {
            'success': False,
            'message': "ChopShop shop path does not exist."
        }

    sys.path.append(shop_path)
    import ChopLib as CL
    if StrictVersion(str(CL.VERSION)) < StrictVersion('4.0'):
        return {'success': False, 'message': 'Need ChopShop 4.0 or newer'}

    # Until we have an smtp_extractor in ChopShop we have to resort
    # to (ab)using payloads to dump the entire TCP stream and letting
    # handle_eml() process everything. We also use the payloads module
    # for handling raw carves. If a user wants to do SMTP and raw
    # simultaneously it won't work because we can't distinguish one
    # payloads module from another.
    if options.get('raw', False) and options.get('smtp', False):
        return {
            'success': False,
            'message': "Can not process SMTP and raw simultaneously."
        }

    # Make sure we have a PCAP to work with
    pcap = PCAP.objects(md5=pcap_md5).first()
    if not pcap:
        return {'success': False, 'message': "No PCAP found."}
    pcap_data = pcap.filedata.read()
    if not pcap_data:
        return {
            'success': False,
            'message': "Could not get PCAP from GridFS: %s" % pcap_md5
        }

    source = pcap['source'][0]['name']  # XXX: This kind of sucks...

    # Create module string to pass to ChopShop
    modules = []
    if options.get('http_resp', False) or options.get('http_req', False):
        modules.append("http | http_extractor")

    if options.get('smtp', False) or options.get('raw', False):
        # ChopShop really needs an smtp_extractor, but there's no good
        # capability to do that yet. Maybe one day I'll build one. :)
        # For now, just use payloads and let handle_eml() sort it out.
        #
        # Raw carving works exactly the same way, just post-processed
        # differently.
        modules.append("payloads -b")

    if not modules:
        return {'success': False, 'message': "No modules specified."}

    mod_string = ';'.join(modules)
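    # e.g. with both HTTP extraction and raw/SMTP carving enabled, mod_string
    # becomes "http | http_extractor;payloads -b"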

    from ChopLib import ChopLib
    from ChopUi import ChopUi

    choplib = ChopLib()
    chopui = ChopUi()

    choplib.base_dir = str(sc['basedir'])

    choplib.modules = mod_string

    chopui.jsonout = jsonhandler
    choplib.jsonout = True

    # ChopShop (because of pynids) needs to read a file off disk.
    # Write the pcap data to a temporary file.
    temp_pcap = tempfile.NamedTemporaryFile(delete=False)
    temp_pcap.write(pcap_data)
    temp_pcap.close()

    choplib.filename = temp_pcap.name
    chopui.bind(choplib)
    chopui.start()

    if chopui.jsonclass is None:
        os.unlink(temp_pcap.name)
        chopui.join()
        choplib.finish()
        choplib.join()
        return {
            'success': False,
            'message': 'Lost race condition in chopui. Try again.'
        }

    # ChopUI must be started before the jsonhandler class is instantiated.
    # Tell the class what we are looking for now that it exists.
    chopui.jsonclass.parse_options(options)

    choplib.start()

    while chopui.is_alive():
        time.sleep(.1)

    chopui.join()
    choplib.finish()
    choplib.join()

    os.unlink(temp_pcap.name)

    message = ''

    # Grab any carved HTTP bodies.
    for (md5_digest, (name, blob)) in chopui.jsonclass.http_files.items():
        if handle_file(name,
                       blob,
                       source,
                       related_md5=pcap_md5,
                       user=analyst,
                       method='ChopShop Filecarver',
                       md5_digest=md5_digest,
                       related_type='PCAP'):
            # Specifically not using name here as I don't want to deal
            # with sanitizing it
            message += "Saved HTTP body: <a href=\"%s\">%s</a><br />" % (
                reverse('crits.samples.views.detail', args=[md5_digest
                                                            ]), md5_digest)
        else:
            message += "Failed to save file %s." % md5_digest

    # Grab any carved SMTP returns.
    for blob in chopui.jsonclass.smtp_returns.values():
        ret = handle_eml(blob,
                         source,
                         None,
                         analyst,
                         'ChopShop FileCarver',
                         related_id=pcap.id,
                         related_type='PCAP',
                         relationship_type=RelationshipTypes.RELATED_TO)
        if not ret['status']:
            message += ret['reason']
            continue

        message += "Saved email: <a href=\"%s\">%s</a><br />%i attachment(s)<br />" % (
            reverse('crits.emails.views.email_detail', args=[
                ret['object'].id
            ]), ret['object'].id, len(ret['attachments'].keys()))

        for md5_digest in ret['attachments'].keys():
            message += "<a href=\"%s\">%s</a><br />" % (reverse(
                'crits.samples.views.detail', args=[md5_digest]), md5_digest)

    # Handle raw returns.
    for id_, blob in chopui.jsonclass.raw_returns.items():
        md5_digest = handle_file(id_,
                                 blob,
                                 source,
                                 related_md5=pcap_md5,
                                 user=analyst,
                                 method='ChopShop Filecarver',
                                 related_type='PCAP')
        if md5_digest:
            message += "Saved raw %s: <a href=\"%s\">%s</a><br />" % (
                id_, reverse('crits.samples.views.detail',
                             args=[md5_digest]), md5_digest)
        else:
            message += "Failed to save raw %s." % md5_digest

    # It's possible to have no files here if nothing matched.
    # Still return True as there were no problems.
    if not message:
        message = 'No files found.'
    return {'success': True, 'message': message}
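A minimal usage sketch of chopshop_carver, assuming a PCAP with the given MD5 already exists in CRITs; the MD5 value, option flags, and analyst name below are placeholders:

options = {
    'http_req': True,    # carve HTTP request bodies
    'http_resp': True,   # carve HTTP response bodies
    'smtp': False,       # reassemble SMTP sessions and hand them to handle_eml()
    'raw': False,        # carve raw TCP payloads
}
result = chopshop_carver('d41d8cd98f00b204e9800998ecf8427e', options, 'analyst')
if not result['success']:
    print(result['message'])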
コード例 #25
0
ファイル: __init__.py プロジェクト: lolnate/crits_services
    def run(self, obj, config):
        """
        Begin plugin processing.
        """
        #Load data from file as libchm will only accept a filename
        with self._write_to_file() as chm_file:
            try:
                self.chmparse.LoadCHM(chm_file)
            except Exception as e:
                raise e

        #Conduct analysis
        result = self.analyze()

        #Handle output of results (default to an empty list so later loops are safe)
        obj_items_summary = result.pop('obj_items_summary', [])

        #General CHM info
        for key, value in result.items():
            self._add_result('chm_overview', key, {'value': value})

        if config['chm_items']:
            #Data and details of each object/page in the CHM
            for f in self.added_files:
                handle_file(f[0],
                            f[3],
                            obj.source,
                            related_id=str(obj.id),
                            campaign=obj.campaign,
                            method=self.name,
                            relationship='Extracted_From',
                            user=self.current_task.username)
                self._add_result("chm_items_added", f[0], {
                    'size': f[1],
                    'md5': f[2]
                })
        else:
            #Details of each object/page in the CHM
            for object_item in obj_items_summary:
                self._add_result('chm_items', object_item.get('name'), {
                    'size': object_item.get('size'),
                    'md5': object_item.get('md5')
                })

        #Detection results from CHM analysis
        for object_item in obj_items_summary:
            if object_item.get('detection'):
                for detection in object_item.get('detection'):
                    self._add_result('chm_detection', detection,
                                     {'chm_item': object_item.get('name')})

        #URLs and IPs found in CHM
        for object_item in obj_items_summary:
            if object_item.get('urls'):
                for url in object_item.get('urls'):
                    self._add_result('chm_urls', url,
                                     {'chm_item': object_item.get('name')})

        #Close file in memory
        self.chmparse.CloseCHM()
コード例 #26
0
ファイル: parsers.py プロジェクト: svr6/crits_services
    def parse_observables(self, observables):
        """
        Parse list of observables in STIX doc.

        :param observables: List of STIX observables.
        :type observables: list
        """

        analyst = self.source_instance.analyst
        for obs in observables: # for each STIX observable
            if not obs.object_ or not obs.object_.properties:
                self.failed.append(("No valid object_properties was found!",
                                    type(obs).__name__,
                                    obs.id_)) # note for display in UI
                continue
            try: # try to create CRITs object from observable
                item = obs.object_.properties
                if isinstance(item, Address):
                    if item.category in ('cidr', 'ipv4-addr', 'ipv4-net',
                                         'ipv4-netmask', 'ipv6-addr',
                                         'ipv6-net', 'ipv6-netmask'):
                        imp_type = "IP"
                        for value in item.address_value.values:
                            ip = str(value).strip()
                            iptype = get_crits_ip_type(item.category)
                            if iptype:
                                res = ip_add_update(ip,
                                                    iptype,
                                                    [self.source],
                                                    analyst=analyst,
                                                    is_add_indicator=True)
                                self.parse_res(imp_type, obs, res)
                if isinstance(item, DomainName):
                    imp_type = "Domain"
                    for value in item.value.values:
                        res = upsert_domain(str(value),
                                            [self.source],
                                            username=analyst)
                        self.parse_res(imp_type, obs, res)
                elif isinstance(item, Artifact):
                    # Not sure if this is right, and I believe these can be
                    # encoded in a couple different ways.
                    imp_type = "RawData"
                    rawdata = item.data.decode('utf-8')
                    description = "None"
                    # TODO: find out proper ways to determine title, datatype,
                    #       tool_name, tool_version
                    title = "Artifact for Event: STIX Document %s" % self.package.id_
                    res = handle_raw_data_file(rawdata,
                                            self.source.name,
                                            user=analyst,
                                            description=description,
                                            title=title,
                                            data_type="Text",
                                            tool_name="STIX",
                                            tool_version=None,
                                            method=self.source_instance.method,
                                            reference=self.source_instance.reference)
                    self.parse_res(imp_type, obs, res)
                elif (isinstance(item, File) and
                      item.custom_properties and
                      item.custom_properties[0].name == "crits_type" and
                      item.custom_properties[0]._value == "Certificate"):
                    imp_type = "Certificate"
                    description = "None"
                    filename = str(item.file_name)
                    data = None
                    for obj in item.parent.related_objects:
                        if isinstance(obj.properties, Artifact):
                            data = obj.properties.data
                    res = handle_cert_file(filename,
                                           data,
                                           self.source,
                                           user=analyst,
                                           description=description)
                    self.parse_res(imp_type, obs, res)
                elif isinstance(item, File) and self.has_network_artifact(item):
                    imp_type = "PCAP"
                    description = "None"
                    filename = str(item.file_name)
                    data = None
                    for obj in item.parent.related_objects:
                        if (isinstance(obj.properties, Artifact) and
                            obj.properties.type_ == Artifact.TYPE_NETWORK):
                            data = obj.properties.data
                    res = handle_pcap_file(filename,
                                           data,
                                           self.source,
                                           user=analyst,
                                           description=description)
                    self.parse_res(imp_type, obs, res)
                elif isinstance(item, File):
                    imp_type = "Sample"
                    filename = str(item.file_name)
                    md5 = item.md5
                    data = None
                    for obj in item.parent.related_objects:
                        if (isinstance(obj.properties, Artifact) and
                            obj.properties.type_ == Artifact.TYPE_FILE):
                            data = obj.properties.data
                    res = handle_file(filename,
                                      data,
                                      self.source,
                                      user=analyst,
                                      md5_digest=md5,
                                      is_return_only_md5=False)
                    self.parse_res(imp_type, obs, res)
                elif isinstance(item, EmailMessage):
                    imp_type = "Email"
                    data = {}
                    data['source'] = self.source.name
                    data['source_method'] = self.source_instance.method
                    data['source_reference'] = self.source_instance.reference
                    data['raw_body'] = str(item.raw_body)
                    data['raw_header'] = str(item.raw_header)
                    data['helo'] = str(item.email_server)
                    if item.header:
                        data['message_id'] = str(item.header.message_id)
                        data['subject'] = str(item.header.subject)
                        data['sender'] = str(item.header.sender)
                        data['reply_to'] = str(item.header.reply_to)
                        data['x_originating_ip'] = str(item.header.x_originating_ip)
                        data['x_mailer'] = str(item.header.x_mailer)
                        data['boundary'] = str(item.header.boundary)
                        data['from_address'] = str(item.header.from_)
                        data['date'] = item.header.date.value
                        if item.header.to:
                            data['to'] = [str(r) for r in item.header.to.to_list()]
                    res = handle_email_fields(data,
                                            analyst,
                                            "STIX")
                    # Should check for attachments and add them here.
                    self.parse_res(imp_type, obs, res)
                    if res.get('status') and item.attachments:
                        for attach in item.attachments:
                            rel_id = attach.to_dict()['object_reference']
                            self.relationships.append((obs.id_,
                                                       "Contains",
                                                       rel_id, "High"))
                else: # try to parse all other possibilities as Indicator
                    imp_type = "Indicator"
                    obj = make_crits_object(item)
                    if obj.object_type == 'Address':
                        # This was already caught above
                        continue
                    else:
                        ind_type = obj.object_type
                        for value in obj.value:
                            if value and ind_type:
                                res = handle_indicator_ind(value.strip(),
                                                        self.source,
                                                        ind_type,
                                                        IndicatorThreatTypes.UNKNOWN,
                                                        IndicatorAttackTypes.UNKNOWN,
                                                        analyst,
                                                        add_domain=True,
                                                        add_relationship=True)
                                self.parse_res(imp_type, obs, res)
            except Exception, e: # probably caused by cybox object we don't handle
                self.failed.append((e.message,
                                    type(item).__name__,
                                    item.parent.id_)) # note for display in UI
コード例 #27
0
    def run(self, obj, config):
        key = config.get('vt_api_key', '')
        url = config.get('vt_download_url', '')
        sizeLimit = config.get('size_limit', '')
        replace = config.get('replace_sample', False)
        do_triage = config.get('run_triage', False)

        user = self.current_task.user
        sample = Sample.objects(md5=obj.md5).first()
        if not sample:
            sample = Sample()
            sample.md5 = obj.md5
        self._info("Checking if binary already exists in CRITs.")
        sample.discover_binary()

        if sample.filedata and not replace:  # we already have this binary and the 'Replace' option is off
            self._info(
                "CRITs already has this binary. Enable the 'Replace' option to overwrite with data from VirusTotal."
            )
            self._add_result("Download Canceled",
                             "Binary already exists in CRITs.")
            return

        if not user.has_access_to(SampleACL.WRITE):
            self._info("User does not have permission to add Samples to CRITs")
            self._add_result(
                "Download Canceled",
                "User does not have permission to add Samples to CRITs")
            return

        parameters = urllib.urlencode({"hash": obj.md5, "apikey": key})
        if settings.HTTP_PROXY:
            proxy = urllib2.ProxyHandler({
                'http': settings.HTTP_PROXY,
                'https': settings.HTTP_PROXY
            })
            opener = urllib2.build_opener(proxy)
            urllib2.install_opener(opener)

        try:
            req = url + "?" + parameters
            self._info(
                "Requesting binary with md5 '{0}' from VirusTotal.".format(
                    obj.md5))
            request = urllib2.Request(req)
            response = urllib2.urlopen(request)
            size = response.info().getheaders("Content-Length")[0]
            self._info("Binary size: {0} bytes".format(size))

            if int(size) > sizeLimit:  # Check if within size limit
                self._error(
                    "Binary size is {0} bytes, which is greater than maximum of {1} bytes. This limit can be changed in options."
                    .format(size, sizeLimit))
                self._add_result(
                    "Download Aborted",
                    "Match found, but binary is larger than maximum size limit."
                )
                return

            data = response.read()
        except urllib2.HTTPError as e:
            if e.code == 404:
                self._info(
                    "No results were returned. Either VirusTotal does not have the requested binary, or the request URL is incorrect."
                )
                self._add_result(
                    "Not Found",
                    "Binary was not found in the VirusTotal database")
            elif e.code == 403:
                self._error("Download forbidden. {0}".format(e))
                self._add_result(
                    "Download Canceled",
                    "CRITs was forbidden from downloading the binary.")
            else:
                self._error("An HTTP Error occurred: {0}".format(e))
            return
        except Exception as e:
            logger.error("VirusTotal: Failed connection ({0})".format(e))
            self._error("Failed to get data from VirusTotal: {0}".format(e))
            return

        if data:  # Retrieved some data from VT
            if replace:
                try:
                    self._info(
                        "Replace = True. Deleting any previous binary with md5 {0}"
                        .format(obj.md5))
                    sample.filedata.delete()
                except Exception as e:
                    logger.error(
                        "VirusTotal: Error deleting existing binary ({0})".
                        format(e))
                    self._error("Failed to delete existing binary")
            self._info("Adding new binary to CRITs.")

            try:
                handle_file(
                    filename=obj.md5,
                    data=data,
                    source="VirusTotal",
                    source_reference="Binary downloaded from VT based on MD5",
                    user=self.current_task.user,
                    source_method="VirusTotal Download Service",
                    md5_digest=obj.md5)
            except Exception as e:
                logger.error(
                    "VirusTotal: Sample creation failed ({0})".format(e))
                self._error("Failed to create new Sample: {0}".format(e))
                return
            if do_triage:
                self._info("Running sample triage for data-reliant services.")
                sample.reload()
                run_triage(sample, user="******")
            self._add_result(
                "Download Successful",
                "Binary was successfully downloaded from VirusTotal")
        else:
            self._error("No data returned by VirusTotal.")
コード例 #28
0
ファイル: handlers.py プロジェクト: cvdsouza/crits_services
def import_object(request, type_, id_):
    setup_access()
    user = request.user

    if type_ == "Threat Descriptors":
        obj = ThreatDescriptor(id=id_)
        obj.details(
            fields=[f for f in ThreatDescriptor._default_fields if f not in
                    (td.PRIVACY_MEMBERS, td.METADATA)]
        )
        itype = get_mapped_itype(obj.get(td.TYPE))
        tags = obj.get(td.TAGS)
        if itype is None:
            return {'success': False,
                    'message': "Descriptor type is not supported by CRITs"}

        if not user.has_access_to(IndicatorACL.WRITE):
            return {'success': False,
                    'message': "User does not have permission to add Indicators to CRITs"}

        ithreat_type = getattr(IndicatorThreatTypes, obj.get(td.THREAT_TYPE))
        results = handle_indicator_ind(
            obj.get(td.RAW_INDICATOR),
            "ThreatExchange",
            itype,
            IndicatorThreatTypes.UNKNOWN,
            IndicatorAttackTypes.UNKNOWN,
            request.user.username,
            method="ThreatExchange Service",
            reference="id: %s, owner: %s, share_level: %s" % (obj.get(td.ID),
                                                              obj.get(td.OWNER)['name'],
                                                              obj.get(td.SHARE_LEVEL)),
            add_domain=True,
            add_relationship=True,
            confidence=build_ci(obj.get(td.CONFIDENCE)),
            description=obj.get(td.DESCRIPTION),
            bucket_list=tags
        )
        return results
    elif type_ == "Malware Analyses":
        if not user.has_access_to(SampleACL.WRITE):
            return {'success': False,
                    'message': "User does not have permission to add Sample to CRITs"}
        obj = Malware(id=id_)
        obj.details(
            fields=[f for f in Malware._fields if f not in
                    (m.METADATA,)]
        )
        filename = obj.get(m.MD5)
        tags = obj.get(m.TAGS)
        try:
            data = obj.rf
        except:
            data = None
        results = handle_file(
            filename,
            data,
            "ThreatExchange",
            method="ThreatExchange Service",
            reference="id: %s, share_level: %s" % (obj.get(td.ID),
                                                   obj.get(td.SHARE_LEVEL)),
            user=request.user.username,
            md5_digest = obj.get(m.MD5),
            sha1_digest = obj.get(m.SHA1),
            sha256_digest = obj.get(m.SHA256),
            size = obj.get(m.SAMPLE_SIZE),
            mimetype = obj.get(m.SAMPLE_TYPE),
            bucket_list=tags,
        )
        return {'success': True,
                'md5': results}
    else:
        return {'success': False,
                'message': "Invalid Type"}
    return {'success': True}
コード例 #29
0
    def run(self, obj, config):
        key = config.get("malshare_api_key", "")
        self.obj = obj

        if obj.filedata.read():
            logger.info("File already exists, no need to download")
            self._info("File already exists, no need to download")
            return

        if not key:
            logger.error("No valid MalShare API key found")
            self._error("No valid MalShare API key found")
            return
        
        #Download URL: https://malshare.com/api.php?api_key=[API_KEY]&action=getfile&hash=[HASH]

        parameters = {"api_key": key, "action": "getfile", "hash": obj.md5}
        data = urllib.urlencode(parameters)
        req = urllib2.Request("http://malshare.com/api.php", data)

        logger.info("Connecting MalShare to download sample")
        self._info("Connecting MalShare to download sample")

        # Execute the request (urllib2 sends it as a POST because form data is supplied)
        if settings.HTTP_PROXY:
            proxy = urllib2.ProxyHandler({'http': settings.HTTP_PROXY})
            opener = urllib2.build_opener(proxy)
            urllib2.install_opener(opener)
        try:
            response = urllib2.urlopen(req)
            sample_file = response.read()
        except Exception as e:
            logger.error("MalShare: network connection error (%s)" % e)
            self._error("Network connection error checking MalShare (%s)" % e)
            return

        logger.info("Download completed")
        self._info("Download completed")

        if sample_file.startswith("Sample not found by hash"):
            logger.error("Sample was not found on MalShare")
            self._error("Sample was not found on MalShare")
            return
        else:
            logger.info("Sample was found on MalShare!")
            self._info("Sample was found on MalShare!")

        #Verify file's MD5
        if (hashlib.md5(sample_file).hexdigest() != obj.md5):
            logger.error("Error while downloading sample from MalShare, MD5 missmatch")
            self._error("Error while downloading sample from MalShare, MD5 missmatch")
            return
        else:
            logger.info("MD5 verification successfull!")
            self._info("MD5 verification successfull!")
            #Write file
            #Filename is just the md5
            filename = obj.md5
            handle_file(filename, sample_file, obj.source,
                        related_id=str(obj.id),
                        related_type=str(obj._meta['crits_type']),
                        campaign=obj.campaign,
                        method=self.name,
                        user=self.current_task.username)
            self._add_result("file_downloaded", filename, {'md5': filename})
コード例 #30
0
def parse_result(self, result_extract, acl_write, md5_parent):
    stream_md5 = None
    if type(result_extract) is dict:
        #recursively extract each embedded file
        if 'FileMD5' in result_extract and result_extract['FileMD5']:
            tmp_dict = {}
            b_yara = False
            b_ioc = False
            #extract info
            no_info = [
                'ExtractInfo', 'ContainedObjects', 'Yara', 'PathFile',
                'FileMD5', 'RootFileType', 'TempDirExtract'
            ]
            for key, value in result_extract.iteritems():
                if not key in no_info:
                    self._add_result(
                        'File: ' + result_extract['FileMD5'] + ' - Info', key,
                        {'value': str(value)})
            #extract yara match
            if result_extract['Yara']:
                for item_v in result_extract['Yara']:
                    for key, value in item_v.iteritems():
                        self._add_result(
                            'File: ' + result_extract['FileMD5'] +
                            ' - Signatures yara matched', key, value)
                        b_yara = True
            #extract IOC
            if result_extract['ExtractInfo']:
                for item_v in result_extract['ExtractInfo']:
                    for key, value in item_v.iteritems():
                        self._add_result(
                            'File: ' + result_extract['FileMD5'] +
                            ' - Extract potential IOC', key,
                            {'value': str(value)})
                        b_ioc = True
            #add_sample
            if 'PathFile' in result_extract and type(
                    result_extract['PathFile']) is list and len(
                        result_extract['PathFile']) > 0:
                if os.path.isfile(str(result_extract['PathFile'][0])):
                    with open(str(result_extract['PathFile'][0]),
                              'rb') as content_file_tmp:
                        content_tmp = content_file_tmp.read()
                        stream_md5 = hashlib.md5(content_tmp).hexdigest()
                        name = str(stream_md5).decode('ascii', 'ignore')
                        id_ = Sample.objects(md5=stream_md5).only('id').first()
                        if id_:
                            self._info(
                                'Added relationship with existing sample: ' +
                                str(stream_md5))
                            #make relationship
                            id_.add_relationship(
                                rel_item=self.obj,
                                rel_type=RelationshipTypes.CONTAINED_WITHIN,
                                rel_date=datetime.now(),
                                analyst=self.current_task.user.username)
                        else:
                            if acl_write and (
                                    self.config['import_file'] or
                                (self.config['import_file_yara'] and b_yara) or
                                (self.config['import_file_ioc'] and b_ioc)):
                                obj_parent = None
                                if md5_parent:
                                    obj_parent = Sample.objects(
                                        md5=md5_parent).only('id').first()
                                if not obj_parent:
                                    sample = handle_file(
                                        name,
                                        content_tmp,
                                        self.obj.source,
                                        related_id=str(self.obj.id),
                                        related_type=str(
                                            self.obj._meta['crits_type']),
                                        campaign=self.obj.campaign,
                                        source_method=self.name,
                                        relationship=RelationshipTypes.
                                        CONTAINED_WITHIN,
                                        user=self.current_task.user)
                                else:
                                    sample = handle_file(
                                        name,
                                        content_tmp,
                                        obj_parent.source,
                                        related_id=str(obj_parent.id),
                                        related_type=str(
                                            obj_parent._meta['crits_type']),
                                        campaign=obj_parent.campaign,
                                        source_method=self.name,
                                        relationship=RelationshipTypes.
                                        CONTAINED_WITHIN,
                                        user=self.current_task.user)
                                self._info('Add sample ' + str(stream_md5))
                            else:
                                #add IOC if not add sample
                                if self.current_task.user.has_access_to(
                                        IndicatorACL.WRITE) and b_yara:
                                    res = handle_indicator_ind(
                                        stream_md5,
                                        self.obj.source,
                                        IndicatorTypes.MD5,
                                        IndicatorThreatTypes.UNKNOWN,
                                        IndicatorAttackTypes.UNKNOWN,
                                        self.current_task.user,
                                        add_relationship=True,
                                        source_method=self.name,
                                        campaign=self.obj.campaign,
                                        description='Extracted by service ' +
                                        self.name)
                                    self._info('Add indicator md5:' +
                                               str(stream_md5) + ' -- id: ' +
                                               str(res))
            #contains file
            if 'ContainedObjects' in result_extract and type(
                    result_extract['ContainedObjects']
            ) is list and result_extract['ContainedObjects']:
                for item_v in result_extract['ContainedObjects']:
                    if item_v['FileMD5'] and item_v['FileType'] and item_v[
                            'FileSize']:
                        #search if file exist
                        id_ = Sample.objects(
                            md5=str(item_v['FileMD5'])).only('id').first()
                        sample_exist = False
                        ioc_exist = False
                        if id_:
                            sample_exist = True
                        id_ = Indicator.objects(
                            value=str(item_v['FileMD5'])).only('id').first()
                        if id_:
                            ioc_exist = True
                        self._add_result(
                            'File: ' + result_extract['FileMD5'] +
                            ' - Contains md5 files', item_v['FileMD5'], {
                                'type': str(item_v['FileType']),
                                'size': str(item_v['FileSize']),
                                'Exists Sample': str(sample_exist),
                                'Exists IOC md5': str(ioc_exist)
                            })
                for item_v in result_extract['ContainedObjects']:
                    #re do loop for best display result
                    parse_result(self, item_v, acl_write, stream_md5)
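parse_result walks a nested dictionary produced by an external extraction step. A hypothetical input shape, inferred only from the keys this function reads; every value below is a placeholder:

result_extract = {
    'FileMD5': 'd41d8cd98f00b204e9800998ecf8427e',
    'RootFileType': 'zip',
    'PathFile': ['/tmp/extract/stream.bin'],
    'Yara': [{'suspicious_macro': {'score': '8'}}],
    'ExtractInfo': [{'url_found': 'hxxp://example.invalid/payload'}],
    'ContainedObjects': [
        {'FileMD5': '0123456789abcdef0123456789abcdef',
         'FileType': 'exe',
         'FileSize': '4096',
         'PathFile': [],
         'Yara': [],
         'ExtractInfo': [],
         'ContainedObjects': []},
    ],
}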
コード例 #31
0
ファイル: db.py プロジェクト: brentonchang/crits-1
    def finish_task(self, task):
        """
        Finish a task.
        """

        logger.debug("Finishing task %s" % task)
        self.update_task(task)

        obj = class_from_type(task.context.crits_type)
        query = self.get_db_query(task.context)

        sample = obj.objects(__raw__=query).first()

        if task.files:
            logger.debug("Adding samples")
            for f in task.files:
                logger.debug("Adding %s" % f['filename'])
                #TODO: add in backdoor?, user
                from crits.samples.handlers import handle_file
                handle_file(f['filename'], f['data'], sample.source,
                            related_md5=task.context.identifier,
                            campaign=sample.campaign,
                            method=task.service.name,
                            relationship=f['relationship'],
                            user=task.context.username,
                            )
        else:
            logger.debug("No samples to add.")

        if task.certificates:
            logger.debug("Adding certificates")

            for f in task.certificates:
                logger.debug("Adding %s" % f['filename'])
                from crits.certificates.handlers import handle_cert_file
                # XXX: Add campaign from source?
                handle_cert_file(f['filename'], f['data'], sample.source,
                            related_md5=task.context.identifier,
                            related_type=task.context.crits_type,
                            method=task.service.name,
                            relationship=f['relationship'],
                            user=task.context.username,
                            )
        else:
            logger.debug("No certificates to add.")

        if task.pcaps:
            logger.debug("Adding PCAPs")

            for f in task.pcaps:
                logger.debug("Adding %s" % f['filename'])
                from crits.pcaps.handlers import handle_pcap_file
                # XXX: Add campaign from source?
                handle_pcap_file(f['filename'], f['data'], sample.source,
                            related_md5=task.context.identifier,
                            related_type=task.context.crits_type,
                            method=task.service.name,
                            relationship=f['relationship'],
                            user=task.context.username,
                            )
        else:
            logger.debug("No PCAPs to add.")
コード例 #32
0
ファイル: __init__.py プロジェクト: lprat/crits_services
def parse_result(self, result_extract, response_dict, config, md5_parent):
    stream_md5 = None
    user = self.current_task.user
    self.config = config
    acl_write = user.has_access_to(SampleACL.WRITE)
    if type(result_extract) is dict:
        #recursively extract each embedded file
        if 'FileMD5' in result_extract and result_extract['FileMD5']:
            tmp_dict = {}
            b_yara = False
            b_ioc = False
            #extract info
            no_info = [
                'ExtractInfo', 'ContainedObjects', 'Yara', 'PathFile',
                'FileMD5', 'RootFileType', 'TempDirExtract', 'GlobalIOC'
            ]
            for key, value in result_extract.iteritems():
                if not key in no_info:
                    self._add_result(
                        'File: ' + result_extract['FileMD5'] + ' - Info', key,
                        {'value': str(value)})
            #add download info
            if result_extract['FileMD5'] in response_dict:
                self._add_result(
                    'File: ' + result_extract['FileMD5'] + ' - Info',
                    'Download embed file', {'value': sfa_api + vx})
            #GLOBAL IOC
            if 'GlobalIOC' in result_extract and result_extract['GlobalIOC']:
                for key, value in result_extract['GlobalIOC'].iteritems():
                    self._add_result('Global IOC by categories', key,
                                     {'value': str(value)})
            #extract yara match
            if result_extract['Yara']:
                for item_v in result_extract['Yara']:
                    #self._info("Dict:"+str(item_v))
                    for key, value in item_v.iteritems():
                        data = {
                            'description': '',
                            'ioc': '',
                            'tags': '',
                            'score': '0'
                        }
                        for kx, vx in value.iteritems():
                            data[kx] = str(vx)
                        self._add_result(
                            'File: ' + result_extract['FileMD5'] +
                            ' - Signatures yara matched', key, data)
                        score_conf = re.sub("\D", "",
                                            config['import_yara_score'])
                        if acl_write and config[
                                'import_file_yara'] and 'score' in value and int(
                                    value['score']) >= int(score_conf):
                            id_ = Sample.objects(md5=result_extract['FileMD5']
                                                 ).only('id').first()
                            if id_:
                                self._info(
                                    'Added relationship with existing sample: ' +
                                    str(result_extract['FileMD5']))
                                #make relationship
                                id_.add_relationship(
                                    rel_item=self.obj,
                                    rel_type=RelationshipTypes.
                                    CONTAINED_WITHIN,
                                    rel_date=datetime.now(),
                                    analyst=self.current_task.user.username)
                            elif result_extract['FileMD5'] in response_dict:
                                content_tmp = download(
                                    self,
                                    response_dict[result_extract['FileMD5']])
                                if content_tmp:
                                    name = str(result_extract['FileMD5'])
                                    if 'CDBNAME' in result_extract:
                                        name = str(result_extract['CDBNAME'])
                                    obj_parent = None
                                    if md5_parent:
                                        obj_parent = Sample.objects(
                                            md5=md5_parent).only('id').first()
                                    if not obj_parent:
                                        sample = handle_file(
                                            name,
                                            content_tmp,
                                            self.obj.source,
                                            related_id=str(self.obj.id),
                                            related_type=str(
                                                self.obj._meta['crits_type']),
                                            campaign=self.obj.campaign,
                                            source_method=self.name,
                                            relationship=RelationshipTypes.
                                            CONTAINED_WITHIN,
                                            user=self.current_task.user)
                                    else:
                                        sample = handle_file(
                                            name,
                                            content_tmp,
                                            obj_parent.source,
                                            related_id=str(obj_parent.id),
                                            related_type=str(
                                                obj_parent._meta['crits_type']
                                            ),
                                            campaign=obj_parent.campaign,
                                            source_method=self.name,
                                            relationship=RelationshipTypes.
                                            CONTAINED_WITHIN,
                                            user=self.current_task.user)
                                    self._info('Add sample ' + str(name) +
                                               ' - MD5:' +
                                               str(result_extract['FileMD5']))
            #extract IOC
            if result_extract['ExtractInfo']:
                for item_v in result_extract['ExtractInfo']:
                    for key, value in item_v.iteritems():
                        self._add_result(
                            'File: ' + result_extract['FileMD5'] +
                            ' - Extract potential IOC', key,
                            {'value': str(value)})
                        if acl_write and config['import_file_ioc']:
                            id_ = Sample.objects(md5=result_extract['FileMD5']
                                                 ).only('id').first()
                            if id_:
                                self._info(
                                    'Added relationship with existing sample: ' +
                                    str(result_extract['FileMD5']))
                                #make relationship
                                id_.add_relationship(
                                    rel_item=self.obj,
                                    rel_type=RelationshipTypes.
                                    CONTAINED_WITHIN,
                                    rel_date=datetime.now(),
                                    analyst=self.current_task.user.username)
                            elif result_extract['FileMD5'] in response_dict:
                                content_tmp = download(
                                    self,
                                    response_dict[result_extract['FileMD5']])
                                if content_tmp:
                                    name = str(result_extract['FileMD5'])
                                    if 'CDBNAME' in result_extract:
                                        name = str(result_extract['CDBNAME'])
                                    obj_parent = None
                                    if md5_parent:
                                        obj_parent = Sample.objects(
                                            md5=md5_parent).only('id').first()
                                    if not obj_parent:
                                        sample = handle_file(
                                            name,
                                            content_tmp,
                                            self.obj.source,
                                            related_id=str(self.obj.id),
                                            related_type=str(
                                                self.obj._meta['crits_type']),
                                            campaign=self.obj.campaign,
                                            source_method=self.name,
                                            relationship=RelationshipTypes.
                                            CONTAINED_WITHIN,
                                            user=self.current_task.user)
                                    else:
                                        sample = handle_file(
                                            name,
                                            content_tmp,
                                            obj_parent.source,
                                            related_id=str(obj_parent.id),
                                            related_type=str(
                                                obj_parent._meta['crits_type']
                                            ),
                                            campaign=obj_parent.campaign,
                                            source_method=self.name,
                                            relationship=RelationshipTypes.
                                            CONTAINED_WITHIN,
                                            user=self.current_task.user)
                                    self._info('Add sample ' + str(name) +
                                               ' - MD5:' +
                                               str(result_extract['FileMD5']))
            #contains file
            if 'ContainedObjects' in result_extract and type(
                    result_extract['ContainedObjects']
            ) is list and result_extract['ContainedObjects']:
                for item_v in result_extract['ContainedObjects']:
                    if item_v['FileMD5'] and item_v['FileType'] and item_v[
                            'FileSize']:
                        #search if file exist
                        id_ = Sample.objects(
                            md5=str(item_v['FileMD5'])).only('id').first()
                        sample_exist = False
                        ioc_exist = False
                        if id_:
                            sample_exist = True
                            id_.add_relationship(
                                rel_item=self.obj,
                                rel_type=RelationshipTypes.RELATED_TO,
                                #rel_date=datetime.now(),
                                analyst=self.current_task.user.username)
                        id_ = Indicator.objects(
                            value=str(item_v['FileMD5'])).only('id').first()
                        if id_:
                            ioc_exist = True
                            id_.add_relationship(
                                rel_item=self.obj,
                                rel_type=RelationshipTypes.RELATED_TO,
                                #rel_date=datetime.now(),
                                analyst=self.current_task.user.username)
                        self._add_result(
                            'File: ' + result_extract['FileMD5'] +
                            ' - Contains md5 files', item_v['FileMD5'], {
                                'type': str(item_v['FileType']),
                                'size': str(item_v['FileSize']),
                                'Exists Sample': str(sample_exist),
                                'Exists IOC md5': str(ioc_exist)
                            })
                for item_v in result_extract['ContainedObjects']:
                    #re do loop for best display result
                    parse_result(self, item_v, response_dict, config,
                                 stream_md5)
コード例 #33
0
 def run(self, obj, config):
     rparser = RtfParser(obj.filedata.read())
     rparser.parse()
     added_files = []
     if not rparser.features.get('valid_rtf'):
         self._error("Could not parse file as an RTF document")
         return
     props = [
         'rtf_header_version',
         'rtf_generator',
         'ansi_code_page',
         'ansi_code_page_name',
         'deflang',
         'binary_ratio',
     ]
     for prop in props:
         value = rparser.features.get(prop)
          if value is None:
             continue
         result = {
             'name': prop,
             'value': value,
             'result': value,
         }
         self._add_result('rtf_meta', prop, result)
     for (k,v) in rparser.features.get('info', {}).items():
         result = {
             'name': k,
             'value': v,
             'result': v,
         }
         self._add_result('rtf_meta', k, result)
     hashes = [
         'themedata',
         'blipuid',
         'colorschememapping',
         'rsids',
     ]
     for hash_type in hashes:
         items = rparser.features.get(hash_type, [])
         for item in items:
             result = {
                 'name': hash_type,
                 'value': item,
                 'result': item,
             }
             self._add_result('Item Hashes', hash_type, result)
     obj_num = 1
     for obj_info in rparser.features.get('objects', []):
         name = 'Object %d - %s' % (obj_num, obj_info.get('classname', 'Unknown'))
         for (k,v) in obj_info.items():
             val = hex(v) if type(v) == int else v
             result = {
                 'name': k,
                 'value': val,
                 'result': v,
             }
             self._add_result(name, name, result)
         obj_num += 1
     obj_num = 1
     for dstore in rparser.features.get('datastores', []):
         name = 'Datastore %d - %s' % (obj_num, dstore.get('classname', 'Unknown'))
         for (k,v) in dstore.items():
             val = hex(v) if type(v) == int else v
             result = {
                 'name': k,
                 'value': val,
                 'result': v,
             }
             self._add_result(name, name, result)
         obj_num += 1            
     if config.get('save_streams', 0) == 1:
         for i in range(len(rparser.objects)):
             stream_md5 = hashlib.md5(rparser.objects[i]).hexdigest()
             name = "Unknown object"
             for obj_info in rparser.features.get('objects', []):
                 if stream_md5 == obj_info.get('content_md5'):
                     name = "Object - %s" % obj_info.get('classname', 'Unknown')
             for obj_info in rparser.features.get('datastores', []):
                 if stream_md5 == obj_info.get('content_md5'):
                     name = "Object - %s" % obj_info.get('classname', 'Unknown')
             handle_file(
                 name, 
                 rparser.objects[i],
                 obj.source,
                 related_id=str(obj.id),
                 related_type=str(obj._meta['crits_type']),
                 campaign=obj.campaign,
                 method=self.name,
                 relationship=RelationshipTypes.CONTAINED_WITHIN,
                 user=self.current_task.username,
             )
             added_files.append((stream_md5, stream_md5))
     for f in added_files:
         self._add_result("file_added", f[0], {'md5': f[1]})
Code example #34
 def _upload_md5(self):
     print "[+] calling handle_file with md5_digest"
     handle_file(self.test_filename, data='', source=self.sources[0],
                 md5_digest = self.test_md5)
Code example #35
 def _upload_file(self):
     print "[+] calling handle_file with file data"
     handle_file(self.test_filename, self.test_data, self.sources[0])
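
Read together, code examples #34 and #35 show the two calling conventions these tests exercise: pass the raw file data, or pass an empty payload plus md5_digest to reference a binary by hash. A hedged sketch of a wrapper that chooses between them (the wrapper name is invented; the import path follows example #39 below):

from crits.samples.handlers import handle_file

def add_sample(filename, source, data=None, md5_digest=None):
    # Upload raw bytes when we have them; otherwise reference an existing
    # binary by its MD5 with an empty payload, as in example #34.
    if data:
        return handle_file(filename, data, source)
    return handle_file(filename, data='', source=source, md5_digest=md5_digest)
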
Code example #36
File: add_file.py  Project: thelok/crits_services
    def run(self, argv):
        parser = OptionParser()
        parser.add_option("-f", "--file", action="store", dest="filename",
                type="string", help="scanned FILENAME")
        parser.add_option("-s", "--source", action="store",
                dest="source", type="string", help="source")
        parser.add_option("-p", "--parent", action="store", dest="parent",
                type="string", help="parent md5")
        parser.add_option("-P", "--parent-type", action="store", dest="parent_type",
                type="string", default="Sample", help="parent type (Sample, PCAP, ...)")
        parser.add_option("-t", "--trojan", action="store", dest="trojan",
                type="string", help="trojan")
        parser.add_option("-r", "--reference", action="store", dest="reference",
                type="string", help="reference field")
        parser.add_option("-b", "--bucket", action="store", dest="bucket_list",
                type="string", help="bucket list")
        (opts, args) = parser.parse_args(argv)

        md5hash = hashlib.md5()
        if opts.source:
            source = opts.source
        else:
            print "[-] Source required, none provided"
            exit(1)
        try:
            fin = open(opts.filename, 'rb')
            data = fin.read()
            fin.close()
            md5hash.update(data)
            sourcemd5 = md5hash.hexdigest()
            print "[+] Read %d from %s" % (len(data), opts.filename)
        except:
            print "[-] Cannot open %s for reading!" % opts.filename
            return
        if opts.parent:
            parent = opts.parent
        else:
            parent = None
        parent_type = opts.parent_type
        if opts.trojan:
            trojan = opts.trojan
        else:
            trojan = None
        fname = opts.filename
        (dirname, filename) = os.path.split(fname)
        sample = handle_file(
            filename,
            data,
            source,
            opts.reference,
            backdoor=trojan,
            user=self.username,
            parent_md5=parent,
            parent_type=parent_type,
            method="Command line add_file.py",
            bucket_list=opts.bucket_list)
        if sourcemd5 != sample:
            print "[-] Source MD5: %s is not the same as the returned MD5: %s" % (sourcemd5, sample)
            exit(1)
        else:
            print "[+] Added %s (MD5: %s)" % (filename, sample)
Code example #37
File: handlers.py  Project: apolkosnik/crits_services
def chopshop_carver(pcap_md5, options, analyst):
    # Make sure we can find ChopShop
    sc = get_config('ChopShop')
    user = get_user_info(analyst)

    if not sc:
        return {'success': False, 'message': 'Could not find ChopShop service.'}

    shop_path = "%s/shop" % str(sc['basedir'])
    if not os.path.exists(shop_path):
        return {'success': False, 'message': "ChopShop shop path does not exist."}

    sys.path.append(shop_path)
    import ChopLib as CL
    if StrictVersion(str(CL.VERSION)) < StrictVersion('4.0'):
        return {'success': False, 'message': 'Need ChopShop 4.0 or newer'}

    # Until we have an smtp_extractor in ChopShop we have to resort to
    # (ab)using payloads to dump the entire TCP stream and letting
    # handle_eml() process everything. We also use the payloads module
    # for handling raw carves. If a user wants to do SMTP and raw
    # simultaneously it won't work because we can't distinguish one
    # payloads module from another.
    if options.get('raw', False) and options.get('smtp', False):
        return {'success': False, 'message': "Can not process SMTP and raw simultaneously."}

    # Make sure we have a PCAP to work with
    pcap = PCAP.objects(md5=pcap_md5).first()
    if not pcap:
        return {'success': False, 'message': "No PCAP found."}
    pcap_data = pcap.filedata.read()
    if not pcap_data:
        return {'success': False, 'message': "Could not get PCAP from GridFS: %s" %  pcap_md5}

    source = pcap['source'][0]['name'] # XXX: This kind of sucks...

    # Create module string to pass to ChopShop
    modules = []
    if options.get('http_resp', False) or options.get('http_req', False):
        modules.append("http | http_extractor")

    if options.get('smtp', False) or options.get('raw', False):
        # ChopShop really needs an smtp_extractor, but there's no good
        # capability to do that yet. Maybe one day I'll build one. :)
        # For now, just use payloads and let handle_eml() sort it out.
        #
        # Raw carving works exactly the same way, just post-processed
        # differently.
        modules.append("payloads -b")

    if not modules:
        return {'success': False, 'message': "No modules specified."}

    mod_string = ';'.join(mod for mod in modules)

    from ChopLib import ChopLib
    from ChopUi import ChopUi

    choplib = ChopLib()
    chopui = ChopUi()

    choplib.base_dir = str(sc['basedir'])

    choplib.modules = mod_string

    chopui.jsonout = jsonhandler
    choplib.jsonout = True

    # ChopShop (because of pynids) needs to read a file off disk.
    # Write the pcap data to a temporary file.
    temp_pcap = tempfile.NamedTemporaryFile(delete=False)
    temp_pcap.write(pcap_data)
    temp_pcap.close()

    choplib.filename = temp_pcap.name
    chopui.bind(choplib)
    chopui.start()

    if chopui.jsonclass is None:
        os.unlink(temp_pcap.name)
        chopui.join()
        choplib.finish()
        choplib.join()
        return {'success': False,
                'message': 'Lost race condition in chopui. Try again.'}

    # ChopUI must be started before the jsonhandler class is instantiated.
    # Tell the class what we are looking for now that it exists.
    chopui.jsonclass.parse_options(options)

    choplib.start()

    while chopui.is_alive():
        time.sleep(.1)

    chopui.join()
    choplib.finish()
    choplib.join()

    os.unlink(temp_pcap.name)

    message = ''

    # Grab any carved HTTP bodies.
    for (md5_digest, (name, blob)) in chopui.jsonclass.http_files.items():
        if user.has_access_to(SampleACL.WRITE) and handle_file(name, blob, source, related_md5=pcap_md5, user=user, source_method='ChopShop Filecarver', md5_digest=md5_digest, related_type='PCAP'):
            # Specifically not using name here as I don't want to deal
            # with sanitizing it
            message += "Saved HTTP body: <a href=\"%s\">%s</a><br />" % (reverse('crits-samples-views-detail', args=[md5_digest]), md5_digest)
        else:
            message += "Failed to save file %s." % md5_digest

    # Grab any carved SMTP returns.
    for blob in chopui.jsonclass.smtp_returns.values():
        ret = handle_eml(blob, source, None, analyst, 'ChopShop FileCarver',
                         related_id=pcap.id, related_type='PCAP',
                         relationship_type=RelationshipTypes.RELATED_TO)
        if not ret['status']:
            message += ret['reason']
            continue

        message += "Saved email: <a href=\"%s\">%s</a><br />%i attachment(s)<br />" % (reverse('crits-emails-views-email_detail', args=[ret['object'].id]), ret['object'].id, len(ret['attachments'].keys()))

        for md5_digest in ret['attachments'].keys():
            message += "<a href=\"%s\">%s</a><br />" % (reverse('crits-samples-views-detail', args=[md5_digest]), md5_digest)

    # Handle raw returns.
    for id_, blob in chopui.jsonclass.raw_returns.items():
        if user.has_access_to(SampleACL.WRITE):
            md5_digest = handle_file(id_, blob, source, related_md5=pcap_md5, user=user, source_method='ChopShop Filecarver', related_type='PCAP')
        else:
            md5_digest = None
        if md5_digest:
            message += "Saved raw %s: <a href=\"%s\">%s</a><br />" % (id_, reverse('crits-samples-views-detail', args=[md5_digest]), md5_digest)
        else:
            message += "Failed to save raw %s." % md5_digest

    # It's possible to have no files here if nothing matched.
    # Still return True as there were no problems.
    if not message:
        message = 'No files found.'
    return {'success': True, 'message': message}
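
Because ChopShop (via pynids) can only read packets from a file on disk, the carver above writes the in-memory PCAP bytes to a temporary file and unlinks it when processing ends. A minimal, generic sketch of that pattern (the function name is invented for illustration):

import os
import tempfile

def run_on_disk(data, func):
    # Write in-memory bytes to a temporary file, call func(path) on it,
    # and always remove the file afterwards.
    tmp = tempfile.NamedTemporaryFile(delete=False)
    try:
        tmp.write(data)
        tmp.close()
        return func(tmp.name)
    finally:
        os.unlink(tmp.name)
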
Code example #38
    def parse_cybox_object(self, cbx_obj, description='', ind_id=None):
        """
        Parse a CybOX object from a STIX doc. An object can contain
        multiple related_objects, which in turn can have their own
        related_objects, so this handles those recursively.

        :param cbx_obj: The CybOX object to parse.
        :type cbx_obj: A CybOX object.
        :param description: Parent-level (e.g. Observable) description.
        :type description: str
        :param ind_id: The ID of a parent STIX Indicator.
        :type ind_id: str
        """

        # check for missing attributes
        if not cbx_obj or not cbx_obj.properties:
            if cbx_obj.idref:  # just a reference, so nothing to parse
                return
            else:
                cbx_id = getattr(cbx_obj, 'id_', 'None')
                self.failed.append(("No valid object_properties was found!",
                                    "Observable (%s)" % cbx_id,
                                    cbx_id))  # note for display in UI
                return

        # Don't parse if already been parsed
        # This is for artifacts that are related to CybOX File Objects
        if cbx_obj.id_ in self.parsed:
            return

        try:  # try to create CRITs object from Cybox Object
            analyst = self.source_instance.analyst
            item = cbx_obj.properties
            val = cbx_obj.id_
            if isinstance(item, Address) and not ind_id:
                if item.category in ('cidr', 'ipv4-addr', 'ipv4-net',
                                     'ipv4-netmask', 'ipv6-addr', 'ipv6-net',
                                     'ipv6-netmask'):
                    imp_type = "IP"
                    for value in item.address_value.values:
                        val = str(value).strip()
                        if self.preview:
                            res = None
                        else:
                            iptype = get_crits_ip_type(item.category)
                            if iptype:
                                res = ip_add_update(val,
                                                    iptype, [self.source],
                                                    analyst=analyst,
                                                    is_add_indicator=True)
                            else:
                                res = {
                                    'success': False,
                                    'reason': 'No IP Type'
                                }
                        self.parse_res(imp_type, val, cbx_obj, res, ind_id)
            if (not ind_id and
                (isinstance(item, DomainName) or
                 (isinstance(item, URI) and item.type_ == 'Domain Name'))):
                imp_type = "Domain"
                for val in item.value.values:
                    if self.preview:
                        res = None
                    else:
                        res = upsert_domain(str(val), [self.source],
                                            username=analyst)
                    self.parse_res(imp_type, str(val), cbx_obj, res, ind_id)

            elif isinstance(item, HTTPSession):
                imp_type = "RawData"
                val = cbx_obj.id_
                try:
                    c_req = item.http_request_response[0].http_client_request
                    hdr = c_req.http_request_header
                    if hdr.raw_header:
                        data = hdr.raw_header.value
                        title = "HTTP Header from STIX: %s" % self.package.id_
                        method = self.source_instance.method
                        ref = self.source_instance.reference
                        if self.preview:
                            res = None
                            val = title
                        else:
                            res = handle_raw_data_file(data,
                                                       self.source.name,
                                                       user=analyst,
                                                       description=description,
                                                       title=title,
                                                       data_type="HTTP Header",
                                                       tool_name="STIX",
                                                       tool_version=None,
                                                       method=method,
                                                       reference=ref)
                    else:
                        imp_type = "Indicator"
                        ind_type = "HTTP Request Header Fields - User-Agent"
                        val = hdr.parsed_header.user_agent.value
                        val = ','.join(val) if isinstance(val, list) else val
                        if self.preview:
                            res = None
                        else:
                            res = handle_indicator_ind(
                                val,
                                self.source,
                                ind_type,
                                IndicatorThreatTypes.UNKNOWN,
                                IndicatorAttackTypes.UNKNOWN,
                                analyst,
                                add_relationship=True,
                                description=description)
                except:
                    msg = "Unsupported use of 'HTTPSession' object."
                    res = {'success': False, 'reason': msg}

                self.parse_res(imp_type, val, cbx_obj, res, ind_id)
            elif isinstance(item, WhoisEntry):
                # Not sure where else to put this
                imp_type = "RawData"
                val = cbx_obj.id_
                if item.remarks:
                    data = item.remarks.value
                    title = "WHOIS Entry from STIX: %s" % self.package.id_
                    if self.preview:
                        res = None
                        val = title
                    else:
                        res = handle_raw_data_file(
                            data,
                            self.source.name,
                            user=analyst,
                            description=description,
                            title=title,
                            data_type="Text",
                            tool_name="WHOIS",
                            tool_version=None,
                            method=self.source_instance.method,
                            reference=self.source_instance.reference)
                else:
                    msg = "Unsupported use of 'WhoisEntry' object."
                    res = {'success': False, 'reason': msg}

                self.parse_res(imp_type, val, cbx_obj, res, ind_id)
            elif isinstance(item, Artifact):
                # Not sure if this is right, and I believe these can be
                # encoded in a couple different ways.
                imp_type = "RawData"
                val = cbx_obj.id_
                rawdata = item.data.decode('utf-8')
                # TODO: find out proper ways to determine title, datatype,
                #       tool_name, tool_version
                title = "Artifact for Event: STIX Document %s" % self.package.id_
                if self.preview:
                    res = None
                    val = title
                else:
                    res = handle_raw_data_file(
                        rawdata,
                        self.source.name,
                        user=analyst,
                        description=description,
                        title=title,
                        data_type="Text",
                        tool_name="STIX",
                        tool_version=None,
                        method=self.source_instance.method,
                        reference=self.source_instance.reference)
                self.parse_res(imp_type, val, cbx_obj, res, ind_id)
            elif (isinstance(item, File) and item.custom_properties
                  and item.custom_properties[0].name == "crits_type"
                  and item.custom_properties[0]._value == "Certificate"):
                imp_type = "Certificate"
                val = str(item.file_name)
                data = None
                if self.preview:
                    res = None
                else:
                    for rel_obj in item.parent.related_objects:
                        if isinstance(rel_obj.properties, Artifact):
                            data = rel_obj.properties.data
                            self.parsed.append(rel_obj.id_)
                    res = handle_cert_file(val,
                                           data,
                                           self.source,
                                           user=analyst,
                                           description=description)
                self.parse_res(imp_type, val, cbx_obj, res, ind_id)
            elif isinstance(item, File) and self.has_network_artifact(item):
                imp_type = "PCAP"
                val = str(item.file_name)
                data = None
                if self.preview:
                    res = None
                else:
                    for rel_obj in item.parent.related_objects:
                        if (isinstance(rel_obj.properties, Artifact)
                                and rel_obj.properties.type_
                                == Artifact.TYPE_NETWORK):
                            data = rel_obj.properties.data
                            self.parsed.append(rel_obj.id_)
                    res = handle_pcap_file(val,
                                           data,
                                           self.source,
                                           user=analyst,
                                           description=description)
                self.parse_res(imp_type, val, cbx_obj, res, ind_id)
            elif isinstance(item, File):
                imp_type = "Sample"
                md5 = item.md5
                if md5:
                    md5 = md5.lower()
                val = str(item.file_name or md5)
                # add sha1/sha256/ssdeep once handle_file supports it
                size = item.size_in_bytes
                data = None
                if item.file_path:
                    path = "File Path: " + str(item.file_path)
                    description += "\n" + path
                for rel_obj in item.parent.related_objects:
                    if (isinstance(rel_obj.properties, Artifact) and
                            rel_obj.properties.type_ == Artifact.TYPE_FILE):
                        data = rel_obj.properties.data
                        self.parsed.append(rel_obj.id_)
                if not md5 and not data and val and val != "None":
                    imp_type = "Indicator"
                    if self.preview:
                        res = None
                    else:
                        res = handle_indicator_ind(
                            val,
                            self.source,
                            "Win File",
                            IndicatorThreatTypes.UNKNOWN,
                            IndicatorAttackTypes.UNKNOWN,
                            analyst,
                            add_domain=True,
                            add_relationship=True,
                            description=description)
                elif md5 or data:
                    if self.preview:
                        res = None
                    else:
                        res = handle_file(val,
                                          data,
                                          self.source,
                                          user=analyst,
                                          md5_digest=md5,
                                          is_return_only_md5=False,
                                          size=size,
                                          description=description)
                else:
                    val = cbx_obj.id_
                    msg = "CybOX 'File' object has no MD5, data, or filename"
                    res = {'success': False, 'reason': msg}
                self.parse_res(imp_type, val, cbx_obj, res, ind_id)
            elif isinstance(item, EmailMessage):
                imp_type = 'Email'
                id_list = []
                data = {}
                val = cbx_obj.id_
                get_attach = False
                data['raw_body'] = str(item.raw_body)
                data['raw_header'] = str(item.raw_header)
                data['helo'] = str(item.email_server)
                if item.header:
                    data['subject'] = str(item.header.subject)
                    if item.header.date:
                        data['date'] = item.header.date.value
                    val = "Date: %s, Subject: %s" % (data.get(
                        'date', 'None'), data['subject'])
                    data['message_id'] = str(item.header.message_id)
                    data['sender'] = str(item.header.sender)
                    data['reply_to'] = str(item.header.reply_to)
                    data['x_originating_ip'] = str(
                        item.header.x_originating_ip)
                    data['x_mailer'] = str(item.header.x_mailer)
                    data['boundary'] = str(item.header.boundary)
                    data['from_address'] = str(item.header.from_)
                    if item.header.to:
                        data['to'] = [str(r) for r in item.header.to.to_list()]

                if data.get('date'):  # Email TLOs must have a date
                    data['source'] = self.source.name
                    data['source_method'] = self.source_instance.method
                    data['source_reference'] = self.source_instance.reference
                    if self.preview:
                        res = None
                    else:
                        res = handle_email_fields(data, analyst, "STIX")
                    self.parse_res(imp_type, val, cbx_obj, res, ind_id)
                    if not self.preview and res.get('status'):
                        id_list.append(cbx_obj.id_)  # save ID for attachment relationships
                        get_attach = True
                else:  # Can't be an Email TLO, so save fields
                    for x, key in enumerate(data):
                        if data[key] and data[key] != "None":
                            if key in ('raw_header', 'raw_body'):
                                if key == 'raw_header':
                                    title = "Email Header from STIX Email: %s"
                                    d_type = "Email Header"
                                else:
                                    title = "Email Body from STIX Email: %s"
                                    d_type = "Email Body"
                                imp_type = 'RawData'
                                title = title % cbx_obj.id_
                                if self.preview:
                                    res = None
                                else:
                                    res = handle_raw_data_file(
                                        data[key], self.source, analyst,
                                        description, title, d_type, "STIX",
                                        self.stix_version)
                                self.parse_res(imp_type, title, cbx_obj, res,
                                               ind_id)
                            elif key == 'to':
                                imp_type = 'Target'
                                for y, addr in enumerate(data[key]):
                                    tgt_dict = {'email_address': addr}
                                    if self.preview:
                                        res = None
                                    else:
                                        res = upsert_target(tgt_dict, analyst)
                                        if res['success']:
                                            get_attach = True
                                    tmp_obj = copy(cbx_obj)
                                    tmp_obj.id_ = '%s-%s-%s' % (cbx_obj.id_, x,
                                                                y)
                                    self.parse_res(imp_type, addr, tmp_obj,
                                                   res, ind_id)
                                    self.ind2obj.setdefault(
                                        cbx_obj.id_, []).append(tmp_obj.id_)
                                    id_list.append(tmp_obj.id_)
                            else:
                                imp_type = 'Indicator'
                                if key in ('sender', 'reply_to',
                                           'from_address'):
                                    ind_type = "Address - e-mail"
                                elif 'ip' in key:
                                    ind_type = "Address - ipv4-addr"
                                elif key == 'raw_body':
                                    ind_type = "Email Message"
                                else:
                                    ind_type = "String"
                                if self.preview:
                                    res = None
                                else:
                                    res = handle_indicator_ind(
                                        data[key],
                                        self.source,
                                        ind_type,
                                        IndicatorThreatTypes.UNKNOWN,
                                        IndicatorAttackTypes.UNKNOWN,
                                        analyst,
                                        add_domain=True,
                                        add_relationship=True,
                                        description=description)
                                    if res['success']:
                                        get_attach = True
                                tmp_obj = copy(cbx_obj)
                                tmp_obj.id_ = '%s-%s' % (cbx_obj.id_, x)
                                self.parse_res(imp_type, data[key], tmp_obj,
                                               res, ind_id)
                                self.ind2obj.setdefault(cbx_obj.id_,
                                                        []).append(tmp_obj.id_)
                                id_list.append(tmp_obj.id_)

                if not self.preview:
                    # Setup relationships between all Email attributes
                    for oid in id_list:
                        for oid2 in id_list:
                            if oid != oid2:
                                self.relationships.append(
                                    (oid, RelationshipTypes.RELATED_TO, oid2,
                                     "High"))

                    # Should check for attachments and add them here.
                    if get_attach and item.attachments:
                        for attach in item.attachments:
                            rel_id = attach.to_dict()['object_reference']
                            for oid in id_list:
                                self.relationships.append(
                                    (oid, RelationshipTypes.CONTAINS, rel_id,
                                     "High"))

            else:  # try to parse all other possibilities as Indicator
                imp_type = "Indicator"
                val = cbx_obj.id_
                c_obj = make_crits_object(item)

                # Ignore what was already caught above
                if (ind_id or c_obj.object_type not in IPTypes.values()):
                    ind_type = c_obj.object_type
                    for val in [str(v).strip() for v in c_obj.value if v]:
                        if ind_type:
                            # handle domains mislabeled as URLs
                            if c_obj.object_type == 'URI' and '/' not in val:
                                ind_type = "Domain"

                            if self.preview:
                                res = None
                            else:
                                res = handle_indicator_ind(
                                    val,
                                    self.source,
                                    ind_type,
                                    IndicatorThreatTypes.UNKNOWN,
                                    IndicatorAttackTypes.UNKNOWN,
                                    analyst,
                                    add_domain=True,
                                    add_relationship=True,
                                    description=description)
                            self.parse_res(imp_type, val, cbx_obj, res, ind_id)

        except Exception, e:  # probably caused by cybox object we don't handle
            self.failed.append((e.message, "%s (%s)" % (imp_type, val),
                                cbx_obj.id_))  # note for display in UI
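
The docstring at the top of this example notes that a CybOX object can carry related_objects, which can in turn have their own, so the parser handles them recursively and records IDs in self.parsed to avoid reprocessing. A minimal sketch of that traversal on its own, assuming objects expose id_ and related_objects the way the python-cybox objects here do (the function name is illustrative):

def walk_related(cbx_obj, visit, seen=None):
    # Visit an object, then recurse into its related_objects, skipping
    # anything already handled (the role self.parsed plays above).
    seen = set() if seen is None else seen
    if cbx_obj is None:
        return
    obj_id = getattr(cbx_obj, 'id_', None)
    if obj_id in seen:
        return
    if obj_id is not None:
        seen.add(obj_id)
    visit(cbx_obj)
    for rel in getattr(cbx_obj, 'related_objects', None) or []:
        walk_related(rel, visit, seen)
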
Code example #39
    def finish_task(self, task):
        """
        Finish a task.
        """

        logger.debug("Finishing task %s" % task)
        self.update_task(task)

        obj = class_from_type(task.context.crits_type)
        query = self.get_db_query(task.context)

        sample = obj.objects(__raw__=query).first()

        if task.files:
            logger.debug("Adding samples")
            for f in task.files:
                logger.debug("Adding %s" % f['filename'])
                #TODO: add in backdoor?, user
                from crits.samples.handlers import handle_file
                handle_file(f['filename'], f['data'], sample.source,
                            parent_md5=task.context.identifier,
                            campaign=sample.campaign,
                            method=task.service.name,
                            relationship=f['relationship'],
                            user=task.context.username,
                            )
        else:
            logger.debug("No samples to add.")

        if task.certificates:
            logger.debug("Adding certificates")

            for f in task.certificates:
                logger.debug("Adding %s" % f['filename'])
                from crits.certificates.handlers import handle_cert_file
                # XXX: Add campaign from source?
                handle_cert_file(f['filename'], f['data'], sample.source,
                            parent_md5=task.context.identifier,
                            parent_type=task.context.crits_type,
                            method=task.service.name,
                            relationship=f['relationship'],
                            user=task.context.username,
                            )
        else:
            logger.debug("No certificates to add.")

        if task.pcaps:
            logger.debug("Adding PCAPs")

            for f in task.pcaps:
                logger.debug("Adding %s" % f['filename'])
                from crits.pcaps.handlers import handle_pcap_file
                # XXX: Add campaign from source?
                handle_pcap_file(f['filename'], f['data'], sample.source,
                            parent_md5=task.context.identifier,
                            parent_type=task.context.crits_type,
                            method=task.service.name,
                            relationship=f['relationship'],
                            user=task.context.username,
                            )
        else:
            logger.debug("No PCAPs to add.")
Code example #40
File: __init__.py  Project: cvdsouza/crits_services
    def run(self, obj, config):
        key = config.get('vt_api_key', '')
        url = config.get('vt_download_url', '')
        sizeLimit = config.get('size_limit', '')
        replace = config.get('replace_sample', False)
        do_triage = config.get('run_triage', False)

        user = self.current_task.user
        sample = Sample.objects(md5=obj.md5).first()
        if not sample:
            sample = Sample()
            sample.md5 = obj.md5
        self._info("Checking if binary already exists in CRITs.")
        sample.discover_binary()

        if sample.filedata and not replace:  # we already have this binary and replacement was not requested
            self._info("CRITs already has this binary. Enable the 'Replace' option to overwrite with data from VirusTotal.")
            self._add_result("Download Canceled", "Binary already exists in CRITs.")
            return

        if not user.has_access_to(SampleACL.WRITE):
            self._info("User does not have permission to add Samples to CRITs")
            self._add_result("Download Canceled", "User does not have permission to add Samples to CRITs")
            return

        parameters = urllib.urlencode({"hash": obj.md5, "apikey": key})
        if settings.HTTP_PROXY:
            proxy = urllib2.ProxyHandler({'http': settings.HTTP_PROXY, 'https': settings.HTTP_PROXY})
            opener = urllib2.build_opener(proxy)
            urllib2.install_opener(opener)

        try:
            req = url + "?" + parameters
            self._info("Requesting binary with md5 '{0}' from VirusTotal.".format(obj.md5))
            request = urllib2.Request(req)
            response = urllib2.urlopen(request)
            size = response.info().getheaders("Content-Length")[0]
            self._info("Binary size: {0} bytes".format(size))

            if int(size) > sizeLimit: # Check if within size limit
                self._error("Binary size is {0} bytes, which is greater than maximum of {1} bytes. This limit can be changed in options.".format(size, sizeLimit))
                self._add_result("Download Aborted", "Match found, but binary is larger than maximum size limit.")
                return

            data = response.read()
        except urllib2.HTTPError as e:
            if e.code == 404:
                self._info("No results were returned. Either VirusTotal does not have the requested binary, or the request URL is incorrect.")
                self._add_result("Not Found", "Binary was not found in the VirusTotal database")
            elif e.code == 403:
                self._error("Download forbidden. {0}".format(e))
                self._add_result("Download Canceled", "CRITs was forbidden from downloading the binary.")
            else:
                self._error("An HTTP Error occurred: {0}".format(e))
        except Exception as e:
            logger.error("VirusTotal: Failed connection ({0})".format(e))
            self._error("Failed to get data from VirusTotal: {0}".format(e))
            return

        if data: # Retrieved some data from VT
            if replace:
                try:
                    self._info("Replace = True. Deleting any previous binary with md5 {0}".format(obj.md5))
                    sample.filedata.delete()
                except Exception as e:
                    logger.error("VirusTotal: Error deleting existing binary ({0})".format(e))
                    self._error("Failed to delete existing binary")
            self._info("Adding new binary to CRITs.")

            try:
                handle_file(filename = obj.md5,
                            data = data,
                            source = "VirusTotal",
                            reference = "Binary downloaded from VT based on MD5",
                            user = "******",
                            method = "VirusTotal Download Service",
                            md5_digest = obj.md5 )
            except Exception as e:
                logger.error("VirusTotal: Sample creation failed ({0})".format(e))
                self._error("Failed to create new Sample: {0}".format(e))
                return
            if do_triage:
                self._info("Running sample triage for data-reliant services.")
                sample.reload()
                run_triage(sample, user = "******")
            self._add_result("Download Successful", "Binary was successfully downloaded from VirusTotal")
        else:
            self._error("No data returned by VirusTotal.")
Code example #41
    def parse_observables(self, observables):
        """
        Parse list of observables in STIX doc.

        :param observables: List of STIX observables.
        :type observables: List of STIX observables.
        """

        analyst = self.source_instance.analyst

        for obs in observables:  # for each STIX observable

            if obs.observable_composition:
                object_list = obs.observable_composition.observables
            else:
                object_list = [obs]

            for obs_comp in object_list:

                if not obs_comp.object_ or not obs_comp.object_.properties:
                    self.failed.append(
                        ("No valid object_properties was found!",
                         type(obs_comp).__name__,
                         obs_comp.id_))  # note for display in UI
                    continue
                try:  # try to create CRITs object from observable
                    item = obs_comp.object_.properties

                    if isinstance(item, Address):
                        if item.category in ('cidr', 'ipv4-addr', 'ipv4-net',
                                             'ipv4-netmask', 'ipv6-addr',
                                             'ipv6-net', 'ipv6-netmask',
                                             'ipv6-subnet'):
                            imp_type = "IP"
                            for value in item.address_value.values:
                                ip = str(value).strip()
                                iptype = get_crits_ip_type(item.category)
                                if iptype:
                                    res = ip_add_update(ip,
                                                        iptype, [self.source],
                                                        analyst=analyst,
                                                        id=self.package.id_)
                                    self.parse_res(imp_type, obs, res)
                    if isinstance(item, DomainName):
                        imp_type = "Domain"
                        for value in item.value.values:
                            res = upsert_domain(str(value), [self.source],
                                                username=analyst,
                                                id=self.package.id_)
                            self.parse_res(imp_type, obs, res)
                    elif isinstance(item, Artifact):
                        # Not sure if this is right, and I believe these can be
                        # encoded in a couple different ways.
                        imp_type = "RawData"
                        rawdata = item.data.decode('utf-8')
                        description = "None"
                        # TODO: find out proper ways to determine title, datatype,
                        #       tool_name, tool_version
                        title = "Artifact for Event: STIX Document %s" % self.package.id_
                        res = handle_raw_data_file(
                            rawdata,
                            self.source.name,
                            user=analyst,
                            description=description,
                            title=title,
                            data_type="Text",
                            tool_name="STIX",
                            tool_version=None,
                            method=self.source_instance.method,
                            reference=self.source_instance.reference)
                        self.parse_res(imp_type, obs, res)
                    elif (isinstance(item, File) and item.custom_properties
                          and item.custom_properties[0].name == "crits_type"
                          and item.custom_properties[0]._value
                          == "Certificate"):
                        imp_type = "Certificate"
                        description = "None"
                        filename = str(item.file_name)
                        data = None
                        for obj in item.parent.related_objects:
                            if isinstance(obj.properties, Artifact):
                                data = obj.properties.data
                        res = handle_cert_file(filename,
                                               data,
                                               self.source,
                                               user=analyst,
                                               description=description)
                        self.parse_res(imp_type, obs, res)
                    elif isinstance(item,
                                    File) and self.has_network_artifact(item):
                        imp_type = "PCAP"
                        description = "None"
                        filename = str(item.file_name)
                        data = None
                        for obj in item.parent.related_objects:
                            if (isinstance(obj.properties, Artifact)
                                    and obj.properties.type_
                                    == Artifact.TYPE_NETWORK):
                                data = obj.properties.data
                        res = handle_pcap_file(filename,
                                               data,
                                               self.source,
                                               user=analyst,
                                               description=description)
                        self.parse_res(imp_type, obs, res)
                    elif isinstance(item, File):
                        imp_type = "Sample"
                        filename = str(item.file_name)
                        md5 = item.md5
                        data = None
                        for obj in item.parent.related_objects:
                            if (isinstance(obj.properties, Artifact)
                                    and obj.properties.type_
                                    == Artifact.TYPE_FILE):
                                data = obj.properties.data
                        res = handle_file(filename,
                                          data,
                                          self.source,
                                          user=analyst,
                                          md5_digest=md5,
                                          is_return_only_md5=False,
                                          id=self.package.id_)
                        self.parse_res(imp_type, obs, res)
                        if item.extracted_features:
                            self.parse_filenames(item.extracted_features,
                                                 res['object'].id)
                    elif isinstance(item, EmailMessage):

                        imp_type = "Email"
                        data = {}
                        data['source'] = self.source.name
                        data['source_method'] = self.source_instance.method
                        data[
                            'source_reference'] = self.source_instance.reference
                        data['raw_body'] = str(item.raw_body)
                        data['raw_header'] = str(item.raw_header)
                        data['helo'] = str(item.email_server)
                        if item.header:
                            data['message_id'] = str(item.header.message_id)
                            data['subject'] = str(item.header.subject)
                            data['sender'] = str(item.header.sender)
                            data['reply_to'] = str(item.header.reply_to)
                            data['x_originating_ip'] = str(
                                item.header.x_originating_ip)
                            data['x_mailer'] = str(item.header.x_mailer)
                            data['boundary'] = str(item.header.boundary)
                            data['from_address'] = str(item.header.from_)
                            data['date'] = item.header.date.value
                            if item.header.to:
                                data['to'] = [str(r) for r in item.header.to]
                            if item.header.cc:
                                data['cc'] = [str(r) for r in item.header.cc]
                        res = handle_email_fields(data,
                                                  analyst,
                                                  "STIX",
                                                  id=self.package.id_)

                        # Should check for attachments and add them here.
                        self.parse_res(imp_type, obs, res)

                        if res.get('status') and item.attachments:
                            for attach in item.attachments:
                                rel_id = attach.to_dict()['object_reference']
                                self.relationships.append(
                                    (obs.id_, "Contains", rel_id, "High"))
                    else:  # try to parse all other possibilities as Indicator
                        imp_type = "Indicator"
                        obj = make_crits_object(item)
                        if obj.object_type == 'Address':
                            # This was already caught above
                            continue
                        else:
                            ind_type = obj.object_type
                            for value in obj.value:
                                if value and ind_type:
                                    res = handle_indicator_ind(
                                        value.strip(),
                                        self.source,
                                        ind_type,
                                        IndicatorThreatTypes.UNKNOWN,
                                        IndicatorAttackTypes.UNKNOWN,
                                        analyst,
                                        add_domain=True,
                                        add_relationship=True)
                                    self.parse_res(imp_type, obs, res)
                except Exception, e:  # probably caused by cybox object we don't handle
                    self.failed.append(
                        (e.message, type(item).__name__,
                         item.parent.id_))  # note for display in UI
Code example #42
File: add_file.py  Project: sysnap2012/crits_services
    def run(self, argv):
        parser = OptionParser()
        parser.add_option("-f", "--file", action="store", dest="filename",
                type="string", help="scanned FILENAME")
        parser.add_option("-s", "--source", action="store",
                dest="source", type="string", help="source")
        parser.add_option("-p", "--parent", action="store", dest="parent_md5",
                type="string", help="parent MD5")
        parser.add_option("-i", "--parent-id", action="store", dest="parent_id",
                type="string", help="parent ID")
        parser.add_option("-P", "--parent-type", action="store", dest="parent_type",
                type="string", default="Sample", help="parent type (Sample, PCAP, ...)")
        parser.add_option("-t", "--trojan", action="store", dest="trojan",
                type="string", help="trojan")
        parser.add_option("-r", "--reference", action="store", dest="reference",
                type="string", help="reference field")
        parser.add_option("-b", "--bucket", action="store", dest="bucket_list",
                type="string", help="bucket list")
        parser.add_option("-T", "--tlp", action="store", dest="tlp",
                type="string", default="red", help="TLP of data")

        (opts, args) = parser.parse_args(argv)

        md5hash = hashlib.md5()
        if opts.source:
            source = opts.source
        else:
            print "[-] Source required, none provided"
            return
        if opts.parent_md5 and opts.parent_id:
            print "[-] Specify one of -p or -i!"
            return
        if not self.user.has_access_to(SampleACL.WRITE):
            print "[-] User does not have permission to add Samples."
            return
        if not opts.tlp or opts.tlp not in ['red', 'amber', 'green', 'white']:
            opts.tlp = 'red'

        try:
            fin = open(opts.filename, 'rb')
            data = fin.read()
            fin.close()
            md5hash.update(data)
            sourcemd5 = md5hash.hexdigest()
            print "[+] Read %d from %s" % (len(data), opts.filename)
        except:
            print "[-] Cannot open %s for reading!" % opts.filename
            return
        if opts.parent_md5:
            parent_md5 = opts.parent_md5
        else:
            parent_md5 = None
        if opts.parent_id:
            parent_id = opts.parent_id
        else:
            parent_id = None
        parent_type = opts.parent_type
        if opts.trojan:
            trojan = opts.trojan
        else:
            trojan = None
        fname = opts.filename
        (dirname, filename) = os.path.split(fname)
        sample = handle_file(
            filename,
            data,
            source,
            source_reference=opts.reference,
            source_tlp=opts.tlp,
            backdoor=trojan,
            user=self.user,
            related_md5=parent_md5,
            related_id=parent_id,
            related_type=parent_type,
            source_method="Command line add_file.py",
            bucket_list=opts.bucket_list)
        if sourcemd5 != sample:
            print "[-] Source MD5: %s is not the same as the returned MD5: %s" % (sourcemd5, sample)
        else:
            print "[+] Added %s (MD5: %s)" % (filename, sample)