Example #1
File: web.py Project: blaquee/viper
def url_download():
    url = request.forms.get('url')
    tags = request.forms.get('tag_list')
    tags = "url," + tags
    if request.forms.get("tor"):
        upload = network.download(url, tor=True)
    else:
        upload = network.download(url, tor=False)
    if upload is None:
        return template('error.tpl', error="server can't download from URL")
    # Set Project
    project = 'Main'
    db = Database()
    tf = tempfile.NamedTemporaryFile()
    tf.write(upload)
    if tf is None:
        return template('error.tpl', error="server can't download from URL")
    tf.flush()
    tf_obj = File(tf.name)
    tf_obj.name = tf_obj.sha256
    new_path = store_sample(tf_obj)
    success = False
    if new_path:
        # Add file to the database.
        success = db.add(obj=tf_obj, tags=tags)

    if success:
        #redirect("/project/{0}".format(project))
        redirect("/file/Main/"+tf_obj.sha256)
    else:
        return template('error.tpl', error="Unable to Store The File, already in database")
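The handler relies on network.download(), which is not part of this snippet. A minimal sketch of what such a helper might look like, assuming the requests library (with its SOCKS extra for Tor) rather than Viper's own implementation; it returns None on failure, matching the check above:

import requests  # assumed dependency; Viper's network module may differ

def download(url, tor=False):
    # Sketch: fetch the URL and return the raw bytes, optionally routing
    # the request through a local Tor SOCKS proxy (hypothetical port 9050).
    proxies = {'http': 'socks5h://127.0.0.1:9050',
               'https': 'socks5h://127.0.0.1:9050'} if tor else None
    try:
        res = requests.get(url, proxies=proxies, timeout=30)
        res.raise_for_status()
        return res.content
    except requests.RequestException:
        return None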
Example #2
def url_download():
    url = request.forms.get('url')
    tags = request.forms.get('tag_list')
    tags = "url," + tags
    if request.forms.get("tor"):
        upload = network.download(url, tor=True)
    else:
        upload = network.download(url, tor=False)
    if upload is None:
        return template('error.tpl', error="server can't download from URL")
    # Set Project
    project = 'Main'
    db = Database()
    tf = tempfile.NamedTemporaryFile()
    tf.write(upload)
    if tf is None:
        return template('error.tpl', error="server can't download from URL")
    tf.flush()
    tf_obj = File(tf.name)
    tf_obj.name = tf_obj.sha256
    new_path = store_sample(tf_obj)
    success = False
    if new_path:
        # Add file to the database.
        success = db.add(obj=tf_obj, tags=tags)

    if success:
        #redirect("/project/{0}".format(project))
        redirect("/file/Main/" + tf_obj.sha256)
    else:
        return template('error.tpl',
                        error="Unable to Store The File, already in database")
Example #3
    def run(self):
        super(Strings, self).run()

        if self.args is None:
            return

        if not (self.args.all or self.args.files or self.args.hosts
                or self.args.network or self.args.interesting):
            self.log('error', 'At least one of the parameters is required')
            self.usage()
            return

        if self.args.scan:
            db = Database()
            samples = db.find(key='all')
            for sample in samples:
                sample_path = get_sample_path(sample.sha256)
                strings = self.get_strings(File(sample_path))
                self.process_strings(strings, sample.name)
        else:
            if not __sessions__.is_set():
                self.log('error', "No open session")
                return
            if os.path.exists(__sessions__.current.file.path):
                strings = self.get_strings(__sessions__.current.file)
                self.process_strings(strings)
Example #4
def get_file(file_hash):
    key = ''
    if len(file_hash) == 32:
        key = 'md5'
    elif len(file_hash) == 64:
        key = 'sha256'
    else:
        response.code = 400
        return jsonize({'message':'Invalid hash format (use md5 or sha256)'})

    db = Database()
    rows = db.find(key=key, value=file_hash)

    if not rows:
        response.code = 404
        return jsonize({'message':'File not found in the database'})

    path = get_sample_path(rows[0].sha256)
    if not path:
        response.code = 404
        return jsonize({'message':'File not found in the repository'})

    response.content_length = os.path.getsize(path)
    response.content_type = 'application/octet-stream; charset=UTF-8'
    data = ''
    for chunk in File(path).get_chunks():
        data += chunk

    return data
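The response body is built by iterating File(path).get_chunks(), which is not shown here. A minimal sketch of what such a chunked reader might look like (the chunk size and attribute name are assumptions):

def get_chunks(self, size=16 * 1024):
    # Sketch: yield the sample's contents in fixed-size blocks so large
    # files never have to be held in memory all at once.
    with open(self.path, 'rb') as handle:
        while True:
            chunk = handle.read(size)
            if not chunk:
                break
            yield chunk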
Example #5
def add_file(file_path, name=None, tags=None, parent=None):
    obj = File(file_path)
    new_path = store_sample(obj, __project__)
    print(new_path)

    if not name:
        name = os.path.basename(file_path)

    # success = True
    if new_path:
        # Add file to the database.
        try:
            db = Database()
            db.add(obj=obj, name=name, tags=tags, parent_sha=parent)
        except Exception as e:
            log.error("Exception while adding sample to DB: {}".format(e))
            # Removing stored file since DB write failed
            remove_sample(new_path)
            return None

        # AutoRun Modules
        if cfg.autorun.enabled:
            autorun_module(obj.sha256)
        # Close the open session to keep the session table clean
        __sessions__.close()
        return obj.sha256
    else:
        log.info("File already exists in database")
        return None
Example #6
    def new(self, path):
        session = Session()

        total = len(self.sessions)
        session.id = total + 1

        # Open a session on the given file.
        session.file = File(path)

        # Try to lookup the file in the database. If it is already present
        # we get its file name and tags.
        row = Database().find(key='sha256', value=session.file.sha256)
        if row:
            session.file.name = row[0].name
            session.file.tags = ', '.join(tag.to_dict()['tag'] for tag in row[0].tag)

        # Loop through all existing sessions and check whether there's another
        # session open on the same file and delete it. This is to avoid
        # duplicates in sessions.
        # NOTE: in the future we might want to remove this if sessions have
        # unique attributes (for example, an history just for each of them).
        for entry in self.sessions:
            if entry.file.sha256 == session.file.sha256:
                self.sessions.remove(entry)

        # Add new session to the list.
        self.sessions.append(session)
        # Mark the new session as the current one.
        self.current = session

        print_info("Session opened on {0}".format(path))
Example #7
    def get_strings_base(self, f, min=4, rabin_extract=False, xor_key=None):
        '''
        A wrapper to handle XORing of the file and to select which string
        extraction method to use.
        -x does not work when using -r (rabin) to extract the strings.
        '''
        if rabin_extract:
            strings = self.get_strings_r2(f, min, xor_key)
        else:
            if xor_key:
                decoded = self.xordata(File(f), self.args.xor)
                strings = self.get_strings(decoded, min)
            else:
                strings = self.get_strings(File(f), min)

        return strings
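The xor_key branch calls self.xordata(), which is not included in the snippet. A minimal sketch of a single-byte XOR decoder, assuming the key is an integer and the file object exposes its raw bytes as .data:

def xordata(self, file_obj, key):
    # Sketch: XOR every byte of the sample with a single-byte key and
    # return the decoded bytes for further string extraction.
    return bytes(b ^ (key & 0xFF) for b in file_obj.data)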
Example #8
def file_download(file_hash, project=False):
    if project in project_list():
        __project__.open(project)
    else:
        __project__.open('../')
        project = 'Main'
    # Open the Database
    db = Database()
    # Open a session
    rows = db.find(key='sha256', value=file_hash)
    if not rows:
        return template(
            'error.tpl',
            error="{0} Does not match any hash in the Database".format(
                file_hash))

    path = get_sample_path(rows[0].sha256)
    if not path:
        return template('error.tpl', error="File not found on disk")

    response.content_length = os.path.getsize(path)
    response.content_type = 'application/octet-stream; charset=UTF-8'
    data = ''
    for chunk in File(path).get_chunks():
        data += chunk
    return data
Example #9
    def new(self, path=None, misp_event=None):
        if not path and not misp_event:
            print_error("You have to open a session on a path or on a misp event.")
            return

        session = Session()

        total = len(self.sessions)
        session.id = total + 1

        if path:
            if self.is_set() and misp_event is None and self.current.misp_event:
                session.misp_event = self.current.misp_event

            # Open a session on the given file.
            session.file = File(path)
            # Try to lookup the file in the database. If it is already present
            # we get its database ID, file name, and tags.
            row = Database().find(key='sha256', value=session.file.sha256)
            if row:
                session.file.id = row[0].id
                session.file.name = row[0].name
                session.file.tags = ', '.join(tag.to_dict()['tag'] for tag in row[0].tag)

                if row[0].parent:
                    session.file.parent = '{0} - {1}'.format(row[0].parent.name, row[0].parent.sha256)
                session.file.children = Database().get_children(row[0].id)

            print_info("Session opened on {0}".format(path))

        if misp_event:
            if self.is_set() and path is None and self.current.file:
                session.file = self.current.file
            refresh = False
            if (self.current is not None and self.current.misp_event is not None and
                    self.current.misp_event.event.id is not None and
                    self.current.misp_event.event.id == misp_event.event.id):
                refresh = True
            session.misp_event = misp_event
            if refresh:
                print_info("Session on MISP event {0} refreshed.".format(misp_event.event.id))
            elif not misp_event.event.id:
                print_info("Session opened on a new local MISP event.")
            else:
                print_info("Session opened on MISP event {0}.".format(misp_event.event.id))

        if session.file:
            # Loop through all existing sessions and check whether there's another
            # session open on the same file and delete it. This is to avoid
            # duplicates in sessions.
            # NOTE: in the future we might want to remove this if sessions have
            # unique attributes (for example, an history just for each of them).
            for entry in self.sessions:
                if entry.file and entry.file.sha256 == session.file.sha256:
                    self.sessions.remove(entry)

        # Add new session to the list.
        self.sessions.append(session)
        # Mark the new session as the current one.
        self.current = session
Example #10
    def parse_message(self, message_folder):
        db = Database()
        email_header = os.path.join(message_folder, 'InternetHeaders.txt')
        email_body = os.path.join(message_folder, 'Message.txt')

        envelope = headers = email_text = ''
        if os.path.exists(email_header):
            envelope, headers = self.email_headers(email_header)
        if os.path.exists(email_body):
            email_text = open(email_body, 'rb').read()

        tags = 'pst, {0}'.format(message_folder)
        if os.path.exists(os.path.join(message_folder, 'Attachments')):
            for filename in os.listdir(
                    os.path.join(message_folder, 'Attachments')):
                if os.path.isfile(
                        os.path.join(message_folder, 'Attachments', filename)):
                    obj = File(
                        os.path.join(message_folder, 'Attachments', filename))
                    sha256 = hashlib.sha256(
                        open(
                            os.path.join(message_folder, 'Attachments',
                                         filename), 'rb').read()).hexdigest()
                    new_path = store_sample(obj)
                    if new_path:
                        # Add file to the database.
                        db.add(obj=obj, tags=tags)
                    # Add Email Details as a Note
                    # To handle duplicates we use multiple notes
                    headers_body = 'Envelope: \n{0}\nHeaders: \n{1}\n'.format(
                        envelope, headers)
                    db.add_note(sha256, 'Headers', headers_body)

                    # Add a note with email body
                    db.add_note(sha256, 'Email Body', string_clean(email_text))
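self.email_headers() is referenced but not shown. A speculative sketch of such a helper using the standard email module; the envelope format here is an assumption, not Viper's actual output:

import email

def email_headers(self, header_path):
    # Sketch: parse the raw InternetHeaders.txt dump and return a short
    # envelope summary plus the full header block as text.
    with open(header_path, 'rb') as handle:
        msg = email.message_from_bytes(handle.read())
    envelope = 'From: {0}\nTo: {1}\nSubject: {2}'.format(
        msg.get('From'), msg.get('To'), msg.get('Subject'))
    headers = '\n'.join('{0}: {1}'.format(k, v) for k, v in msg.items())
    return envelope, headers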
Example #11
def add_file():
    tags = request.forms.get('tags')
    upload = request.files.get('file')

    tf = tempfile.NamedTemporaryFile()
    tf.write(upload.file.read())
    tf.flush()
    
    # Added to process zip files
    if request.headers.get('compression') == 'zip' or request.headers.get('compression') == 'ZIP':
        with upload_temp() as temp_dir:
            with ZipFile(tf.name) as zf:
                zf.extractall(temp_dir, pwd=request.headers.get('compression_password'))

            stored_files = []
   
            for root, dirs, files in os.walk(temp_dir, topdown=False):
                for name in files:
                    if not name == upload.filename:
                        tf_obj = File(os.path.join(root, name))
                        new_path = store_sample(tf_obj)
                        success = False
                        
                        if new_path:
                            success = db.add(obj=tf_obj, tags=tags)
                       
                        if success:
                            stored_files.append(name)

            if stored_files:
                return jsonize({'message': 'Files added: %s' % ','.join(stored_files)})
    else:
        tf_obj = File(tf.name)
        tf_obj.name = upload.filename

        new_path = store_sample(tf_obj)

        success = False
        if new_path:
            # Add file to the database.
            success = db.add(obj=tf_obj, tags=tags)

        if success:
            return jsonize({'message' : 'added'})
        else:
            response.status = 500
            return jsonize({'message':'Unable to store file'})
Example #12
def main():
    print_warning(
        "WARNING: If you proceed you will lose any changes you might have made to Viper."
    )
    choice = raw_input("Are you sure you want to proceed? [y/N] ")

    if choice.lower() != 'y':
        return

    # Download the latest Zip archive from GitHub's master branch.
    master = download(url)
    # Instantiate a StringIO, we will store the master.zip data in here.
    zip_data = StringIO()
    zip_data.write(master)
    # Initialize the Zip archive.
    zip_file = ZipFile(zip_data, 'r')
    # Obtain a list of all the files contained in the master.zip archive.
    names = zip_file.namelist()

    # Loop through all files and directories in master.zip.
    for name in names[1:]:
        # Split the path in parts.
        name_parts = path_split_all(name)
        # We strip the base directory, which is generated by GitHub in the
        # master.zip archive as {project}-{branch}.
        local_file_path = os.path.join(*name_parts[1:])
        # Skip if the entry is a directory.
        if os.path.isdir(local_file_path):
            continue

        # Read the data of the current file.
        name_data = zip_file.read(name)
        # Calculate MD5 hash of the new file.
        name_data_md5 = hashlib.md5(name_data).hexdigest()

        # If the file already exists locally, we check if its MD5 hash
        # matches the one of the newly downloaded copy. If it does, we
        # obviously skip it.
        exists = False
        if os.path.exists(local_file_path):
            exists = True
            if File(local_file_path).md5 == name_data_md5:
                print_info("{0} up-to-date".format(local_file_path))
                continue

        # Open the local file, whether it exists or not, and either
        # rewrite or write the new content.
        new_local = open(local_file_path, 'w')
        new_local.write(name_data)
        new_local.close()

        if exists:
            print_success("File {0} has been updated".format(local_file_path))
        else:
            print_success(
                "New file {0} has been created".format(local_file_path))

    zip_file.close()
    zip_data.close()
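path_split_all() strips the {project}-{branch} prefix but is not defined in this snippet. A minimal sketch of such a helper built on os.path.split:

import os

def path_split_all(path):
    # Sketch: split a path into all of its components, e.g.
    # 'viper-master/viper/core/ui/console.py' ->
    # ['viper-master', 'viper', 'core', 'ui', 'console.py'].
    parts = []
    while True:
        head, tail = os.path.split(path)
        if tail:
            parts.insert(0, tail)
        if not head or head == path:
            if head:
                parts.insert(0, head)
            break
        path = head
    return parts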
Example #13
    def add_file(self, file_path, tags, parent):
        obj = File(file_path)
        new_path = store_sample(obj)
        if new_path:
            # Add file to the database.
            db = Database()
            db.add(obj=obj, tags=tags, parent_sha=parent)
            return obj.sha256
Example #14
def _add_file(file_path, name, tags, parent_sha):
    obj = File(file_path)
    new_path = store_sample(obj)
    if new_path:
        db = Database()
        db.add(obj=obj, name=name, tags=tags, parent_sha=parent_sha)
        return obj.sha256
    else:
        return None
Example #15
    def test_init_unicode(self, capsys, filename, name):
        instance = File(os.path.join(FIXTURE_DIR, filename))

        assert isinstance(instance, File)
        assert instance.path == os.path.join(FIXTURE_DIR, filename)
        assert instance.name == name

        out, err = capsys.readouterr()
        assert out == ""
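The filename and name arguments suggest the test is parametrized. A minimal sketch of how such a decoration might look, assuming pytest.mark.parametrize, an assumed import path for File, and a made-up fixture name:

import os
import pytest
from viper.common.objects import File  # assumed import path

FIXTURE_DIR = "tests/files"  # hypothetical fixture directory

@pytest.mark.parametrize("filename,name", [
    ("unicode_name_\u00e9.txt", "unicode_name_\u00e9.txt"),  # hypothetical fixture
])
def test_init_unicode(filename, name):
    instance = File(os.path.join(FIXTURE_DIR, filename))
    assert instance.name == name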
Example #16
    def _process_uploaded(db, uploaded_file_path, file_name, tag_list=None, note_title=None, note_body=None):
        """_process_uploaded add one uploaded file to database and to storage then remove uploaded file"""

        log.debug("adding: {} as {}".format(uploaded_file_path, file_name))

        malware = File(uploaded_file_path)
        malware.name = file_name

        if get_sample_path(malware.sha256):
            error = {"error": {"code": "DuplicateFileHash",
                               "message": "File hash exists already: {} (sha256: {})".format(malware.name, malware.sha256)}}
            log.error("adding failed: {}".format(error))
            raise ValidationError(detail=error)  # TODO(frennkie) raise more specific error?! so that we can catch it..?!
        # Try to store file object into database
        if db.add(obj=malware, tags=tag_list):
            # If succeeds, store also in the local repository.
            # If something fails in the database (for example unicode strings)
            # we don't want to have the binary lying in the repository with no
            # associated database record.
            malware_stored_path = store_sample(malware)

            # run autoruns on the stored sample
            if cfg.get('autorun').enabled:
                autorun_module(malware.sha256)

            log.debug("added file \"{0}\" to {1}".format(malware.name, malware_stored_path))

            if note_body and note_title:
                db.add_note(malware.sha256, note_title, note_body)
                log.debug("added note: \"{0}\"".format(note_title))

        else:
            error = {"error": {"code": "DatabaseAddFailed",
                               "message": "Adding File to Database failed: {} (sha256: {})".format(malware.name, malware.sha256)}}
            log.error("adding failed: {}".format(error))
            raise ValidationError(detail=error)

        # clean up
        try:
            os.remove(uploaded_file_path)
        except OSError as err:
            log.error("failed to delete temporary file: {}".format(err))

        return malware
Example #17
File: api.py Project: RATBORG/viper
def add_file():
    tags = request.forms.get('tags')
    upload = request.files.get('file')

    tf = tempfile.NamedTemporaryFile()
    tf.write(upload.file.read())
    tf.flush()
    tf_obj = File(tf.name)
    tf_obj.name = upload.filename

    new_path = store_sample(tf_obj)

    success = False
    if new_path:
        # Add file to the database.
        success = db.add(obj=tf_obj, tags=tags)

    if success:
        return jsonize({'message' : 'added'})
    else:
        return HTTPError(500, 'Unable to store file')
Example #18
def add_file():
    tags = request.forms.get('tags')
    upload = request.files.get('file')

    tf = tempfile.NamedTemporaryFile()
    tf.write(upload.file.read())
    tf.flush()
    tf_obj = File(tf.name)
    tf_obj.name = upload.filename

    new_path = store_sample(tf_obj)

    success = False
    if new_path:
        # Add file to the database.
        success = db.add(obj=tf_obj, tags=tags)

    if success:
        return jsonize({'message': 'added'})
    else:
        return HTTPError(500, 'Unable to store file')
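Handlers like these are exposed through Viper's HTTP API. An illustrative client-side call using requests; the endpoint path and port are assumptions, not taken from the snippets:

import requests  # assumed client-side dependency

with open('sample.exe', 'rb') as handle:
    res = requests.post('http://127.0.0.1:8080/file/add',  # hypothetical endpoint
                        files={'file': handle},
                        data={'tags': 'malware,dropper'})
print(res.status_code, res.text)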
Example #19
def add_file():
    tags = request.forms.get('tag_list')
    uploads = request.files.getlist('file')

    # Set Project
    project = request.forms.get('project')
    if project in project_list():
        __project__.open(project)
    else:
        __project__.open('../')
        project = 'Main'
    db = Database()
    file_list = []
    # Write temp file to disk
    with upload_temp() as temp_dir:
        for upload in uploads:
            file_path = os.path.join(temp_dir, upload.filename)
            with open(file_path, 'w') as tmp_file:
                tmp_file.write(upload.file.read())
            # Zip Files
            if request.forms.get('unzip'):
                zip_pass = request.forms.get('zip_pass')
                try:
                    with ZipFile(file_path) as zf:
                        zf.extractall(temp_dir, pwd=zip_pass)
                    for root, dirs, files in os.walk(temp_dir, topdown=False):
                        for name in files:
                            if not name == upload.filename:
                                file_list.append(os.path.join(root, name))
                except Exception as e:
                    return template('error.tpl',
                                    error="Error with zipfile - {0}".format(e))
            # Non zip files
            else:
                file_list.append(file_path)

        # Add each file
        for new_file in file_list:
            print(new_file)
            obj = File(new_file)
            new_path = store_sample(obj)
            success = True
            if new_path:
                # Add file to the database.
                success = db.add(obj=obj, tags=tags)
                if not success:
                    return template(
                        'error.tpl',
                        error="Unable to Store The File: {0}".format(
                            upload.filename))
    redirect("/project/{0}".format(project))
Example #20
    def run(self):
        # TODO: this function needs to be refactored.

        super(Strings, self).run()
        if self.args is None:
            return

        arg_all = self.args.all
        arg_hosts = self.args.hosts
        arg_scan = self.args.scan

        regexp = r'[\x20\x30-\x39\x41-\x5a\x61-\x7a\-\.:]{4,}'

        if arg_scan:
            db = Database()
            samples = db.find(key='all')

            rows = []
            for sample in samples:
                sample_path = get_sample_path(sample.sha256)

                strings = re.findall(regexp, File(sample_path).data)
                results = self.extract_hosts(strings)

                if results:
                    self.log('info', sample.name)

                    for result in results:
                        self.log('item', result)
        else:
            if not __sessions__.is_set():
                self.log('error', "No open session")
                return

            if os.path.exists(__sessions__.current.file.path):
                strings = re.findall(regexp, __sessions__.current.file.data)

            if arg_all:
                for entry in strings:
                    self.log('', entry)
            elif arg_hosts:
                results = self.extract_hosts(strings)
                for result in results:
                    self.log('item', result)

        if not arg_all and not arg_hosts and not arg_scan:
            self.log('error', 'At least one of the parameters is required')
            self.usage()
Example #21
def add_file(file_path, name=None, url=None, tags=None, parent=None):
    obj = File(file_path, url)
    new_path = store_sample(obj)
    print(new_path)

    if not name:
        name = os.path.basename(file_path)

    # success = True
    if new_path:
        # Add file to the database.
        db = Database()
        db.add(obj=obj, name=name, tags=tags, url=url, parent_sha=parent)
        # AutoRun Modules
        if cfg.autorun.enabled:
            autorun_module(obj.sha256)
            # Close the open session to keep the session table clean
            __sessions__.close()
        return obj.sha256

    else:
        # ToDo Remove the stored file if we can't write to DB
        return
Example #22
    def scan(self, file):
        sample = File(file)
        strings = self.get_strings(sample)

        # Sets up rows - modify these
        rows = [['Name', sample.name], ['MD5', sample.md5]]

        # Get exif data
        metadata = []
        timestamp = ""
        with exiftool.ExifTool() as et:
            metadata = et.get_metadata(file)
        if 'EXE:TimeStamp' in metadata:
            rows.append(['TimeStamp', metadata['EXE:TimeStamp'][:10]])
        if 'EXE:CodeSize' in metadata:
            rows.append(['CodeSize', metadata['EXE:CodeSize']])
        header = ['Key', 'Value']

        rows.append(['PDB Path', self.parse_pdb(strings)])
        rows.append(['IPv4s', self.parse_ips(strings)])
        rows.append(['Emails', self.parse_emails(strings)])

        #Find Emails
        if self.args.emails:
            for email in self.parse_emails(strings):
                self.emails.append(email)

        # Search for a specific string
        if self.args.search_string:
            search_result = self.parse_search(strings, self.args.search_string)
            if search_result:
                rows.append(['Search Results: ', search_result])
                self.log('table', dict(header=header, rows=rows))
                print('')
        else:
            self.log('table', dict(header=header, rows=rows))
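parse_ips and parse_emails are not shown. A minimal sketch of regex-based extractors over the string list; the patterns are illustrative simplifications:

import re

def parse_ips(self, strings):
    # Sketch: pull anything that looks like an IPv4 address out of the
    # extracted strings.
    pattern = re.compile(r'\b(?:\d{1,3}\.){3}\d{1,3}\b')
    return [ip for s in strings for ip in pattern.findall(s)]

def parse_emails(self, strings):
    # Sketch: pull simple email-address candidates out of the extracted
    # strings.
    pattern = re.compile(r'[\w.+-]+@[\w-]+\.[\w.-]+')
    return [addr for s in strings for addr in pattern.findall(s)]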
Example #23
    def cmd_store(self, *args):
        def usage():
            print(
                "usage: store [-h] [-d] [-f <path>] [-s <size>] [-y <type>] [-n <name>] [-t]"
            )

        def help():
            usage()
            print("")
            print("Options:")
            print("\t--help (-h)\tShow this help message")
            print("\t--delete (-d)\tDelete the original file")
            print("\t--folder (-f)\tSpecify a folder to import")
            print("\t--file-size (-s)\tSpecify a maximum file size")
            print("\t--file-type (-y)\tSpecify a file type pattern")
            print("\t--file-name (-n)\tSpecify a file name pattern")
            print("\t--tags (-t)\tSpecify a list of comma-separated tags")
            print("")

        try:
            opts, argv = getopt.getopt(args, 'hdf:s:y:n:t:', [
                'help', 'delete', 'folder=', 'file-size=', 'file-type=',
                'file-name=', 'tags='
            ])
        except getopt.GetoptError as e:
            print(e)
            usage()
            return

        arg_delete = False
        arg_folder = False
        arg_file_size = None
        arg_file_type = None
        arg_file_name = None
        arg_tags = None

        for opt, value in opts:
            if opt in ('-h', '--help'):
                help()
                return
            elif opt in ('-d', '--delete'):
                arg_delete = True
            elif opt in ('-f', '--folder'):
                arg_folder = value
            elif opt in ('-s', '--file-size'):
                arg_file_size = value
            elif opt in ('-y', '--file-type'):
                arg_file_type = value
            elif opt in ('-n', '--file-name'):
                arg_file_name = value
            elif opt in ('-t', '--tags'):
                arg_tags = value

        def add_file(obj, tags=None):
            if get_sample_path(obj.sha256):
                print_warning(
                    "Skip, file \"{0}\" appears to be already stored".format(
                        obj.name))
                return False

            # Try to store file object into database.
            status = self.db.add(obj=obj, tags=tags)
            if status:
                # If succeeds, store also in the local repository.
                # If something fails in the database (for example unicode strings)
                # we don't want to have the binary lying in the repository with no
                # associated database record.
                new_path = store_sample(obj)
                print_success("Stored file \"{0}\" to {1}".format(
                    obj.name, new_path))
            else:
                return False

            # Delete the file if requested to do so.
            if arg_delete:
                try:
                    os.unlink(obj.path)
                except Exception as e:
                    print_warning("Failed deleting file: {0}".format(e))

            return True

        # If the user specified the --folder flag, we walk recursively and try
        # to add all contained files to the local repository.
        # This is not going to open a new session.
        # TODO: perhaps disable or make recursion optional?
        if arg_folder:
            # Check if the specified folder is valid.
            if os.path.isdir(arg_folder):
                # Walk through the folder and subfolders.
                for dir_name, dir_names, file_names in os.walk(arg_folder):
                    # Add each collected file.
                    for file_name in file_names:
                        file_path = os.path.join(dir_name, file_name)

                        if not os.path.exists(file_path):
                            continue
                        # Check if file is not zero.
                        if not os.path.getsize(file_path) > 0:
                            continue

                        # Check if the file name matches the provided pattern.
                        if arg_file_name:
                            if not fnmatch.fnmatch(file_name, arg_file_name):
                                #print_warning("Skip, file \"{0}\" doesn't match the file name pattern".format(file_path))
                                continue

                        # Check if the file type matches the provided pattern.
                        if arg_file_type:
                            if arg_file_type not in File(file_path).type:
                                #print_warning("Skip, file \"{0}\" doesn't match the file type".format(file_path))
                                continue

                        # Check if file exceeds maximum size limit.
                        if arg_file_size:
                            # Obtain file size.
                            if os.path.getsize(file_path) > int(arg_file_size):
                                print_warning(
                                    "Skip, file \"{0}\" is too big".format(
                                        file_path))
                                continue

                        file_obj = File(file_path)

                        # Add file.
                        add_file(file_obj, arg_tags)
            else:
                print_error(
                    "You specified an invalid folder: {0}".format(arg_folder))
        # Otherwise we try to store the currently opened file, if there is any.
        else:
            if __sessions__.is_set():
                if __sessions__.current.file.size == 0:
                    print_warning(
                        "Skip, file \"{0}\" appears to be empty".format(
                            __sessions__.current.file.name))
                    return False

                # Add file.
                if add_file(__sessions__.current.file, arg_tags):
                    # Open session to the new file.
                    self.cmd_open(*[__sessions__.current.file.sha256])
            else:
                print_error("No session opened")
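For reference, an illustrative invocation of this command inside the Viper shell, built from the options parsed above (the folder, size limit and tags are made up): store -f /tmp/samples -y PE32 -s 5000000 -t apt,dropper -d would import every PE32 file under /tmp/samples that is no larger than 5000000 bytes, tag it with apt,dropper, and delete the originals.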
Example #24
    def copy(self,
             id,
             src_project,
             dst_project,
             copy_analysis=True,
             copy_notes=True,
             copy_tags=True,
             copy_children=True,
             _parent_sha256=None):  # noqa
        session = self.Session()

        # make sure to open source project
        __project__.open(src_project)

        # get malware from DB
        malware = session.query(Malware). \
            options(subqueryload(Malware.analysis)). \
            options(subqueryload(Malware.note)). \
            options(subqueryload(Malware.parent)). \
            options(subqueryload(Malware.tag)). \
            get(id)

        # get path and load file from disk
        malware_path = get_sample_path(malware.sha256)
        sample = File(malware_path)
        sample.name = malware.name

        log.debug("Copying ID: {} ({}): from {} to {}".format(
            malware.id, malware.name, src_project, dst_project))
        # switch to destination project, add to DB and store on disk
        __project__.open(dst_project)
        dst_db = Database()
        dst_db.add(sample)
        store_sample(sample)
        print_success("Copied: {} ({})".format(malware.sha256, malware.name))

        if copy_analysis:
            log.debug("copy analysis..")
            for analysis in malware.analysis:
                dst_db.add_analysis(malware.sha256,
                                    cmd_line=analysis.cmd_line,
                                    results=analysis.results)

        if copy_notes:
            log.debug("copy notes..")
            for note in malware.note:
                dst_db.add_note(malware.sha256,
                                title=note.title,
                                body=note.body)

        if copy_tags:
            log.debug("copy tags..")
            dst_db.add_tags(malware.sha256, [x.tag for x in malware.tag])

        if copy_children:
            children = session.query(Malware).filter(
                Malware.parent_id == malware.id).all()
            if not children:
                pass
            else:
                _parent_sha256 = malware.sha256  # set current recursion item as parent
                for child in children:
                    self.copy(child.id,
                              src_project=src_project,
                              dst_project=dst_project,
                              copy_analysis=copy_analysis,
                              copy_notes=copy_notes,
                              copy_tags=copy_tags,
                              copy_children=copy_children,
                              _parent_sha256=_parent_sha256)
                    # restore parent-child relationships
                    log.debug("add parent {} to child {}".format(
                        _parent_sha256, child.sha256))
                    if _parent_sha256:
                        dst_db.add_parent(child.sha256, _parent_sha256)

        # switch back to source project
        __project__.open(src_project)

        # store tuple of ID (in source project) and sha256 of copied samples
        self.copied_id_sha256.append((malware.id, malware.sha256))

        return True
Example #25
    def copy(self, id, src_project, dst_project,
             copy_analysis=True, copy_notes=True, copy_tags=True, copy_children=True, _parent_sha256=None):  # noqa
        session = self.Session()

        # make sure to open source project
        __project__.open(src_project)

        # get malware from DB
        malware = session.query(Malware). \
            options(subqueryload(Malware.analysis)). \
            options(subqueryload(Malware.note)). \
            options(subqueryload(Malware.parent)). \
            options(subqueryload(Malware.tag)). \
            get(id)

        # get path and load file from disk
        malware_path = get_sample_path(malware.sha256)
        sample = File(malware_path)
        sample.name = malware.name

        log.debug("Copying ID: {} ({}): from {} to {}".format(malware.id, malware.name, src_project, dst_project))
        # switch to destination project, add to DB and store on disk
        __project__.open(dst_project)
        dst_db = Database()
        dst_db.add(sample)
        store_sample(sample)
        print_success("Copied: {} ({})".format(malware.sha256, malware.name))

        if copy_analysis:
            log.debug("copy analysis..")
            for analysis in malware.analysis:
                dst_db.add_analysis(malware.sha256, cmd_line=analysis.cmd_line, results=analysis.results)

        if copy_notes:
            log.debug("copy notes..")
            for note in malware.note:
                dst_db.add_note(malware.sha256, title=note.title, body=note.body)

        if copy_tags:
            log.debug("copy tags..")
            dst_db.add_tags(malware.sha256, [x.tag for x in malware.tag])

        if copy_children:
            children = session.query(Malware).filter(Malware.parent_id == malware.id).all()
            if not children:
                pass
            else:
                _parent_sha256 = malware.sha256  # set current recursion item as parent
                for child in children:
                    self.copy(child.id,
                              src_project=src_project, dst_project=dst_project,
                              copy_analysis=copy_analysis, copy_notes=copy_notes, copy_tags=copy_tags,
                              copy_children=copy_children, _parent_sha256=_parent_sha256)
                    # restore parent-child relationships
                    log.debug("add parent {} to child {}".format(_parent_sha256, child.sha256))
                    if _parent_sha256:
                        dst_db.add_parent(child.sha256, _parent_sha256)

        # switch back to source project
        __project__.open(src_project)

        # store tuple of ID (in source project) and sha256 of copied samples
        self.copied_id_sha256.append((malware.id, malware.sha256))

        return True
Example #26
    def cmd_store(self, *args):
        parser = argparse.ArgumentParser(prog="store", description="Store the opened file to the local repository")
        parser.add_argument('-d', '--delete', action="store_true", help="Delete the original file")
        parser.add_argument('-f', '--folder', type=str, nargs='+', help="Specify a folder to import")
        parser.add_argument('-s', '--file-size', type=int, help="Specify a maximum file size")
        parser.add_argument('-y', '--file-type', type=str, help="Specify a file type pattern")
        parser.add_argument('-n', '--file-name', type=str, help="Specify a file name pattern")
        parser.add_argument('-t', '--tags', type=str, nargs='+', help="Specify a list of comma-separated tags")

        try:
            args = parser.parse_args(args)
        except SystemExit:
            return

        if args.folder is not None:
            # Allow spaces in the path.
            args.folder = " ".join(args.folder)

        if args.tags is not None:
            # Remove the spaces in the list of tags
            args.tags = "".join(args.tags)

        def add_file(obj, tags=None):
            if get_sample_path(obj.sha256):
                self.log('warning', "Skip, file \"{0}\" appears to be already stored".format(obj.name))
                return False

            # Try to store file object into database.
            status = self.db.add(obj=obj, tags=tags)
            if status:
                # If succeeds, store also in the local repository.
                # If something fails in the database (for example unicode strings)
                # we don't want to have the binary lying in the repository with no
                # associated database record.
                new_path = store_sample(obj)
                self.log("success", "Stored file \"{0}\" to {1}".format(obj.name, new_path))
            else:
                return False

            # Delete the file if requested to do so.
            if args.delete:
                try:
                    os.unlink(obj.path)
                except Exception as e:
                    self.log('warning', "Failed deleting file: {0}".format(e))

            return True

        # If the user specified the --folder flag, we walk recursively and try
        # to add all contained files to the local repository.
        # This is not going to open a new session.
        # TODO: perhaps disable or make recursion optional?
        if args.folder is not None:
            # Check if the specified folder is valid.
            if os.path.isdir(args.folder):
                # Walk through the folder and subfolders.
                for dir_name, dir_names, file_names in os.walk(args.folder):
                    # Add each collected file.
                    for file_name in file_names:
                        file_path = os.path.join(dir_name, file_name)

                        if not os.path.exists(file_path):
                            continue
                        # Check if file is not zero.
                        if not os.path.getsize(file_path) > 0:
                            continue

                        # Check if the file name matches the provided pattern.
                        if args.file_name:
                            if not fnmatch.fnmatch(file_name, args.file_name):
                                # self.log('warning', "Skip, file \"{0}\" doesn't match the file name pattern".format(file_path))
                                continue

                        # Check if the file type matches the provided pattern.
                        if args.file_type:
                            if args.file_type not in File(file_path).type:
                                # self.log('warning', "Skip, file \"{0}\" doesn't match the file type".format(file_path))
                                continue

                        # Check if file exceeds maximum size limit.
                        if args.file_size:
                            # Obtain file size.
                            if os.path.getsize(file_path) > args.file_size:
                                self.log('warning', "Skip, file \"{0}\" is too big".format(file_path))
                                continue

                        file_obj = File(file_path)

                        # Add file.
                        add_file(file_obj, args.tags)
            else:
                self.log('error', "You specified an invalid folder: {0}".format(args.folder))
        # Otherwise we try to store the currently opened file, if there is any.
        else:
            if __sessions__.is_set():
                if __sessions__.current.file.size == 0:
                    self.log('warning', "Skip, file \"{0}\" appears to be empty".format(__sessions__.current.file.name))
                    return False

                # Add file.
                if add_file(__sessions__.current.file, args.tags):
                    # Open session to the new file.
                    self.cmd_open(*[__sessions__.current.file.sha256])
            else:
                self.log('error', "No session opened")
Example #27
    def run(self):
        super(Similarity, self).run()

        if self.args is None:
            return
        elif self.args.imports and self.args.threshold == 0.75:  #todo: find a better way to check if thresholds haven't been set
            self.log(
                'warning',
                'Adjusting default threshold to 0.97 to scale for imports')
            threshold = 0.97
        elif self.args.threshold:
            threshold = self.args.threshold
            self.log('info',
                     'Setting Jaccard index threshold to ' + str(threshold))

        # Get all samples from viper database
        db = Database()
        samples = db.find(key='all')
        malware_features = dict()

        # Neo4j Setup
        ## Get Url from Config
        neo4j_url = cfg.similarity.url
        ## Get Username from Config
        neo4j_user = cfg.similarity.user
        ## Get Password from Config
        neo4j_pwd = cfg.similarity.pwd
        ## Connect to neo4j data and define a graph
        graph = Graph(neo4j_url, user=neo4j_user, password=neo4j_pwd)
        try:
            graph.delete_all()
        except:
            self.log(
                "Error",
                "Issue deleting graph. Are the credentials correct in the config file?"
            )
            return

        sample_nodes = []

        for sample in samples:
            malware_path = get_sample_path(sample.sha256)
            features = []

            timestamp = ""
            # Check arguments to determine what should be compared
            if self.args.exif:
                if not self.args.strings and not self.args.imports:  # Can I find a better way to do this?
                    features += self.get_exif(malware_path)
                metadata = []
                with exiftool.ExifTool() as et:
                    metadata = et.get_metadata(malware_path)
                if 'EXE:TimeStamp' in metadata:
                    timestamp = metadata['EXE:TimeStamp'][:10]
            if self.args.strings:
                features += self.get_strings(File(malware_path))
            if self.args.imports:
                imports = self.get_apis(malware_path)
                if imports is not None:
                    features += imports
                else:
                    self.log('warning',
                             'No imports found for {0}...'.format(sample.md5))

            # Adds path debug information to nodes
            pdb_label = ""
            if self.args.pdb:
                pdb = self.parse_pdb(malware_path)
                if pdb is not None:
                    self.log('success', 'Found pdb path {0}'.format(pdb))
                    try:
                        ## Was not sure if you had a special purpose behind parsing the pdb string
                        #project_start = pdb.index('\\Projects')
                        #project_end = pdb.index('\\x64\\')
                        #pdb_label = pdb[int(project_start)+9:int(project_end)]
                        pdb_label = pdb
                    except:
                        self.log('error', 'Unexpected pdb path')

            # Set default comparison
            if (not self.args.strings and not self.args.imports
                    and not self.args.exif):
                features += self.get_strings(File(malware_path))

            if len(features) == 0:
                self.log(
                    'error', 'Extracted {0} features from {1}...'.format(
                        len(features), sample.md5))
                continue

            self.log(
                'success', 'Extracted {0} features from {1}...'.format(
                    len(features), sample.md5))

            malware_features[malware_path] = features

            tx = graph.begin()

            #Create new nodes
            sample_node = Node("SampleNode",
                               name=str(sample.sha256),
                               timestamp=timestamp,
                               pdb=pdb_label)
            labels = [sample.sha256, timestamp]
            sample_node.cast(labels)
            tx.create(sample_node)
            tx.commit()
            sample_nodes.append(sample_node)

        # Determine the Jaccard index between malware samples and graph relationships
        self.log('info', 'Starting graphing process')
        for malware1, malware2 in itertools.combinations(sample_nodes, 2):
            # Compute the jaccard index for the current malware pair
            jaccard_index = self.jaccard(
                malware_features[get_sample_path(malware1["name"])],
                malware_features[get_sample_path(malware2["name"])])
            # If the Jaccard index is above the threshold, draw a connection between the nodes
            if jaccard_index > threshold:
                if jaccard_index > 0.95:
                    r = Relationship(malware1, "very_high", malware2)
                elif jaccard_index > 0.88:
                    r = Relationship(malware1, "high", malware2)
                elif jaccard_index > 0.83:
                    r = Relationship(malware1, "moderate", malware2)
                elif jaccard_index > 0.78:
                    r = Relationship(malware1, "low", malware2)
                elif jaccard_index > 0.60:
                    r = Relationship(malware1, "very_low", malware2)

                tx = graph.begin()
                tx.create(r)
                tx.commit()

        self.log('success', 'Finished graphing nodes and relationships')
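self.jaccard() is not included in the snippet. The Jaccard index of two feature sets is the size of their intersection divided by the size of their union; a minimal helper might look like this:

def jaccard(self, features_a, features_b):
    # Jaccard index: |A intersection B| / |A union B|, with 0.0 returned
    # when both feature sets are empty.
    set_a, set_b = set(features_a), set(features_b)
    union = set_a | set_b
    if not union:
        return 0.0
    return len(set_a & set_b) / float(len(union))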
Example #28
def add_file():
    tags = request.forms.get('tag_list')
    uploads = request.files.getlist('file')

    # Set Project
    project = request.forms.get('project')
    if project in project_list():
        __project__.open(project)
    else:
        __project__.open('../')
        project = 'Main'
    db = Database()
    file_list = []
    # Write temp file to disk
    with upload_temp() as temp_dir:
        for upload in uploads:
            file_path = os.path.join(temp_dir, upload.filename)
            with open(file_path, 'w') as tmp_file:
                tmp_file.write(upload.file.read())
            # Zip Files
            if request.forms.get('compression') == 'zip':
                zip_pass = request.forms.get('zip_pass')
                try:
                    with ZipFile(file_path) as zf:
                        zf.extractall(temp_dir, pwd=zip_pass)
                    for root, dirs, files in os.walk(temp_dir, topdown=False):
                        for name in files:
                            if not name == upload.filename:
                                file_list.append(os.path.join(root, name))
                except Exception as e:
                    return template('error.tpl',
                                    error="Error with zipfile - {0}".format(e))
            # GZip Files
            elif request.forms.get('compression') == 'gz':
                try:
                    gzf = GzipFile(file_path, 'rb')
                    decompress = gzf.read()
                    gzf.close()
                    with open(file_path[:-3], "wb") as df:
                        df.write(decompress)
                    file_list.append(file_path[:-3])
                except Exception as e:
                    return template(
                        'error.tpl',
                        error="Error with gzipfile - {0}".format(e))
            # BZip2 Files
            elif request.forms.get('compression') == 'bz2':
                try:
                    bz2f = BZ2File(file_path, 'rb')
                    decompress = bz2f.read()
                    bz2f.close()
                    with open(file_path[:-3], "wb") as df:
                        df.write(decompress)
                    file_list.append(file_path[:-3])
                except Exception as e:
                    return template(
                        'error.tpl',
                        error="Error with bzip2file - {0}".format(e))
            # Tar Files (any, including tar.gz tar.bz2)
            elif request.forms.get('compression') == 'tar':
                try:
                    if not tarfile.is_tarfile(file_path):
                        return template('error.tpl',
                                        error="This is not a tar file")
                    with tarfile.open(file_path, 'r:*') as tarf:
                        tarf.extractall(temp_dir)
                    for root, dirs, files in os.walk(temp_dir, topdown=False):
                        for name in files:
                            if not name == upload.filename:
                                file_list.append(os.path.join(root, name))
                except Exception as e:
                    return template('error.tpl',
                                    error="Error with tarfile - {0}".format(e))
            # Non zip files
            elif request.forms.get('compression') == 'none':
                file_list.append(file_path)

        # Add each file
        for new_file in file_list:
            print(new_file)
            obj = File(new_file)
            new_path = store_sample(obj)
            success = True
            if new_path:
                # Add file to the database.
                success = db.add(obj=obj, tags=tags)
                if not success:
                    return template(
                        'error.tpl',
                        error="Unable to Store The File: {0}".format(
                            upload.filename))
    redirect("/project/{0}".format(project))
Example #29
    def test_add_unicode_py3(self, capsys, filename, name):
        f = File(os.path.join(FIXTURE_DIR, filename))

        instance = Database()
        ret = instance.add(f)
        assert ret is True
Example #30
    def _process_uploaded(db,
                          uploaded_file_path,
                          file_name,
                          tag_list=None,
                          note_title=None,
                          note_body=None):
        """_process_uploaded add one uploaded file to database and to storage then remove uploaded file"""

        log.debug("adding: {} as {}".format(uploaded_file_path, file_name))

        malware = File(uploaded_file_path)
        malware.name = file_name

        if get_sample_path(malware.sha256):
            error = {
                "error": {
                    "code":
                    "DuplicateFileHash",
                    "message":
                    "File hash exists already: {} (sha256: {})".format(
                        malware.name, malware.sha256)
                }
            }
            log.error("adding failed: {}".format(error))
            raise ValidationError(
                detail=error
            )  # TODO(frennkie) raise more specific error?! so that we can catch it..?!
        # Try to store file object into database
        if db.add(obj=malware, tags=tag_list):
            # If succeeds, store also in the local repository.
            # If something fails in the database (for example unicode strings)
            # we don't want to have the binary lying in the repository with no
            # associated database record.
            malware_stored_path = store_sample(malware)

            # run autoruns on the stored sample
            if cfg.get('autorun').enabled:
                autorun_module(malware.sha256)

            log.debug("added file \"{0}\" to {1}".format(
                malware.name, malware_stored_path))

            if note_body and note_title:
                db.add_note(malware.sha256, note_title, note_body)
                log.debug("added note: \"{0}\"".format(note_title))

        else:
            error = {
                "error": {
                    "code":
                    "DatabaseAddFailed",
                    "message":
                    "Adding File to Database failed: {} (sha256: {})".format(
                        malware.name, malware.sha256)
                }
            }
            log.error("adding failed: {}".format(error))
            raise ValidationError(detail=error)

        # clean up
        try:
            os.remove(uploaded_file_path)
        except OSError as err:
            log.error("failed to delete temporary file: {}".format(err))

        return malware
Example #31
    def run(self, *args):
        try:
            args = self.parser.parse_args(args)
        except SystemExit:
            return

        if args.folder is not None:
            # Allow spaces in the path.
            args.folder = " ".join(args.folder)

        if args.tags is not None:
            # Remove the spaces in the list of tags
            args.tags = "".join(args.tags)

        def add_file(obj, tags=None):
            if get_sample_path(obj.sha256):
                self.log(
                    'warning',
                    "Skip, file \"{0}\" appears to be already stored".format(
                        obj.name))
                return False

            if __sessions__.is_attached_misp(quiet=True):
                if tags is not None:
                    tags += ',misp:{}'.format(
                        __sessions__.current.misp_event.event.id)
                else:
                    tags = 'misp:{}'.format(
                        __sessions__.current.misp_event.event.id)

            # Try to store file object into database.
            status = Database().add(obj=obj, tags=tags)
            if status:
                # If succeeds, store also in the local repository.
                # If something fails in the database (for example unicode strings)
                # we don't want to have the binary lying in the repository with no
                # associated database record.
                new_path = store_sample(obj)
                self.log(
                    "success",
                    "Stored file \"{0}\" to {1}".format(obj.name, new_path))

            else:
                return False

            # Delete the file if requested to do so.
            if args.delete:
                try:
                    os.unlink(obj.path)
                except Exception as e:
                    self.log('warning', "Failed deleting file: {0}".format(e))

            return True

        # If the user specified the --folder flag, we walk recursively and try
        # to add all contained files to the local repository.
        # This is not going to open a new session.
        # TODO: perhaps disable or make recursion optional?
        if args.folder is not None:
            # Check if the specified folder is valid.
            if os.path.isdir(args.folder):
                # Walk through the folder and subfolders.
                for dir_name, dir_names, file_names in walk(args.folder):
                    # Add each collected file.
                    for file_name in file_names:
                        file_path = os.path.join(dir_name, file_name)

                        if not os.path.exists(file_path):
                            continue
                        # Check if file is not zero.
                        if not os.path.getsize(file_path) > 0:
                            continue

                        # Check if the file name matches the provided pattern.
                        if args.file_name:
                            if not fnmatch.fnmatch(file_name, args.file_name):
                                # self.log('warning', "Skip, file \"{0}\" doesn't match the file name pattern".format(file_path))
                                continue

                        # Check if the file type matches the provided pattern.
                        if args.file_type:
                            if args.file_type not in File(file_path).type:
                                # self.log('warning', "Skip, file \"{0}\" doesn't match the file type".format(file_path))
                                continue

                        # Check if file exceeds maximum size limit.
                        if args.file_size:
                            # Obtain file size.
                            if os.path.getsize(file_path) > args.file_size:
                                self.log(
                                    'warning',
                                    "Skip, file \"{0}\" is too big".format(
                                        file_path))
                                continue

                        file_obj = File(file_path)

                        # Add file.
                        added = add_file(file_obj, args.tags)
                        if added and __config__.get('autorun').enabled:
                            autorun_module(file_obj.sha256)
                            # Close the open session to keep the session table clean
                            __sessions__.close()

            else:
                self.log(
                    'error',
                    "You specified an invalid folder: {0}".format(args.folder))
        # Otherwise we try to store the currently opened file, if there is any.
        else:
            if __sessions__.is_set():
                if __sessions__.current.file.size == 0:
                    self.log(
                        'warning',
                        "Skip, file \"{0}\" appears to be empty".format(
                            __sessions__.current.file.name))
                    return False

                # Add file.
                if add_file(__sessions__.current.file, args.tags):
                    # TODO: review this. Is there a better way?
                    # Open session to the new file.
                    Open().run(*[__sessions__.current.file.sha256])
                    if __config__.get('autorun').enabled:
                        autorun_module(__sessions__.current.file.sha256)
            else:
                self.log(
                    'error',
                    "No open session. This command expects a file to be open.")