Example #1
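These examples are standalone functions lifted from a file-packing pipeline. All of them assume the standard-library imports below, plus project-local modules (`utils`, and in Example #4 also `exif`, `pe_analysis`, and `yara_engine`) that are not part of this listing:

import calendar
import os
import random
import shutil
import subprocess
import time
from pathlib import Path

import utils  # project-local helpers (hashing, file typing, gzip); not shown here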
def packer_mpress(filepath):
    '''
    MPRESS packs the file in place and might fail, so we compare the file's hash before and after packing.
    '''
    try:
        # Note: '-q -m' appears twice, so it is twice as likely to be picked
        random_options = ['-q', '-q -m', '-q -r', '-q -s', '-q -m']
        # random.randint is inclusive at both ends, so indexing with
        # randint(0, len(random_options)) could raise IndexError;
        # random.choice avoids the off-by-one
        selected_option = random.choice(random_options)
        orig_hashes = utils.get_hashes(filepath=filepath)
        orig_hash = orig_hashes.get('md5', '')
        print("\nORIGINAL HASH: {}".format(orig_hash))
        print(orig_hashes)
        proc1 = subprocess.Popen(r"C:\packers\mpress\mpress.exe {} {}".format(
            selected_option, filepath))
        # Give the packer a 15-second budget, then kill it in case it hung
        time.sleep(15.0)
        proc1.kill()
        time.sleep(1.0)
        new_hashes = utils.get_hashes(filepath=filepath)
        new_hash = new_hashes.get('md5', '')
        print("NEW HASH: {}".format(new_hash))
        print(new_hashes)
        # They are both valid MD5 values, but not equal
        if len(orig_hash) == 32 and len(
                new_hash) == 32 and orig_hash != new_hash:
            return filepath
        return ''
    except Exception:
        return ''
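`utils` is not shown in this listing. A minimal sketch of what `utils.get_hashes` might look like, given that callers treat its return value as a dict of hex digests (the code above reads the 'md5' key, Example #5 reads 'sha256'):

import hashlib

def get_hashes(filepath):
    # Hypothetical stand-in for utils.get_hashes: stream the file once and
    # return hex digests keyed by algorithm name
    digests = {'md5': hashlib.md5(), 'sha256': hashlib.sha256()}
    with open(filepath, 'rb') as f:
        for chunk in iter(lambda: f.read(65536), b''):
            for digest in digests.values():
                digest.update(chunk)
    return {name: digest.hexdigest() for name, digest in digests.items()}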
Example #2
def packer_pecompact(filepath):
    '''
    PECompact packs the file in place and might fail, so we compare the file's hash before and after packing.
    '''
    try:
        options = ''
        for option in [
                '/StripDebug', '/MultiCompress', '/TruncateLastSection',
                '/StripFixups', '/MergeSections', '/KeepOverlay',
                '/EnforceMemoryProtection', '/CompressResources'
        ]:
            if random.randint(0, 1):
                options = '{} {}'.format(options, option)
        orig_hashes = utils.get_hashes(filepath=filepath)
        orig_hash = orig_hashes.get('md5', '')
        print("\nORIGINAL HASH: {}".format(orig_hash))
        print(orig_hashes)
        proc1 = subprocess.Popen(
            r"C:\packers\PECompact\pec2.exe {} /Quiet {}".format(
                filepath, options))
        time.sleep(15.0)
        # Clean up
        proc1.kill()
        time.sleep(1.0)
        new_hashes = utils.get_hashes(filepath=filepath)
        new_hash = new_hashes.get('md5', '')
        print("NEW HASH: {}".format(new_hash))
        print(new_hashes)
        # They are both valid MD5 values, but not equal
        if len(orig_hash) == 32 and len(
                new_hash) == 32 and orig_hash != new_hash:
            return filepath
        return ''
    except Exception:
        return ''
Example #3
def packer_mew(filepath):
    '''
    MEW packs the file in place and might fail, so we compare the file's hash before and after packing.
    '''
    # Save the current working directory before entering the try block, so
    # the except handler can restore it without risking a NameError
    cwd = os.getcwd()
    try:
        # Change into the file's directory so mew11.exe runs next to it
        os.chdir(os.path.dirname(filepath))
        shutil.copyfile("C:\\packers\\MEW\\mew11.exe",
                        os.path.join(os.path.dirname(filepath), "mew11.exe"))

        orig_hashes = utils.get_hashes(filepath=filepath)
        orig_hash = orig_hashes.get('md5', '')
        print("\nORIGINAL HASH: {}".format(orig_hash))
        proc1 = subprocess.Popen(r"mew11.exe {}".format(
            os.path.basename(filepath)))
        time.sleep(15.0)
        proc1.kill()
        time.sleep(1.0)
        new_hashes = utils.get_hashes(filepath=filepath)
        os.chdir(cwd)

        new_hash = new_hashes.get('md5', '')
        print("NEW HASH: {}".format(new_hash))
        # They are both valid MD5 values, but not equal
        if len(orig_hash) == 32 and len(
                new_hash) == 32 and orig_hash != new_hash:
            return filepath
        return ''
    except Exception:
        # cwd was saved before the try block, so this restore is always safe
        os.chdir(cwd)
        return ''
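Example #5 below hands packing off to a `pack_file` routine that is not included in this listing. A plausible sketch, assuming it simply dispatches on the packer name to the three functions above (each returns the packed path on success, or '' on failure):

def pack_file(filepath, packer):
    # Hypothetical dispatcher mapping packer names to the functions above
    packers = {
        'mpress': packer_mpress,
        'pecompact': packer_pecompact,
        'mew': packer_mew,
    }
    pack_func = packers.get(packer.lower())
    return pack_func(filepath) if pack_func else ''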
Example #4
def analyze_file(prepared_input):
    combined_results = {}
    filepath = prepared_input['filepath']
    yara_rules = prepared_input.get('yara_rules', None)
    full_details = prepared_input.get('full_details', False)
    temp_path = prepared_input.get('temp_path', './temp')
    label = prepared_input.get('label', 'n/a')

    individual_results = [{'filename': filepath, 'label': label}]

    is_gz = False
    if filepath.endswith('.gz'):
        is_gz = True
        temp_path_this_file = os.path.join(temp_path,
                                           os.path.basename(filepath))
        os.makedirs(temp_path_this_file, exist_ok=True)
        temp_name = os.path.join(
            temp_path_this_file,
            '{}.decompressed'.format(os.path.basename(filepath)))
        utils.decompress_file(filepath, temp_name)
        filepath = temp_name

    # Hashes / Strings
    try:
        individual_results.append(utils.get_strings(filepath=filepath))
        individual_results.append(utils.get_hashes(filepath=filepath))
        individual_results.append(utils.get_type(filepath=filepath))
    except Exception as e:
        individual_results.append({
            'msg_basic_file_analysis':
            'Basic file analysis error: {}'.format(e)
        })

    # EXIF
    try:
        exif_instance = exif.Exif_Engine()
        exif_results = exif_instance.analyze(filepath)
        exif_results_renamed = {
            'exif_{}'.format(key): value
            for key, value in exif_results.items()
        }
        individual_results.append(exif_results_renamed)
    except Exception as e:
        individual_results.append(
            {'msg_exif': 'Exif result error: {}'.format(e)})

    # PE
    try:
        pe_analyzer = pe_analysis.PEFile(filepath)
        pe_analyzer.analyze()
        pe_results = pe_analyzer.summarize_results()
        pe_results_renamed = {
            'pe_{}'.format(key): value
            for key, value in pe_results.items()
        }
        individual_results.append(pe_results_renamed)
    except Exception as e:
        individual_results.append(
            {'msg_pe': 'PE analysis result error: {}'.format(e)})

    # Yara
    try:
        if yara_rules is not None:
            yara_analyzer = yara_engine.Yara_Engine(yara_rules)
            yara_result = yara_analyzer.analyze(filepath)
            yara_result_renamed = {
                'yara_{}'.format(key): value
                for key, value in yara_result.items()
            }
            individual_results.append(yara_result_renamed)
    except Exception as e:
        individual_results.append(
            {'msg_yara': 'Yara result error: {}'.format(e)})

    for individual_result in individual_results:
        combined_results.update(individual_result)

    # Clean up
    if is_gz:
        try:
            shutil.rmtree(temp_path_this_file)
        except Exception as e:
            print("Error removing temp directory: {}".format(e))

    return combined_results
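`analyze_file` above and `process_file` below both call `utils.decompress_file`, and `process_file` also calls `utils.compress_file`. Minimal sketches of those helpers, assuming the .gz files are plain gzip streams:

import gzip
import shutil

def decompress_file(src_path, dst_path):
    # Hypothetical gzip helper: inflate src_path into dst_path
    with gzip.open(src_path, 'rb') as src, open(dst_path, 'wb') as dst:
        shutil.copyfileobj(src, dst)

def compress_file(src_path, dst_path):
    # Hypothetical gzip helper: deflate src_path into dst_path
    with open(src_path, 'rb') as src, gzip.open(dst_path, 'wb') as dst:
        shutil.copyfileobj(src, dst)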
Example #5
def process_file(input_file):
    orig_filepath = input_file.get('filepath', 'none')
    output_dir = input_file.get('output_dir', 'none')
    temp_dir = input_file.get('temp_dir', 'none')
    packer = input_file.get('packer', 'none')
    result = input_file
    input_file_temp_dir = 'none'

    try:
        if not os.path.exists(orig_filepath):
            result['status'] = 'Input file did not exist'
            return result

        # Ensure our directories exist
        base_filename = os.path.basename(orig_filepath)
        input_file_temp_dir = os.path.join(temp_dir, base_filename)
        os.makedirs(input_file_temp_dir, exist_ok=True)
        os.makedirs(output_dir, exist_ok=True)
        temp_filepath = os.path.join(input_file_temp_dir, base_filename)

        # Either decompress the file (if gzip-compressed) or copy it as-is
        if orig_filepath.endswith('.gz'):
            temp_filepath = temp_filepath[:-3]
            utils.decompress_file(orig_filepath, temp_filepath)
        else:
            shutil.copy(orig_filepath, temp_filepath)

        hashes = utils.get_hashes(filepath=temp_filepath)
        orig_hashes = {
            'orig_{}'.format(key): value
            for key, value in hashes.items()
        }
        result.update(orig_hashes)

        # Call packing routine
        packed_filepath = pack_file(temp_filepath, packer)
        if packed_filepath != '':
            hashes = utils.get_hashes(filepath=packed_filepath)
            packed_hashes = {
                'packed_{}'.format(key): value
                for key, value in hashes.items()
            }
            result.update(packed_hashes)
            new_packed_filename = '{}.gz'.format(hashes.get('sha256', 'error'))
            new_packed_filepath = os.path.join(output_dir, new_packed_filename)
            utils.compress_file(packed_filepath, new_packed_filepath)
        else:
            result['status'] = 'Failed to pack file: {}'.format(temp_filepath)
            shutil.rmtree(input_file_temp_dir, ignore_errors=True)
            return result

        # Destroy temporary space
        shutil.rmtree(input_file_temp_dir, ignore_errors=True)
        result['status'] = 'Success'

        # Some packers (e.g. MEW and PECompact) leave .tmp files behind;
        # remove stale ones so they don't fill the disk
        try:
            temp_files = [
                str(filename)
                for filename in Path(os.getenv("TEMP")).glob('*.tmp*')
            ]
            now = calendar.timegm(time.gmtime())
            for filename in temp_files:
                filetime = os.path.getctime(filename)
                # delete temp files more than a minute old
                if now - filetime > 60:
                    os.remove(filename)
        except Exception:
            # Best-effort cleanup; ignore failures
            pass

    except Exception as e:
        result['status'] = 'Unrecoverable error: {}'.format(e)
        if input_file_temp_dir != 'none':
            shutil.rmtree(input_file_temp_dir, ignore_errors=True)
        return result

    return result
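A usage sketch: `process_file` takes a dict describing one input file and returns the same dict with status and hash fields added, which makes it easy to map over a worker pool. The paths and packer name below are placeholders:

result = process_file({
    'filepath': r'C:\samples\sample.exe.gz',  # placeholder path
    'output_dir': r'C:\packed',               # placeholder path
    'temp_dir': r'C:\temp_work',              # placeholder path
    'packer': 'mpress',
})
print(result['status'])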