Example #1
def init(args):
    config = args.config
    print(Back.BLACK + Fore.GREEN + """
starting program loaded for B1 battle droid """ + Back.BLACK + Fore.BLUE + Style.BRIGHT + """
          ||/__'`.
          |//()'-.:
          |-.||
          |o(o)
          |||\\\  .==._
          |||(o)==::'
           `|T  ""
            ()
            |\\
            ||\\
            ()()
            ||//
            |//
           .'=`=.
    """)

    # parse config
    attack_range_config = Path(config)
    if attack_range_config.is_file():
        print(Back.BLACK + Fore.GREEN + "attack_range is using config at path " + Style.BRIGHT + "{0}".format(
            attack_range_config))
        configpath = str(attack_range_config)
    else:
        print("ERROR: attack_range failed to find a config file")
        sys.exit(1)

    # Parse config
    parser = CustomConfigParser()
    config = parser.load_conf(configpath)

    log = logger.setup_logging(config['log_path'], config['log_level'])
    log.info("INIT - attack_range v" + str(VERSION))

    if config['cloud_provider'] == 'azure':
        os.environ["AZURE_SUBSCRIPTION_ID"] = config['azure_subscription_id']

    if config['attack_range_password'] == 'Pl3ase-k1Ll-me:p':
        log.error('ERROR: please change attack_range_password in attack_range.conf')
        sys.exit(1)

    if config['cloud_provider'] == 'azure' and config['zeek_sensor'] == '1':
        log.error('ERROR: zeek sensor is only available for aws at the moment. Please change zeek_sensor to 0 and try again.')
        sys.exit(1)

    if config['cloud_provider'] == 'aws' and config['windows_client'] == '1':
        log.error('ERROR: windows client is only supported for Azure.')
        sys.exit(1)

    return TerraformController(config, log), config, log
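
Note: init() expects an argparse-style namespace with a config attribute. A minimal, hypothetical driver is sketched below; the flag name, default config path, and wiring are assumptions for illustration, not the project's actual entry point.

import argparse

def main():
    parser = argparse.ArgumentParser(description="attack_range")
    # --config is assumed here; the real CLI may expose a different flag name
    parser.add_argument("-c", "--config", default="attack_range.conf",
                        help="path to the attack_range configuration file")
    args = parser.parse_args()

    controller, config, log = init(args)
    log.info("controller ready for cloud provider " + config['cloud_provider'])

if __name__ == "__main__":
    main()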
Example #2
def init(args):
    config = args.config
    print("""
starting program loaded for B1 battle droid

          .-~~~-.
  .- ~ ~-(       )_ _
 /                     ~ -.
|   Cloud Attack Range     \
 \                         .'
   ~- . _____________ . -~
          ||/__'`.
          |//()'-.:
          |-.||
          |o(o)
          |||\\\  .==._    
          |||(o)==::'
           `|T  ""
            ()
            |\\
            ||\\
            ()()
            ||//
            |//
           .'=`=.
    """)

    # parse config
    attack_range_config = Path(config)

    if attack_range_config.is_file():
        print("attack_range is using config at path {0}".format(
            attack_range_config))
        configpath = str(attack_range_config)
    else:
        print("ERROR: attack_range failed to find a config file")
        sys.exit(1)

    # Parse config
    parser = CustomConfigParser()
    config = parser.load_conf(configpath)

    log = logger.setup_logging(config['log_path'], config['log_level'])
    log.info("INIT - attack_range v" + str(VERSION))

    # if ARG_VERSION:
    #     log.info("version: {0}".format(VERSION))
    #     sys.exit(0)

    return TerraformController(config, log), config, log
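
Both examples above call logger.setup_logging(log_path, log_level) and treat the return value as a standard logger. The project's own helper is not shown on this page; the sketch below is an assumed, minimal stand-in built only on the standard logging module, useful if you want to run the snippets outside the original repository.

import logging

def setup_logging(log_path, log_level):
    # Resolve "INFO"/"DEBUG"/... to a logging constant, defaulting to INFO
    level = getattr(logging, str(log_level).upper(), logging.INFO)
    log = logging.getLogger("attack_range")
    log.setLevel(level)
    handler = logging.FileHandler(log_path)
    handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
    log.addHandler(handler)
    return log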
Example #3
    attack_range_config = Path(config)
    if attack_range_config.is_file():
        print("attack_range is using config at path {0}".format(
            attack_range_config))
        configpath = str(attack_range_config)
    else:
        print(
            "ERROR: attack_range failed to find a config file at {0} .. exiting"
            .format(attack_range_config))
        sys.exit(1)

    # Parse config
    parser = CustomConfigParser()
    config = parser.load_conf(configpath)

    log = logger.setup_logging(config['log_path'], config['log_level'])
    log.info("INIT - attack_range v" + str(VERSION))

    if ARG_VERSION:
        log.info("version: {0}".format(VERSION))
        sys.exit(0)

    if action == 'simulate' and not target:
        log.error('ERROR: Specify target for attack simulation')
        sys.exit(1)

    if action == 'test' and not test_file:
        log.error('ERROR: Specify test file --test_file to execute.')
        sys.exit(1)

    if action == "" and not list_machines:
Example #4
def main():
    """ main function """
    parser = argparse.ArgumentParser(description='this is used to get the IP address for the lynis audit only')
    parser.add_argument('-env', '--environment', required=True, help='The cloud on which the test-suite is to be run',
                        choices=['aws', 'gcp', 'azure'])
    parser.add_argument('-aip', '--audit_ip', required=False, help='The IP for which the lynis audit needs to be done; by default tries root/Administrator if a username is not provided')
    parser.add_argument('-u', '--user_name', required=False, help='The username to log in as, for a specific user')
    parser.add_argument('-pem', '--pem_file', required=False, help='The pem file used to access the AWS instance')
    parser.add_argument('-p', '--password', required=False, action='store_true', dest='password', help='hidden password prompt')
    parser.add_argument('-pId', '--project_id', help='Project ID for which the GCP audit needs to be run. Can be retrieved using `gcloud projects list`')
    parser.add_argument('-az_u', '--azure_user', required=False, help='username of the azure account, optionally used if you want to run the azure audit with no user interaction.')
    parser.add_argument('-az_p', '--azure_pass', required=False, help='password of the azure account, optionally used if you want to run the azure audit with no user interaction.')
    parser.add_argument('-o', '--output', required=False, default="cs-audit.log", help='writes a JSON log of an audit, ideal for consumption into SIEMs like ELK and Splunk. Defaults to cs-audit.log')
    parser.add_argument("-w", "--wipe", required=False, default=False, action='store_true',
                        help="rm -rf reports/ folder before executing an audit")
    parser.add_argument('-n', '--number', required=False, help='Number of reports to retain for a particular environment and user/project.')

    args = parser.parse_args()

    # set up logging
    log = logger.setup_logging(args.output, "INFO")

    log.info("starting cloud security suite v1.0")

    if args.number and args.wipe:
        print("Warning: the -w and -n flags can't be used at the same time")
        exit(1)
    elif args.number:
        try:
            int(args.number)
        except ValueError:
            print("Please provide a number for the -n option.")
            print("EXITING!!")
            exit(1)

    if args.password:
        password = getpass()

    
    if args.wipe:
        log.info("wiping reports/ folder before running")
        rm.rm("reports/")
    

    if args.environment == 'gcp':
        from modules import gcpaudit
        if not args.project_id:
            print ("Please pass project ID for the GCP Audit")
            print ("Exiting !!!")
            exit(0)
        else:
            log.info("running gcp audit")
            gcpaudit.gcp_audit(args.project_id)
            log.info("completed gcp audit")

    elif args.environment == 'aws':
        from modules import awsaudit
        from modules import merger
        from modules import localaudit
        if args.audit_ip:
            if not(args.user_name):
                args.user_name = None
            if not(args.pem_file):
                args.pem_file = None
            if not(args.password):
                password = None
            log.info("running aws local audit")
            localaudit.local_audit(args.audit_ip, args.user_name, args.pem_file, password)
            log.info("completed aws local audit")
            exit(0)
        else:
            log.info("running aws audit")
            awsaudit.aws_audit()
            merger.merge()
            log.info("completed aws audit")
        

    elif args.environment == 'azure':
        if args.azure_user and args.azure_pass:
            print("using azure credentials passed via cli")
            subprocess.call(['az', 'login', '-u', args.azure_user, '-p', args.azure_pass])
        else:
            print("azure authentication required")
            subprocess.call(['az', 'login'])
        log.info("running azure audit")
        from modules import azureaudit
        azureaudit.azure_audit()
        log.info("completed azure audit")


    if args.number and int(args.number) > 0 and not args.wipe:
        from modules import retainnumberofreports
        retainnumberofreports.retain_reports(args.environment, int(args.number))
        exit(0)
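
Example #4 only prompts for a password when the -p flag is present: a store_true flag combined with getpass(), so the secret never appears on the command line. A stripped-down sketch of that pattern (names are illustrative, not from the project):

import argparse
from getpass import getpass

parser = argparse.ArgumentParser(description="password prompt pattern (illustrative)")
parser.add_argument('-p', '--password', action='store_true',
                    help='prompt for a hidden password instead of passing it on argv')
args = parser.parse_args()

# getpass() reads without echoing, so the secret stays out of argv and shell history
password = getpass() if args.password else None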
Example #5
    def __init__(self):
        self.args = parse_args()

        # default path for configs
        self.app_config_dir = os.path.abspath("../config")

        # Update args from config file
        if self.args.config_file:
            abs_config_file = os.path.abspath(
                os.path.dirname(self.args.config_file))
            self.args.__dict__.update(
                AppetiteArgs.load_args(self.args.config_file))
            self.app_config_dir = abs_config_file

        self.app_commands_file = os.path.join(self.app_config_dir,
                                              "commands.conf")

        if self.args.command_conf:
            self.app_commands_file = os.path.join(
                os.path.abspath(os.path.expandvars(self.args.command_conf)))

        AppetiteArgs.args_check(self.args)

        # Set up logging after args are set
        Logger.setup_logging('appetite_%s' % self.args.refname,
                             self.args.refname, True,
                             self.args.disable_logging, self.args.silent,
                             self.args.logging_path)

        # Get path for lock file
        self.scratch_location = os.path.join(
            os.path.abspath(os.path.expandvars(self.args.scratch_dir)),
            self.args.refname)

        self.run_check.set_lockfile(
            os.path.join(self.scratch_location, Helpers.APPETITE_LOCKFILE))

        # Makes sure there is only one instance of this script running
        if self.run_check.lock().is_running:
            Logger.info("Appetite is already processing")
            self.print_track_info(False, Helpers.get_track())
            return

        # Get host classes for filtering
        self.host_classes = self.args.host_classes.split(" ") \
            if isinstance(self.args.host_classes, basestring) \
            else self.args.host_classes

        # Reverse sorting needed for name filtering
        self.host_classes.sort(reverse=True)

        # Get boot ordering
        self.boot_ordering = Helpers.get_enchanced_boot_order(
            self.args.boot_order, self.host_classes)

        self.repo_name = self.args.repo_url.split('/')[-1].split('.')[0]

        self.repo_path = os.path.join(self.scratch_location, self.repo_name)
        self.apps_folder = os.path.join(self.repo_path, self.args.apps_folder)
        self.manifest_path = os.path.join(self.repo_path,
                                          Consts.CONFIG_PATH_NAME,
                                          self.args.apps_manifest)
        self.tmp_folder = os.path.join(self.scratch_location,
                                       self.args.tmp_folder)
        self.meta_folder = os.path.join(self.scratch_location, 'meta')

        self.tars_folder = os.path.join(self.tmp_folder, 'tars')
        self.hosts_folder = os.path.join(self.tmp_folder, 'hosts')
        self.remote_apps_path = os.path.normpath(self.args.app_folder)
        self.base_location, self.base_name = os.path.split(
            self.remote_apps_path)
        self.name_formatting = self.args.name_formatting.strip('"\'')
        self.meta_name = "%s%s" % (Consts.APPS_METADATE_FILENAME,
                                   self.args.refname)
        self.meta_remote_folder = os.path.join(self.remote_apps_path,
                                               Consts.META_DIR)
        self.meta_remote_logs_folder = os.path.join(
            self.meta_remote_folder, Consts.HOST_LOGS_FOLDER_NAME)
        self.meta_remote_file = "%s.json" % os.path.join(
            self.meta_remote_folder, self.meta_name)

        if self.args.clean:
            Helpers.delete_path(self.scratch_location)

        self.repo_manager = RepoManager(self.repo_name, self.args.repo_url,
                                        self.args.repo_branch, "",
                                        self.scratch_location,
                                        self.args.apps_manifest,
                                        self.args.dryrun)

        Logger.debug_on(self.args.debug)

        self.ssh_app_commands = None
        self.deployment_manager = None
        self.template_values = {}
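
Examples #5 and #6 merge values from a config file directly into the argparse namespace via args.__dict__.update(...). A minimal, generic sketch of that merge pattern follows; the loader is a stand-in for AppetiteArgs.load_args, not the project's code.

import argparse
import json

def load_args_from_file(path):
    # Stand-in loader: the real project may read INI/YAML instead of JSON
    with open(path) as handle:
        return json.load(handle)

parser = argparse.ArgumentParser()
parser.add_argument('--config-file')
parser.add_argument('--refname', default='master')
args = parser.parse_args()

if args.config_file:
    # Values from the file overwrite whatever came from the command line
    args.__dict__.update(load_args_from_file(args.config_file))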
Example #6
    def __init__(self, is_already_running):
        self.args = parse_args()

        # default path for configs
        self.app_config_dir = os.path.abspath("../config")

        # Update args from config file
        if self.args.config_file:
            abs_config_file = os.path.abspath(
                os.path.dirname(self.args.config_file))
            self.args.__dict__.update(
                AppetiteArgs.load_args(self.args.config_file))
            self.app_config_dir = abs_config_file

        self.app_commands_file = os.path.join(self.app_config_dir,
                                              "commands.conf")

        if self.args.command_conf:
            self.app_commands_file = os.path.join(
                os.path.abspath(os.path.expandvars(self.args.command_conf)))

        AppetiteArgs.args_check(self.args)

        # Set up logging after args are set
        Logger.setup_logging('appetite_%s' % self.args.refname,
                             self.args.refname, True,
                             self.args.disable_logging, self.args.silent,
                             self.args.logging_path)

        if is_already_running:
            Logger.info("Appetite is already processing")
            self.print_track_info(False, Helpers.get_track())
            return

        # Get host classes for filtering
        self.host_classes = self.args.host_classes.split(" ") \
            if isinstance(self.args.host_classes, basestring) \
            else self.args.host_classes

        # Reverse sorting needed for name filtering
        self.host_classes.sort(reverse=True)

        # Get boot ordering
        self.boot_ordering = Helpers.get_enchanced_boot_order(
            self.args.boot_order, self.host_classes)

        self.repo_name = self.args.repo_url.split('/')[-1].split('.')[0]
        self.scratch_location = os.path.join(
            os.path.abspath(os.path.expandvars(self.args.scratch_dir)),
            self.args.refname)
        self.repo_path = os.path.join(self.scratch_location, self.repo_name)
        self.apps_folder = os.path.join(self.repo_path, self.args.apps_folder)
        self.manifest_path = os.path.join(self.repo_path,
                                          Consts.CONFIG_PATH_NAME,
                                          self.args.apps_manifest)
        self.tmp_folder = os.path.join(self.scratch_location,
                                       self.args.tmp_folder)
        self.meta_folder = os.path.join(self.scratch_location, 'meta')
        self.tars_folder = os.path.join(self.tmp_folder, 'tars')
        self.hosts_folder = os.path.join(self.tmp_folder, 'hosts')
        self.remote_apps_path = os.path.normpath(self.args.app_folder)
        self.base_location, self.base_name = os.path.split(
            self.remote_apps_path)
        self.name_formatting = self.args.name_formatting.strip('"\'')
        self.meta_name = "%s%s" % (Consts.APPS_METADATE_FILENAME,
                                   self.args.refname)
        self.meta_remote_folder = os.path.join(self.remote_apps_path,
                                               Consts.META_DIR)
        self.meta_remote_logs_folder = os.path.join(
            self.meta_remote_folder, Consts.HOST_LOGS_FOLDER_NAME)
        self.meta_remote_file = "%s.json" % os.path.join(
            self.meta_remote_folder, self.meta_name)

        if self.args.clean:
            Helpers.delete_path(self.scratch_location)

        self.repo_manager = RepoManager(self.repo_name, self.args.repo_url,
                                        self.args.repo_branch, "",
                                        self.scratch_location,
                                        self.args.apps_manifest,
                                        self.args.dryrun)

        Logger.debug_on(self.args.debug)

        if not self.args.tmp_folder:
            Logger.errorout("tmp folder must be defined")

        # Deleting the tmp folder to keep installs clean
        Helpers.delete_path(self.tmp_folder)

        self.template_values = {}

        try:
            if self.args.template_files:
                template_paths = self.args.template_files

                # In case one long string is entered
                if isinstance(self.args.template_files, basestring):
                    template_paths = self.args.template_files.split(' ')

                self.template_values = Helpers.load_templating(template_paths)
        except Exception as exception:
            Logger.errorout("No templating problem: %s" % exception.message)

        if self.args.template_json:
            try:
                self.template_values.update(
                    json.loads(self.args.template_json.replace('\\"', '"')))
            except Exception as err:
                Logger.errorout("Error parsing --template-json",
                                error=err.message)

        if self.args.template_filtering:
            self.template_values = Helpers.filter_object(
                self.template_values, self.args.template_filtering)

        ConnManager.set_globals(self.args.ssh_user, self.args.ssh_keyfile,
                                self.args.ssh_port, self.args.app_folder,
                                self.args.app_binary, self.args.dryrun)

        self.ssh_app_commands = ConnManager.SshAppCommands(
            self.app_commands_file, self.template_values)

        # Load any files referenced from the appetite scripts folder before this
        # point; working directories change with repo management
        repo_status = self.repo_manager.pull_repo(self.args.clean_repo)

        if not self.args.dryrun and repo_status < 0:
            Logger.errorout('Repo Error, Look at logs for details')

        repo_check_status = self.repo_manager.check_for_update()

        Logger.add_track_info(self.repo_manager.track)

        triggered = repo_check_status['triggered'] or repo_status == 1

        Logger.info('Repo pull',
                    output=repo_check_status['output'],
                    triggered=triggered)

        if not self.args.dryrun and not self.args.skip_repo_sync and not triggered:
            Logger.info('No repo update found', complete=False)
            self.print_track_info(False)
            sys.exit(0)

        self.repo_manager.set_commit_id()

        # Load in deploymentmethods.conf
        self.deployment_manager = DeploymentMethodsManager(
            self.repo_name, "", self.scratch_location)

        # Generate hosts
        if self.args.hosts:
            # In case one long string is entered
            if len(self.args.hosts) == 1:
                self.args.hosts = self.args.hosts[0].split(' ')

            for host in self.args.hosts:
                split_hostname = host.strip("'\"").split(':')
                clean_hostname = split_hostname[0].split('.')[0].strip("'\"")

                # Along with the hostname, an ssh hostname can be defined. This
                # allows IP addresses to be used in case there is no DNS.
                host_data = Helpers.pull_class_from_host(
                    self.name_formatting, clean_hostname, self.host_classes)

                if host_data:
                    # Can use a specified hostname/IP.
                    # Default is the given hostname
                    ssh_host = split_hostname[-1]
                    self.appetite_hosts.add_host(self, clean_hostname,
                                                 host_data, ssh_host)
        else:
            # Create hosts based on classes
            for host_class in self.host_classes:
                self.appetite_hosts.add_host(
                    self,
                    Helpers.build_hostname(
                        self.name_formatting,  # pylint: disable=no-value-for-parameter
                        host_class,
                        1))

        if self.appetite_hosts.is_empty():
            Logger.errorout("No hosts found after filtering")

        if self.args.clean_metas:
            Helpers.delete_path(self.meta_folder)

        # Only update if a manifest file is not found
        self.update_manifests(check_if_exists=True)

        Logger.info("appetite started",
                    use_templating=self.args.templating,
                    firstrun=self.args.firstrun)

        self.populate_apps_to_hosts()

        changes_found = self.create_host_directories_and_tar()

        if changes_found:
            Logger.info("Start host updates")

            self.update_hosts()

            Logger.info("End host updates")

        self.print_track_info(changes_found)
        Logger.info("Appetite complete", complete=True, changes=changes_found)