def set_definition(self, rule_definitions, attributes=None, ip_ranges=None, params=None):
    """
    Update every attribute of the rule by setting the argument values as necessary.

    :param rule_definitions: map of rule filename to objects exposing string_definition (raw JSON)
    :param attributes: optional list of attribute names to copy onto the rule; all keys when empty
    :param ip_ranges: optional list of local ip-ranges filenames for ip_ranges_from_args conditions
    :param params: optional dict of special values (currently unused in this variant -- see NOTE below)
    :return: None
    """
    # Fix: the original used mutable default arguments ([] / {}), which are
    # evaluated once and shared across calls, so state could leak between rules.
    attributes = [] if attributes is None else attributes
    ip_ranges = [] if ip_ranges is None else ip_ranges
    params = {} if params is None else params
    string_definition = rule_definitions[self.filename].string_definition
    # Replace _ARG_N_ placeholders with the rule's positional arguments
    parameters = re.findall(r'(_ARG_([a-zA-Z0-9]+)_)', string_definition)
    for param in parameters:
        index = int(param[1])
        string_definition = string_definition.replace(param[0], self.args[index])
    definition = json.loads(string_definition)
    # Set special values (IP ranges, AWS account ID, ...)
    if len(attributes):
        for condition in definition['conditions']:
            # Only simple [attr, op, value] conditions with a scalar value qualify
            if type(condition) != list or len(condition) == 1 or type(condition[2]) == list:
                continue
            for testcase in testcases:
                result = testcase['regex'].match(condition[2])
                if result and (testcase['name'] == 'ip_ranges_from_file' or testcase['name'] == 'ip_ranges_from_local_file'):
                    filename = result.groups()[0]
                    if filename == ip_ranges_from_args:
                        # Aggregate prefixes from every ip-ranges file passed in
                        prefixes = []
                        for filename in ip_ranges:
                            prefixes += read_ip_ranges(filename, local_file=True, ip_only=True)
                        condition[2] = prefixes
                    else:
                        local_file = True if testcase['name'] == 'ip_ranges_from_local_file' else False
                        condition[2] = read_ip_ranges(filename, local_file=local_file, ip_only=True)
                    break
                else:
                    # NOTE(review): this stores the compiled regex object itself and the
                    # `params` argument goes unused; sibling versions of this method use
                    # `condition[2] = params[testcase['name']]` in an `elif result:` branch
                    # instead -- confirm which behavior is intended.
                    condition[2] = testcase['regex']
                    break
    # Copy the requested (or, when none requested, all) attributes onto the rule
    if len(attributes) == 0:
        attributes = [attr for attr in definition]
    for attr in attributes:
        if attr in definition:
            setattr(self, attr, definition[attr])
    if hasattr(self, 'path'):
        self.service = format_service_name(self.path.split('.')[0])
    if not hasattr(self, 'key'):
        setattr(self, 'key', self.filename)
    setattr(self, 'key', self.key.replace('.json', ''))
def load_json_rule(self, rule_metadata, ip_ranges, aws_account_id):
    """
    Load one rule configuration from its JSON file.

    Resolves the file location, substitutes _ARG_N_ placeholders, expands
    ip-ranges file references, replaces the AWS account ID placeholder, and
    converts comma-separated list values into Python lists.

    :param rule_metadata: dict with at least 'filename' and optionally 'args'
    :param ip_ranges: list of local ip-ranges files for ip_ranges_from_args
    :param aws_account_id: account ID substituted for _AWS_ACCOUNT_ID_
    :return: the parsed rule config dict, or None when reading failed
    """
    config = None
    config_file = rule_metadata['filename']
    config_args = rule_metadata['args'] if 'args' in rule_metadata else []
    # Resolve the rule file location: as given, then under <rule_type>/,
    # then under the bundled rules data directory.
    if not os.path.isfile(config_file):
        if not config_file.startswith('findings/') and not config_file.startswith('filters/'):
            config_file = '%s/%s' % (self.rule_type, config_file)
        if not os.path.isfile(config_file):
            config_file = os.path.join(self.rules_data_path, config_file)
    try:
        with open(config_file, 'rt') as rule_file:
            config = rule_file.read()
        # Substitute the positional _ARG_N_ placeholders before parsing
        for position, argument in enumerate(config_args):
            placeholder = '_ARG_' + str(position) + '_'
            config = config.replace(placeholder, str(argument).strip())
        config = json.loads(config)
        config['filename'] = rule_metadata['filename']
        if 'args' in rule_metadata:
            config['args'] = rule_metadata['args']
        # Post-process each condition's value
        for condition in config['conditions']:
            # Boolean operators ('and'/'or' style entries) carry no value
            if condition in condition_operators:
                continue
            # Values that are already structured need no expansion
            if type(condition[2]) == list or type(condition[2]) == dict:
                continue
            match = re_ip_ranges_from_file.match(condition[2])
            if match:
                filename = match.groups()[0]
                conditions = json.loads(match.groups()[1])
                if filename == aws_ip_ranges_filename:
                    condition[2] = read_ip_ranges(aws_ip_ranges_filename, False, conditions, True)
                elif filename == ip_ranges_from_args:
                    prefixes = []
                    for ip_range in ip_ranges:
                        prefixes = prefixes + read_ip_ranges(ip_range, True, conditions, True)
                    condition[2] = prefixes
            if condition[2] and aws_account_id:
                if not type(condition[2]) == list:
                    condition[2] = condition[2].replace('_AWS_ACCOUNT_ID_', aws_account_id)
            # Turn "[a, b, c]"-style strings into real lists
            list_value = re_list_value.match(str(condition[2]))
            if list_value:
                condition[2] = [v.strip() for v in list_value.groups()[0].split(',')]
    except Exception as e:
        printException(e)
        printError('Error: failed to read the rule from %s' % config_file)
    return config
def get_cidr_name(cidr, ip_ranges_files, ip_ranges_name_key):
    """
    Return a display name for a CIDR by looking it up in ip-ranges files.

    :param cidr: CIDR (string or netaddr.IPNetwork) to look up
    :param ip_ranges_files: local ip-ranges JSON files to search first
    :param ip_ranges_name_key: key whose value is used as the display name
    :return: display name string, or an 'Unknown CIDR...' fallback
    """
    # Hoisted: the original re-wrapped cidr in netaddr.IPNetwork() on every
    # loop iteration; the conversion is loop-invariant (and idempotent).
    cidr = netaddr.IPNetwork(cidr)
    for filename in ip_ranges_files:
        ip_ranges = read_ip_ranges(filename, local_file=True)
        for ip_range in ip_ranges:
            ip_prefix = netaddr.IPNetwork(ip_range['ip_prefix'])
            if cidr in ip_prefix:
                return ip_range[ip_ranges_name_key].strip()
    # Fall back to the public AWS ip-ranges data
    for ip_range in aws_ip_ranges:
        ip_prefix = netaddr.IPNetwork(ip_range['ip_prefix'])
        if cidr in ip_prefix:
            return 'Unknown CIDR in %s %s' % (ip_range['service'], ip_range['region'])
    return 'Unknown CIDR'
def test_awsrecipes_create_ip_ranges(self):
    """Run the create-ip-ranges recipe against CSV fixtures and compare its output."""
    all_runs_succeeded = True
    recipe = os.path.join(self.recipes_dir, 'awsrecipes_create_ip_ranges.py')
    test_cases = [
        # Headers match attribute names; consume every column
        ['--csv-ip-ranges tests/data/ip-ranges-1.csv --force', 'ip-ranges-1a.json'],
        # Headers match attribute names; consume a subset of columns
        ['--csv-ip-ranges tests/data/ip-ranges-1.csv --force --attributes ip_prefix field_b --skip-first-line', 'ip-ranges-1b.json'],
        # Headers match; subset of columns selected via explicit mappings (first line must be skipped)
        ['--csv-ip-ranges tests/data/ip-ranges-1.csv --force --attributes ip_prefix field_b --mappings 0 2 --skip-first-line', 'ip-ranges-1c.json'],
        # Headers match but mappings swap the columns (first line must be skipped)
        ['--csv-ip-ranges tests/data/ip-ranges-1.csv --force --attributes ip_prefix field_a --mappings 0 2 --skip-first-line', 'ip-ranges-1d.json'],
        # Headerless file; consume every column
        ['--csv-ip-ranges tests/data/ip-ranges-2.csv --force --attributes ip_prefix field_a, field_b --mappings 0 1 2', 'ip-ranges-2a.json'],
        # Headerless file; consume a subset of columns
        ['--csv-ip-ranges tests/data/ip-ranges-2.csv --force --attributes ip_prefix field_b --mappings 0 2', 'ip-ranges-2b.json'],
        # Header names differ from attribute names (first line must be skipped)
        ['--csv-ip-ranges tests/data/ip-ranges-3.csv --force --attributes ip_prefix new_field_b --mappings 0 2 --skip-first-line', 'ip-ranges-3a.json'],
        # Differing header names and ip_prefix not in the first column (first line must be skipped)
        ['--csv-ip-ranges tests/data/ip-ranges-4.csv --force --attributes ip_prefix new_field_a new_field_b --mappings 1 0 2 --skip-first-line', 'ip-ranges-4a.json'],
    ]
    for cli_args, expected_file in test_cases:
        command = ['python', recipe] + cli_args.split(' ')
        recipe_process = Popen(command, stdout=PIPE)
        if recipe_process.wait() != 0:
            print('The recipe %s failed to run with arguments %s.' % (recipe, cli_args))
            all_runs_succeeded = False
            continue
        actual = read_ip_ranges('ip-ranges-default.json')
        expected = read_ip_ranges(os.path.join(self.result_dir, expected_file))
        if self.cmp(actual, expected) != 0:
            print('Failed when comparing:\n%s\n%s\n' % (actual, expected))
            all_runs_succeeded = False
        os.remove('ip-ranges-default.json')
    assert all_runs_succeeded
def get_cidr_name(cidr, ip_ranges_files, ip_ranges_name_key):
    """
    Read display name for CIDRs from ip-ranges files.

    :param cidr: CIDR (string or netaddr.IPNetwork) to look up
    :param ip_ranges_files: local ip-ranges JSON files searched first
    :param ip_ranges_name_key: key whose value is returned as the display name
    :return: display name string, or an 'Unknown CIDR...' fallback
    """
    # Convert once up front: the original rebuilt netaddr.IPNetwork(cidr)
    # inside both loops on every iteration, which is loop-invariant work.
    cidr = netaddr.IPNetwork(cidr)
    for filename in ip_ranges_files:
        ip_ranges = read_ip_ranges(filename, local_file=True)
        for ip_range in ip_ranges:
            ip_prefix = netaddr.IPNetwork(ip_range['ip_prefix'])
            if cidr in ip_prefix:
                return ip_range[ip_ranges_name_key].strip()
    # Not found locally: try the public AWS ip-ranges data for a coarse match
    for ip_range in aws_ip_ranges:
        ip_prefix = netaddr.IPNetwork(ip_range['ip_prefix'])
        if cidr in ip_prefix:
            return 'Unknown CIDR in %s %s' % (ip_range['service'], ip_range['region'])
    return 'Unknown CIDR'
def set_definition(self, rule_definitions, attributes=None, ip_ranges=None, params=None):
    """
    Update every attribute of the rule by setting the argument values as necessary.

    :param rule_definitions: map of rule filename to objects exposing string_definition (raw JSON)
    :param attributes: optional list of attribute names to copy onto the rule; all keys when empty
    :param ip_ranges: optional list of local ip-ranges filenames for ip_ranges_from_args conditions
    :param params: optional dict of special values keyed by testcase name
    :return: None
    """
    # Fix: the original used mutable default arguments ([] / {}), which are
    # evaluated once and shared across calls, so state could leak between rules.
    attributes = [] if attributes is None else attributes
    ip_ranges = [] if ip_ranges is None else ip_ranges
    params = {} if params is None else params
    string_definition = rule_definitions[self.filename].string_definition
    # Load condition dependencies: _INCLUDE_(file) pulls conditions from data/<file>
    definition = json.loads(string_definition)
    definition['conditions'] += self.conditions
    loaded_conditions = []
    for condition in definition['conditions']:
        if condition[0].startswith('_INCLUDE_('):
            include = re.findall(r'_INCLUDE_\((.*?)\)', condition[0])[0]
            with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data/%s' % include), 'rt') as f:
                new_conditions = f.read()
            # Substitute the placeholder/value pairs carried by the condition
            for (i, value) in enumerate(condition[1]):
                new_conditions = re.sub(condition[1][i], condition[2][i], new_conditions)
            new_conditions = json.loads(new_conditions)['conditions']
            loaded_conditions.append(new_conditions)
        else:
            loaded_conditions.append(condition)
    definition['conditions'] = loaded_conditions
    string_definition = json.dumps(definition)
    # Set parameters: replace _ARG_N_ placeholders with the rule's arguments
    parameters = re.findall(r'(_ARG_([a-zA-Z0-9]+)_)', string_definition)
    for param in parameters:
        index = int(param[1])
        if len(self.args) <= index:
            # Missing argument: blank the placeholder instead of raising
            string_definition = string_definition.replace(param[0], '')
        elif type(self.args[index]) == list:
            # List arguments replace the quoted placeholder with a JSON array
            value = '[ %s ]' % ', '.join('"%s"' % v for v in self.args[index])
            string_definition = string_definition.replace('"%s"' % param[0], value)
        else:
            string_definition = string_definition.replace(param[0], self.args[index])
    # Strip dots if necessary
    stripdots = re_strip_dots.findall(string_definition)
    for value in stripdots:
        string_definition = string_definition.replace(value[0], value[1].replace('.', ''))
    definition = json.loads(string_definition)
    # Set special values (IP ranges, AWS account ID, ...)
    for condition in definition['conditions']:
        # Only simple [attr, op, value] conditions with a scalar value qualify
        if type(condition) != list or len(condition) == 1 or type(condition[2]) == list:
            continue
        for testcase in testcases:
            result = testcase['regex'].match(condition[2])
            if result and (testcase['name'] == 'ip_ranges_from_file' or testcase['name'] == 'ip_ranges_from_local_file'):
                filename = result.groups()[0]
                conditions = result.groups()[1] if len(result.groups()) > 1 else []  # TODO :: handle comma here...
                if filename == ip_ranges_from_args:
                    # Aggregate prefixes from every ip-ranges file passed in
                    prefixes = []
                    for filename in ip_ranges:
                        prefixes += read_ip_ranges(filename, local_file=True, ip_only=True, conditions=conditions)
                    condition[2] = prefixes
                else:
                    local_file = True if testcase['name'] == 'ip_ranges_from_local_file' else False
                    condition[2] = read_ip_ranges(filename, local_file=local_file, ip_only=True, conditions=conditions)
                # Fix: both branches above ended in `break`, followed by a second,
                # unreachable `break`; a single break at the join point is equivalent.
                break
            elif result:
                condition[2] = params[testcase['name']]
                break
    # Copy the requested (or, when none requested, all) attributes onto the rule
    if len(attributes) == 0:
        attributes = [attr for attr in definition]
    for attr in attributes:
        if attr in definition:
            setattr(self, attr, definition[attr])
    if hasattr(self, 'path'):
        self.service = format_service_name(self.path.split('.')[0])
    if not hasattr(self, 'key'):
        setattr(self, 'key', self.filename)
    setattr(self, 'key', self.key.replace('.json', ''))
    if self.key_suffix:
        setattr(self, 'key', '%s-%s' % (self.key, self.key_suffix))
def set_definition(self, rule_definitions, attributes=None, ip_ranges=None, params=None):
    """
    Update every attribute of the rule by setting the argument values as necessary.

    :param rule_definitions: map of rule filename to objects exposing string_definition (raw JSON)
    :param attributes: optional list of attribute names to copy onto the rule; all keys when empty
    :param ip_ranges: optional list of local ip-ranges filenames for ip_ranges_from_args conditions
    :param params: optional dict of special values keyed by testcase name
    :return: None
    """
    # Fix: the original used mutable default arguments ([] / {}), which are
    # evaluated once and shared across calls, so state could leak between rules.
    attributes = [] if attributes is None else attributes
    ip_ranges = [] if ip_ranges is None else ip_ranges
    params = {} if params is None else params
    string_definition = rule_definitions[self.filename].string_definition
    # Load condition dependencies: _INCLUDE_(file) pulls conditions from data/<file>
    definition = json.loads(string_definition)
    loaded_conditions = []
    for condition in definition['conditions']:
        if condition[0].startswith('_INCLUDE_('):
            include = re.findall(r'_INCLUDE_\((.*?)\)', condition[0])[0]
            with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data/%s' % include), 'rt') as f:
                new_conditions = f.read()
            # Substitute the placeholder/value pairs carried by the condition
            for (i, value) in enumerate(condition[1]):
                new_conditions = re.sub(condition[1][i], condition[2][i], new_conditions)
            new_conditions = json.loads(new_conditions)['conditions']
            loaded_conditions.append(new_conditions)
        else:
            loaded_conditions.append(condition)
    definition['conditions'] = loaded_conditions
    string_definition = json.dumps(definition)
    # Set parameters: replace _ARG_N_ placeholders with the rule's arguments
    parameters = re.findall(r'(_ARG_([a-zA-Z0-9]+)_)', string_definition)
    for param in parameters:
        index = int(param[1])
        # NOTE(review): no bounds check on self.args here; a missing argument raises
        # IndexError, whereas a sibling version of this method blanks the placeholder
        # with '' -- confirm which behavior is intended before unifying.
        if type(self.args[index]) == list:
            # List arguments replace the quoted placeholder with a JSON array
            value = '[ %s ]' % ', '.join('"%s"' % v for v in self.args[index])
            string_definition = string_definition.replace('"%s"' % param[0], value)
        else:
            string_definition = string_definition.replace(param[0], self.args[index])
    # Strip dots if necessary
    stripdots = re_strip_dots.findall(string_definition)
    for value in stripdots:
        string_definition = string_definition.replace(value[0], value[1].replace('.', ''))
    definition = json.loads(string_definition)
    # Set special values (IP ranges, AWS account ID, ...)
    for condition in definition['conditions']:
        # Only simple [attr, op, value] conditions with a scalar value qualify
        if type(condition) != list or len(condition) == 1 or type(condition[2]) == list:
            continue
        for testcase in testcases:
            result = testcase['regex'].match(condition[2])
            if result and (testcase['name'] == 'ip_ranges_from_file' or testcase['name'] == 'ip_ranges_from_local_file'):
                filename = result.groups()[0]
                conditions = result.groups()[1] if len(result.groups()) > 1 else []  # TODO :: handle comma here...
                if filename == ip_ranges_from_args:
                    # Aggregate prefixes from every ip-ranges file passed in
                    prefixes = []
                    for filename in ip_ranges:
                        prefixes += read_ip_ranges(filename, local_file=True, ip_only=True, conditions=conditions)
                    condition[2] = prefixes
                else:
                    local_file = True if testcase['name'] == 'ip_ranges_from_local_file' else False
                    condition[2] = read_ip_ranges(filename, local_file=local_file, ip_only=True, conditions=conditions)
                # Fix: both branches above ended in `break`, followed by a second,
                # unreachable `break`; a single break at the join point is equivalent.
                break
            elif result:
                condition[2] = params[testcase['name']]
                break
    # Copy the requested (or, when none requested, all) attributes onto the rule
    if len(attributes) == 0:
        attributes = [attr for attr in definition]
    for attr in attributes:
        if attr in definition:
            setattr(self, attr, definition[attr])
    if hasattr(self, 'path'):
        self.service = format_service_name(self.path.split('.')[0])
    if not hasattr(self, 'key'):
        setattr(self, 'key', self.filename)
    setattr(self, 'key', self.key.replace('.json', ''))
def main():
    """
    Build an ip-ranges-<profile>.json file of public IP prefixes.

    Three modes, selected by CLI flags: interactive prompting, loading from
    CSV file(s), or querying AWS EC2 for the profile's public IP addresses.
    Returns 42 on requirement or credential failure (used as exit code).
    """
    # Parse arguments
    parser = OpinelArgumentParser()
    parser.add_argument('debug')
    parser.add_argument('profile')
    parser.add_argument('force')
    # NOTE(review): 'dry-run' is declared but never read below -- confirm whether
    # a dry-run mode was intended before save_ip_ranges() is called.
    parser.add_argument('dry-run')
    parser.add_argument('regions')
    parser.add_argument('partition-name')
    parser.parser.add_argument('--interactive',
                               dest='interactive',
                               default=False,
                               action='store_true',
                               help='Interactive prompt to manually enter CIDRs.')
    parser.parser.add_argument('--csv-ip-ranges',
                               dest='csv_ip_ranges',
                               default=[],
                               nargs='+',
                               help='CSV file(s) containing CIDRs information.')
    parser.parser.add_argument('--skip-first-line',
                               dest='skip_first_line',
                               default=False,
                               action='store_true',
                               help='Skip first line when parsing CSV file.')
    parser.parser.add_argument('--attributes',
                               dest='attributes',
                               default=[],
                               nargs='+',
                               help='Name of the attributes to enter for each CIDR.')
    parser.parser.add_argument('--mappings',
                               dest='mappings',
                               default=[],
                               nargs='+',
                               help='Column number matching attributes when headers differ.')
    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Initialize the list of regions to work with
    regions = build_region_list('ec2', args.regions, args.partition_name)

    # For each profile/environment...
    for profile_name in args.profile:
        # Interactive mode: prompt the user for each prefix and its attributes
        if args.interactive:
            # Start from an existing ip-ranges file for this profile when present
            attributes = args.attributes
            filename = 'ip-ranges-%s.json' % profile_name
            if os.path.isfile(filename):
                printInfo('Loading existing IP ranges from %s' % filename)
                prefixes = read_ip_ranges(filename)
                # Initialize attributes from existing values
                if attributes == []:
                    for prefix in prefixes:
                        for key in prefix:
                            if key not in attributes:
                                attributes.append(key)
            else:
                prefixes = []
            # IP prefix does not need to be specified as an attribute
            attributes = [a for a in attributes if a != 'ip_prefix']
            # Prompt for new entries until the user declines
            while prompt_4_yes_no('Add a new IP prefix to the ip ranges'):
                ip_prefix = prompt_4_value('Enter the new IP prefix:')
                obj = {}
                for a in attributes:
                    obj[a] = prompt_4_value('Enter the \'%s\' value:' % a)
                prefixes.append(new_prefix(ip_prefix, obj))
        # CSV mode: load prefixes from the given CSV file(s)
        elif len(args.csv_ip_ranges) > 0:
            # Initialize prefixes
            prefixes = []
            # Load CSV file contents
            # NOTE(review): csv_contents is reassigned on every iteration, so only
            # the LAST file in --csv-ip-ranges is actually processed -- confirm
            # whether multiple files were meant to be concatenated.
            for filename in args.csv_ip_ranges:
                with open(filename, 'rt') as f:
                    csv_contents = f.readlines()
            # Build the attribute-name -> column-index mappings
            attributes = args.attributes
            mappings = {}
            if attributes == []:
                # Follow structure of first line (header names become attributes)
                headers = csv_contents.pop(0).strip().split(',')
                for index, attribute in enumerate(headers):
                    mappings[attribute] = index
            elif attributes and args.mappings == []:
                # Follow structure of first line but only map a subset of fields
                headers = csv_contents.pop(0).strip().split(',')
                attributes.append('ip_prefix')
                for attribute in set(attributes):
                    mappings[attribute] = headers.index(attribute)
            else:
                # Indices of columns are provided as an argument
                for index, attribute in enumerate(attributes):
                    mappings[attribute] = int(args.mappings[index])
            if args.skip_first_line:
                csv_contents.pop(0)
            # For each line, build one prefix object from the mapped columns
            for line in csv_contents:
                ip_prefix = {}
                values = line.strip().split(',')
                # Skip lines that do not carry every mapped column
                if len(values) < len(mappings):
                    continue
                for attribute in mappings:
                    ip_prefix[attribute] = values[mappings[attribute]]
                # Merge separate ip/mask columns into a single CIDR string
                if 'ip_prefix' in mappings and 'mask' in mappings:
                    ip = ip_prefix.pop('ip_prefix')
                    mask = ip_prefix.pop('mask')
                    ip_prefix['ip_prefix'] = '%s/%s' % (ip, mask.replace('/',''))
                prefixes.append(ip_prefix)
        # AWS mode: enumerate public IPs attached to EC2 resources
        else:
            # Initialize IP addresses
            printInfo('Fetching public IP information for the \'%s\' environment...' % profile_name)
            ip_addresses = {}
            # Search for AWS credentials
            credentials = read_creds(profile_name)
            if not credentials['AccessKeyId']:
                return 42
            # For each region...
            for region in regions:
                # Connect to EC2
                ec2_client = connect_service('ec2', credentials, region)
                if not ec2_client:
                    continue
                # Get public IP addresses associated with EC2 instances
                printInfo('...in %s: EC2 instances' % region)
                reservations = handle_truncated_response(ec2_client.describe_instances, {}, ['Reservations'])
                for reservation in reservations['Reservations']:
                    for i in reservation['Instances']:
                        if 'PublicIpAddress' in i:
                            ip_addresses[i['PublicIpAddress']] = new_ip_info(region, i['InstanceId'], False)
                            get_name(i, ip_addresses[i['PublicIpAddress']], 'InstanceId')
                        if 'NetworkInterfaces' in i:
                            for eni in i['NetworkInterfaces']:
                                if 'Association' in eni:
                                    # At that point, we don't know whether it's an EIP or not...
                                    ip_addresses[eni['Association']['PublicIp']] = new_ip_info(region, i['InstanceId'], False)
                                    get_name(i, ip_addresses[eni['Association']['PublicIp']], 'InstanceId')
                # Get all EIPs (to handle unassigned cases)
                printInfo('...in %s: Elastic IP addresses' % region)
                eips = handle_truncated_response(ec2_client.describe_addresses, {}, ['Addresses'])
                for eip in eips['Addresses']:
                    instance_id = eip['InstanceId'] if 'InstanceId' in eip else None
                    # EC2-Classic non associated EIPs have an empty string for instance ID (instead of lacking the attribute in VPC)
                    if instance_id == '':
                        instance_id = None
                    ip_addresses[eip['PublicIp']] = new_ip_info(region, instance_id, True)
                    ip_addresses[eip['PublicIp']]['name'] = instance_id
            # Format the collected addresses as ip-ranges prefix entries
            prefixes = []
            for ip in ip_addresses:
                prefixes.append(new_prefix(ip, ip_addresses[ip]))
        # Generate an ip-ranges-<profile>.json file
        save_ip_ranges(profile_name, prefixes, args.force_write, args.debug)