def get_port(protocol):
    '''
    A function returns the port of a given protocol.

    Args:
        protocol (string): Name of the protocol

    Returns:
        string: The port value parameter for the protocol, or an empty
            string when the YAML file is missing, unreadable, or contains
            no 'PROTOCOL PORT:' line.

    Raises:
    '''
    yaml_file = get_protocol_yaml(protocol)
    if is_a_file(yaml_file):
        try:
            with open(yaml_file, 'r') as f:
                for line in f:
                    if 'PROTOCOL PORT:' in line:
                        # The value is everything after the first colon
                        return line.split(':')[1].strip()
        except (IOError, OSError) as e:
            log_utils.display_message(logging.ERROR,
                                      '%s - %s' % (e.filename, e.strerror))
    # Fix: previously only the "not a file" branch returned ''; a read
    # error or an absent 'PROTOCOL PORT:' line fell through and returned
    # None. Return '' on every failure path for a consistent type.
    return ''
def are_environment_variables_sourced():
    '''
    Check whether all required OpenStack credential environment variables
    are present in the environment.

    Args:

    Returns:
        boolean: Whether the environment variables are sourced or not.

    Raises:
    '''
    # Fix: the original docstring was copied from a file-check helper and
    # described the wrong behavior.
    environment_variables = [
        'OS_USERNAME', 'OS_PASSWORD', 'OS_TENANT_NAME', 'OS_NO_CACHE',
        'OS_AUTH_URL', 'OS_AUTH_STRATEGY', 'OS_REGION_NAME'
    ]
    for variable in environment_variables:
        # Membership test instead of try/except KeyError with an unused
        # exception object.
        if variable not in os.environ:
            log_utils.display_message(
                logging.ERROR,
                'Missing environment variable! You must provide %s. Please source the credentials file.' % variable)
            return False
    return True
def read_one_line_file(filename):
    '''
    Read every line of a file and collapse them into a single line.

    Args:
        filename (string): The file name to read

    Returns:
        string: The one-liner text which is read from a file (lines
            stripped and joined by single spaces); '' on read failure.

    Raises:
    '''
    result = ''
    try:
        with open(filename, 'r') as source:
            # Strip each line, then join with spaces and trim the edges.
            pieces = [row.strip() for row in source]
            result = ' '.join(pieces).strip()
    except (IOError, OSError) as err:
        log_utils.display_message(logging.ERROR,
                                  '%s - %s' % (err.filename, err.strerror))
    return result
def transfer_files_from_controller_to_vanalyzer(params):
    '''
    A function which transfers files with a given extension from the
    Controller node to the Analyzer Engine over SFTP.

    Args:
        params (dictionary): The dictionary which has values for parameters
            (vanalyzer_ip, vanalyzer_username, vanalyzer_password,
            from_folder, to_folder, extension)

    Returns:

    Raises:
    '''
    log_utils.display_message(
        logging.DEBUG,
        'Transferring PCAP files from Controller node to Analyze Engine')
    logging.debug(
        'Transfer files from controller to vAnalyzer parameters: %s' % params)
    # Fix: removed a stray unused local (md5_list_vAnalyzer = {}) that
    # belonged to the integrity-check function, not this one.
    # Logging into device
    ip = params['vanalyzer_ip']
    username = params['vanalyzer_username']
    password = params['vanalyzer_password']
    port = 22
    fromFolder = params['from_folder']
    toFolder = params['to_folder']
    extension = params['extension']
    transport = paramiko.Transport((ip, port))
    transport.connect(username=username, password=password)
    sftp = paramiko.SFTPClient.from_transport(transport)
    filepath = fromFolder
    localpath = toFolder
    for dirpath, dirnames, filenames in os.walk(filepath):
        # NOTE(review): remote_path is computed but never used and no
        # remote directory is created -- the transfer presumably relies on
        # to_folder already existing on the Analyzer Engine. TODO confirm.
        remote_path = os.path.join(localpath, dirpath)
        # make remote directory ...
        for filename in filenames:
            if filename.endswith(extension):
                local_path = os.path.join(dirpath, filename)
                # Files land flat in to_folder regardless of their source
                # sub-directory.
                remote_filepath = os.path.join(localpath, filename)
                # put file
                sftp.put(local_path, remote_filepath)
    sftp.close()
    transport.close()
def delete_file(filename):
    '''
    Delete a single file from disk, logging the outcome.

    Args:
        filename (string): The file name to delete

    Returns:

    Raises:
    '''
    log_utils.display_message(logging.INFO, 'Deleting %s File' % filename)
    # Guard clause: nothing to do when the file is absent.
    if not os.path.exists(filename):
        log_utils.display_message(logging.ERROR,
                                  'File %s not found!' % filename)
        return
    try:
        os.remove(filename)
    except (IOError, OSError) as err:
        log_utils.display_message(logging.ERROR,
                                  '%s - %s' % (err.filename, err.strerror))
    else:
        log_utils.display_message(
            logging.INFO, 'File %s Deleted Successfully' % filename)
def compare(params):
    '''
    Import captured PCAP files into the Elasticsearch database on the
    vAnalyzer instance, then archive them.

    Args:
        params (dictionary): vAnalyzer configurations

    Returns:

    Raises:
    '''
    log_utils.display_message(logging.INFO, 'vAnalyzer Compare')
    # Get instance name from parameters dictionary
    instance_name = params['vAnalyzer']['instance_name']
    # Create a new vOpenstack instance
    openstack = vOpenstack()
    # Refuse to run while a trace is still active
    if sys_utils.is_a_file(params['Environment']['pid_file']):
        log_utils.display_message(logging.ERROR,
                                  'Please stop tracing process and try again')
        sys_utils.die()
    # Check if instance or stack with given name exists
    instanceId = openstack.get_instance_id(instance_name)
    stackId = openstack.get_stack_id(instance_name)
    if instanceId == '' or stackId == '':
        # Instance or stack does not exist
        log_utils.display_message(
            logging.ERROR,
            '%s Instance or Stack Does Not Exist!' % instance_name)
        sys_utils.die()
        return
    # Get vAnalyzer Instance IP Address
    public_net_name = openstack.get_network_name(
        params['vAnalyzer']['private_net_id'])
    vanalyzer_ip = openstack.get_instance_ip(instance_name, public_net_name)
    parameters = {
        'vanalyzer_ip': vanalyzer_ip,
        'vanalyzer_username': params['vAnalyzer']['username'],
        'vanalyzer_password': params['vAnalyzer']['password']
    }
    # Import PCAPs to Elasticsearch DB
    log_utils.display_message(
        logging.DEBUG,
        'Importing PCAP files from /home/ubuntu/vanalyzer-inbox folder into Elasticsearch database.')
    network_utils.import_pcaps_to_db_in_vanalyzer(parameters)
    # Move PCAPs from inbox folder to archive folder
    log_utils.display_message(
        logging.DEBUG,
        'Moving PCAP files from /home/ubuntu/vanalyzer-inbox folder to /home/ubuntu/vanalyzer-archive folder.')
    network_utils.move_pcaps_to_archive_folder(parameters)
def trace_start(params): ''' A function which starts tracing on Compute node. Args: params (dictionary): The dictionary which has values for parameters Returns: Raises: ''' # Assign parameters ip = params['ip'] username = params['username'] password = params['password'] traceList = params['traceList'] protocol = params['protocol'] pid_file = params['pid_file'] computeNode = params['computeNode'] # Open an SSH Connection to Fuel Node ssh_conn = SshConnection(ip, username, password) # Establish an SSH Connection to the compute node ssh_conn.open_ssh_connection(computeNode, username, password) log_utils.display_message( logging.INFO, 'Running tcpdump With Tap Device ID List %s For %s Protocol.' % (traceList, protocol)) command = build_trace_command(protocol, traceList) # Using nohup to keep tcpdump process running upon ssh connection close output = ssh_conn.send(command) logging.debug('Output From Compute: %s' % output) pid = get_pid_from_output(output) log_utils.display_message( logging.DEBUG, 'tcpdump Process ID List For %s Protocol: %s' % (protocol, pid)) # Record pid value in a file for terminate operation sys_utils.append_file(pid_file, pid)
def vanalyzer_terminate(params):
    '''
    Terminate the vAnalyzer instance by deleting its Heat stack after
    user confirmation.

    Args:
        params (dictionary): vAnalyzer configurations

    Returns:

    Raises:
    '''
    log_utils.display_message(logging.INFO, 'vAnalyzer Terminate')
    # Get instance name from parameters dictionary
    instance_name = params['vAnalyzer']['instance_name']
    # Refuse to terminate while a trace is still running
    if sys_utils.is_a_file(params['Environment']['pid_file']):
        log_utils.display_message(logging.ERROR,
                                  'Please stop tracing process and try again')
        sys_utils.die()
    # Ask for approval of deletion
    question = 'Do you really want to delete the %s stack? ' % instance_name
    if not log_utils.response_to_question(question, default='yes'):
        # Deletion aborted by the user
        log_utils.display_message(logging.INFO, 'Exiting vAnalyzer.')
        sys_utils.die()
        return
    # Create a new vOpenstack instance
    openstack = vOpenstack()
    # Check if instance or stack with given name exists
    instanceId = openstack.get_instance_id(instance_name)
    stackId = openstack.get_stack_id(instance_name)
    if instanceId == '' or stackId == '':
        # Instance or stack does not exist
        log_utils.display_message(
            logging.ERROR,
            '%s Instance or Stack Does Not Exist!' % instance_name)
        sys_utils.die()
        return
    # Delete stack
    openstack.delete_stack(instance_name, stackId)
    log_utils.display_message(logging.INFO,
                              'Heat Stack Deletion In Progress.')
    # Delete YAML file
    sys_utils.delete_file(params['Environment']['yaml_file'])
def transfer_pcaps_from_compute_to_controller(params): ''' A function which transfers PCAP files from Compute node to Controller node. Args: params (dictionary): The dictionary which has values for parameters Returns: Raises: ''' # Assign parameters ip = params['ip'] username = params['username'] password = params['password'] instance_name = params['instance_name'] computeNode = params['computeNode'] vanalyzer_username = params['vanalyzer_username'] vAnalyzer_ip = params['vanalyzer_ip'] ssh_conn = params['compute_ssh_conn'] controllerNode = params['controllerNode'] log_utils.display_message( logging.DEBUG, 'Transferring PCAP files from Compute node to Fuel Node') command = 'scp trace_*.pcap ' + username + '@' + ip + ':/tmp' ssh_conn.send(command, '\'s password: '******'Transferring PCAP files from Fuel node to Controller Node') ssh_conn.send('hostname') command = 'scp /tmp/trace_*.pcap ' + controllerNode + ':/tmp' ssh_conn.send(command, ']# ') # Exit to Compute Node ssh_conn.close_last_ssh_connection()
def func_dispatcher(args):
    '''
    A function dispatcher that calls functions according to a given CLI
    command when the main function is called.

    Args:
        args (string): System arguments passed when calling main function

    Returns:

    Raises:
    '''
    # Fix: was logging.debug (the logging *function*); display_message is
    # called everywhere else with a level constant such as logging.DEBUG.
    log_utils.display_message(logging.DEBUG, 'Arguments: %s' % args)
    # Dispatch table: CLI command -> handler function
    allowed_commands = {
        'boot': vanalyzer_boot,
        'terminate': vanalyzer_terminate,
        'trace-start': trace_start,
        'trace-end': trace_end,
        'compare': compare,
        'report': report,
        'usage': usage
    }
    print_usage = allowed_commands['usage']
    try:
        # Get first parameter passed to vAnalyzer tool
        command = str(args[1])
        # Check if parameter is valid
        if command in allowed_commands:
            log_utils.display_message(
                logging.DEBUG,
                'vAnalyzer started with parameters: %s' % command)
            # Get configuration values
            vanalyzer_params = config_utils.read_configuration()
            logging.debug('vAnalyzer configurations: %s' % vanalyzer_params)
            # Check if environment variables are sourced
            if sys_utils.are_environment_variables_sourced():
                # Get corresponding function for parameter passed and execute it
                func = allowed_commands[command]
                func(vanalyzer_params)
                log_utils.display_message(
                    logging.INFO,
                    'vAnalyzer %s operation finished successfully.' % command)
        # Invalid parameter is passed
        else:
            print_usage()
            log_utils.display_message(logging.ERROR,
                                      'Please enter valid arguments!')
    # No parameter passed
    except IndexError:
        print_usage()
def append_file(filename, text_to_append):
    '''
    Append one line of text to a file, creating the file if necessary.

    Args:
        filename (string): The file name to append
        text_to_append (string): The text to write into file

    Returns:

    Raises:
    '''
    try:
        with open(filename, 'a') as handle:
            handle.write('%s\n' % text_to_append)
    except (IOError, OSError) as err:
        log_utils.display_message(logging.ERROR,
                                  '%s - %s' % (err.filename, err.strerror))
def vanalyzer_boot(params):
    '''
    Boot the vAnalyzer instance by rendering the Heat YAML template and
    creating a stack from it.

    Args:
        params (dictionary): vAnalyzer configurations

    Returns:

    Raises:
    '''
    log_utils.display_message(logging.INFO, 'vAnalyzer Boot')
    # Create a new vOpenstack instance
    openstack = vOpenstack()
    # Get instance name and YAML template file from parameters dictionary
    instance_name = params['vAnalyzer']['instance_name']
    yaml_file = params['Environment']['yaml_file']
    # Refuse to boot when an instance or a stack already has this name
    # (short-circuits: the stack lookup only runs if no instance matched).
    if not (openstack.get_instance_id(instance_name) == '' and
            openstack.get_stack_id(instance_name) == ''):
        log_utils.display_message(
            logging.ERROR,
            '%s Instance or Stack Already Exists!' % instance_name)
        sys_utils.die()
        return
    # Create Heat YAML file from template
    sys_utils.create_heat_template(params)
    # Create Stack
    openstack.create_stack(instance_name, yaml_file)
    log_utils.display_message(logging.INFO, 'Heat Stack Create In Progress')
def check_pcap_file_integrity(params):
    '''
    A function which checks transferred PCAP files integrity.

    Args:
        params (dictionary): The dictionary which has values for parameters
            (compute_ssh_conn, vanalyzer_ssh_conn)

    Returns:
        boolean: Returns True if PCAP file integrities match or False if they don't.

    Raises:
    '''
    # Get SSH Connections
    compute_ssh_conn = params['compute_ssh_conn']
    vanalyzer_ssh_conn = params['vanalyzer_ssh_conn']
    # For analyze engine instance, go to vanalyzer-inbox folder
    vanalyzer_ssh_conn.send('cd vanalyzer-inbox')
    result = True
    # Check MD5 sum of each of pcap file in vAnalyzer node and store it in
    # dictionary. (Fix: removed the redundant duplicate {} initializations
    # that were immediately overwritten.)
    md5_list_vAnalyzer = get_integrity(vanalyzer_ssh_conn)
    logging.debug('md5 sums of files in vAnalyzer node: %s' % md5_list_vAnalyzer)
    # Check MD5 sum of each of pcap file in compute node and store it in dictionary
    md5_list_compute = get_integrity(compute_ssh_conn)
    logging.debug('md5 sums of files in compute node: %s' % md5_list_compute)
    log_utils.display_message(
        logging.DEBUG,
        'Checking md5 sums of files in Compute node and vAnalyzer node')
    # Compare md5sums.
    # Fix: iteritems() is Python 2 only while this function already uses
    # Python 3 style dict APIs; items() works on both.
    for key, value in md5_list_compute.items():
        if key not in md5_list_vAnalyzer:
            log_utils.display_message(
                logging.ERROR,
                'There is an error when transferring file: %s , does not exist ' % key)
            result = False
            break
        # Fix: flattened the redundant else/if nesting.
        if value != md5_list_vAnalyzer[key]:
            log_utils.display_message(
                logging.ERROR,
                'There is an error when transferring file: %s , corrupted .' % key)
            result = False
            break
    return result
def read_file(filename):
    '''
    A function to read all lines in a file to a string .

    Args:
        filename (string): The file name to read

    Returns:
        string: The variable which holds text from a file ('' on error)

    Raises:
    '''
    file_string = ''
    try:
        # Fix: the original opened the file without ever closing it; the
        # context manager guarantees the handle is released.
        with open(filename, 'r') as f:
            file_string = f.read()
    except (IOError, OSError) as e:
        log_utils.display_message(logging.ERROR,
                                  '%s - %s' % (e.filename, e.strerror))
    return file_string
def get_pid_from_output(output): ''' A function which returns PID from SSH connection command output. Args: output (string): The output which returned from a command execution Returns: string: The PID to be returned Raises: ''' # PIDs are printed as ***PID***= in ssh output pid_pattern = '...PID...=' # Search for the pattern in ssh output regex = re.compile('(?<=' + pid_pattern + ').+') pid_value = regex.findall(output) log_utils.display_message(logging.DEBUG, 'PID Value: %s' % pid_value) # Returns every 2nd element of the list pid = pid_value[1].strip() return pid
def create_heat_template(params):
    '''
    A function to create Heat YAML file from a template file.

    Args:
        params (dictionary): Dictionary object which holds parameter values for Heat YAML file.

    Returns:

    Raises:
    '''
    # Assign Parameters
    name = params['vAnalyzer']['instance_name']
    image = params['vAnalyzer']['image_name']
    password = params['vAnalyzer']['password']
    flavor = params['vAnalyzer']['flavor_name']
    publicNetId = params['vAnalyzer']['public_net_id']
    privateNetId = params['vAnalyzer']['private_net_id']
    privateSubnetId = params['vAnalyzer']['private_subnet_id']
    volume = params['vAnalyzer']['volume_size']
    # Fix: privateNetId was missing from this debug listing.
    logging.debug(
        'vAnalyzer Instance Parameters: %s' %
        [name, image, password, flavor, publicNetId, privateNetId,
         privateSubnetId, volume])
    yaml_file = params['Environment']['yaml_file']
    temp_file = params['Environment']['temp_file']
    # Fix: the message previously named the template (temp_file) although
    # the file being created is yaml_file.
    log_utils.display_message(logging.INFO, 'Creating %s File' % yaml_file)
    # Placeholder -> value table, applied to every template line below.
    substitutions = {
        '$$$instance_name$$$': name,
        '$$$image$$$': image,
        '$$$os_password$$$': password,
        '$$$flavor$$$': flavor,
        '$$$public_net_id$$$': publicNetId,
        '$$$private_net_id$$$': privateNetId,
        '$$$private_subnet_id$$$': privateSubnetId,
        '$$$volume_size$$$': volume
    }
    try:
        with open(yaml_file, 'wt') as fout:
            with open(temp_file, 'rt') as fin:
                for line in fin:
                    new_line = line
                    for placeholder, value in substitutions.items():
                        new_line = new_line.replace(placeholder, value)
                    fout.write(new_line)
        log_utils.display_message(logging.INFO,
                                  'File %s Created Successfully' % yaml_file)
    except (IOError, OSError) as e:
        log_utils.display_message(logging.ERROR,
                                  '%s - %s' % (e.filename, e.strerror))
def trace_end(params):
    '''
    A function to end network package trace.

    Args:
        params (dictionary): vAnalyzer configurations

    Returns:

    Raises:
    '''
    log_utils.display_message(logging.INFO, 'vAnalyzer Trace End')
    # Get instance name from parameters dictionary
    instance_name = params['vAnalyzer']['instance_name']
    # Check if there is existing traces
    pid_file = params['Environment']['pid_file']
    if sys_utils.is_a_file(pid_file):
        log_utils.display_message(logging.DEBUG,
                                  'Ending tcpdump process on Compute node')
        # Create a new vOpenstack instance
        openstack = vOpenstack()
        # Check if instance or stack with given name exists
        instanceId = openstack.get_instance_id(instance_name)
        stackId = openstack.get_stack_id(instance_name)
        if instanceId != '' and stackId != '':
            # Check Compute Hostname
            computeNode = openstack.get_compute_hostname(instance_name)
            # Get vAnalyzer Instance IP Address
            public_net_name = openstack.get_network_name(params['vAnalyzer']['private_net_id'])
            vanalyzer_ip = openstack.get_instance_ip(instance_name, public_net_name)
            # Ask for confirmation on trace end operation
            if log_utils.response_to_question('Do you want to end the trace(s) on %s ? ' % computeNode, default='yes'):
                # Build the parameter set handed to the network layer
                parameters = dict()
                parameters.update({'ip': params['Fuel']['fuel_node']})
                parameters.update({'username': params['Fuel']['fuel_node_username']})
                parameters.update({'password': params['Fuel']['fuel_node_password']})
                parameters.update({'pid_file': params['Environment']['pid_file']})
                parameters.update({'computeNode': computeNode})
                parameters.update({'instance_name': instance_name})
                parameters.update({'vanalyzer_ip': vanalyzer_ip})
                parameters.update({'vanalyzer_username': params['vAnalyzer']['username']})
                parameters.update({'vanalyzer_password': params['vAnalyzer']['password']})
                network_utils.trace_end(parameters)
            # NOTE(review): declining the question, or a missing
            # instance/stack, silently falls through without ending the
            # trace or notifying the user. TODO confirm this is intended.
    # No existing traces
    else:
        log_utils.display_message(logging.ERROR,
                                  'There are no running processes. Please start trace first!')
        sys_utils.die()
def delete_pcap_files(params):
    '''
    A function to delete PCAP files on Controller, Compute and Fuel nodes.

    Args:
        params (dictionary): The dictionary which has values for parameters

    Returns:

    Raises:
    '''
    username = params['username']
    password = params['password']
    compute_ssh_conn = params['compute_ssh_conn']
    controllerNode = params['controllerNode']
    computeNode = params['computeNode']
    # On Compute Node
    logging.debug('Connected to %s server...' % compute_ssh_conn.get_last_ssh_connection())
    log_utils.display_message(logging.DEBUG, 'Deleting pcap files from Compute node')
    compute_ssh_conn.send('find . -type f -iname \*.pcap -delete')
    compute_ssh_conn.close_last_ssh_connection()
    # On Fuel Node (closing the Compute hop presumably drops the session
    # back to the Fuel node -- TODO confirm SshConnection semantics)
    logging.debug('Connected to %s server...' % compute_ssh_conn.get_last_ssh_connection())
    log_utils.display_message(logging.DEBUG, 'Deleting pcap files from Fuel node')
    compute_ssh_conn.send('cd /tmp')
    compute_ssh_conn.send('find . -type f -iname \*.pcap -delete')
    # On Controller Node
    log_utils.display_message(logging.DEBUG, 'Deleting pcap files from Controller node')
    compute_ssh_conn.open_ssh_connection(controllerNode, username, password)
    logging.debug('Connected to %s server...' % compute_ssh_conn.get_last_ssh_connection())
    compute_ssh_conn.send('cd /tmp')
    compute_ssh_conn.send('find . -type f -iname \*.pcap -delete')
    compute_ssh_conn.close_last_ssh_connection()
    # On Fuel Node
    logging.debug('Connected to %s server...' % compute_ssh_conn.get_last_ssh_connection())
    # NOTE(review): this final hop back onto the Compute node only logs the
    # connection; presumably it restores the connection state the caller
    # expects after this function returns -- TODO confirm.
    compute_ssh_conn.open_ssh_connection(computeNode, username, password)
    # On Compute Node
    logging.debug('Connected to %s server...' % compute_ssh_conn.get_last_ssh_connection())
def report(params):
    '''
    A function to compare the traces with specifications and generate report.

    Args:
        params (dictionary): vAnalyzer configurations

    Returns:

    Raises:
    '''
    log_utils.display_message(logging.INFO, 'vAnalyzer Report')
    # Get instance name from parameters dictionary
    instance_name = params['vAnalyzer']['instance_name']
    # Create a new vOpenstack instance
    openstack = vOpenstack()
    # Get vAnalyzer Instance IP Address
    public_net_name = openstack.get_network_name(
        params['vAnalyzer']['private_net_id'])
    vanalyzer_ip = openstack.get_instance_ip(instance_name, public_net_name)
    # Fix: this update was duplicated verbatim in the original.
    params.update({'vanalyzer_ip': vanalyzer_ip})
    params.update({'vanalyzer_username': params['vAnalyzer']['username']})
    params.update({'vanalyzer_password': params['vAnalyzer']['password']})
    # Generate report
    reportFile = report_utils.generate_report(params)
    # Move report file to vAnalyzer instance
    log_utils.display_message(logging.DEBUG,
                              'Transfering report file to vAnalyzer instance.')
    params.update({'extension': 'json'})
    params.update({'from_folder': sys_utils.get_current_directory()})
    logging.debug(params['from_folder'])
    params.update({'to_folder': '/home/ubuntu/vanalyzer-reports/'})
    network_utils.transfer_files_from_controller_to_vanalyzer(params)
    # Delete JSON Report File
    log_utils.display_message(logging.DEBUG,
                              'Deleting report file from Controller node.')
    sys_utils.delete_file(reportFile)
def trace_start(params):
    '''
    A function to start network package trace.

    Args:
        params (dictionary): vAnalyzer configurations

    Returns:

    Raises:
    '''
    log_utils.display_message(logging.INFO, 'vAnalyzer Trace Start')
    # Get instance name from parameters dictionary
    instance_name = params['vAnalyzer']['instance_name']
    # Create a new vOpenstack instance
    openstack = vOpenstack()
    # Check if there are any running processes
    if sys_utils.is_a_file(params['Environment']['pid_file']):
        log_utils.display_message(logging.ERROR,
                                  'Please stop tracing process and try again')
        sys_utils.die()
    # Check if instance or stack with given name exists
    instanceId = openstack.get_instance_id(instance_name)
    stackId = openstack.get_stack_id(instance_name)
    if instanceId != '' and stackId != '':
        # Check Compute Hostname
        computeNode = openstack.get_compute_hostname(instance_name)
        # Ask for confirmation on trace start operation
        if log_utils.response_to_question(
                'Do you want to start the trace on %s ? ' % computeNode,
                default='yes'):
            # Get list of all virtual machines
            vmList = openstack.get_all_vms()
            log_utils.display_message(logging.DEBUG,
                                      'Available VMs: %s' % vmList)
            # Get list of available protocols
            protocols = sys_utils.get_supported_protocols()
            log_utils.display_message(logging.DEBUG,
                                      'Supported Protocols: %s' % protocols)
            # Get list of tap device ids
            tapDeviceIDList = openstack.get_tap_device_id_list()
            # Get list of trace protocols in config file
            traces = config_utils.get_options('Trace')
            # For each protocol in trace list, start trace on selected tap device ids
            for protocol in traces:
                # Check if this protocol is valid
                if protocol in protocols:
                    # Get VM list for this protocol
                    protocolVms = params['Trace'][protocol].split(',')
                    # Fix: map() returns a lazy iterator on Python 3, which
                    # breaks the len() check below; a list comprehension
                    # behaves identically on Python 2 and 3.
                    protocolVms = [vm.strip() for vm in protocolVms]
                    # Check if VM list is empty or not
                    if len(protocolVms) != 0:
                        logging.debug('VM List: %s' % protocolVms)
                        traceList = list()
                        for vm in protocolVms:
                            logging.debug('VM Name: %s' % vm)
                            # Check if VM's tap device id is in tap device id list or not
                            if vm not in tapDeviceIDList:
                                log_utils.display_message(
                                    logging.WARNING,
                                    'Instance %s does not exist! %s protocol will be ignored.' % (vm, protocol))
                                del traceList[:]
                                break
                            # Get VM id by VM name
                            vmId = openstack.get_instance_id(vm)
                            # Check if VM exists or not
                            if vmId == '':
                                log_utils.display_message(
                                    logging.WARNING,
                                    'Instance %s is not running! %s protocol will be ignored.' % (vm, protocol))
                                del traceList[:]
                                break
                            traceList.append(tapDeviceIDList.get(vm))
                        # Connect to Fuel Node. The function then sets another ssh connection to Compute Node
                        if len(traceList) != 0:
                            parameters = dict()
                            parameters.update({'ip': params['Fuel']['fuel_node']})
                            parameters.update({'username': params['Fuel']['fuel_node_username']})
                            parameters.update({'password': params['Fuel']['fuel_node_password']})
                            parameters.update({'traceList': traceList})
                            parameters.update({'protocol': protocol})
                            parameters.update({'computeNode': computeNode})
                            parameters.update({'pid_file': params['Environment']['pid_file']})
                            network_utils.trace_start(parameters)
                    # If no protocol is specified
                    elif protocol == 'ANY':
                        pass
                    # No VMs found for this protocol
                    else:
                        log_utils.display_message(
                            logging.WARNING,
                            'No Configuration Found for %s Protocol.' % protocol)
                # Protocol is not valid
                else:
                    log_utils.display_message(
                        logging.INFO,
                        '%s protocol will be ignored because it is not supported.' % protocol)
        # Trace start aborted
        else:
            log_utils.display_message(logging.INFO, 'Exiting Program.')
            sys_utils.die()
    # Instance or stack does not exist
    else:
        log_utils.display_message(
            logging.ERROR,
            '%s Instance or Stack Does Not Exist!' % instance_name)
        sys_utils.die()
def trace_end(params): ''' A function which ends tracing on Compute node. Args: params (dictionary): The dictionary which has values for parameters Returns: Raises: ''' # Assign parameters ip = params['ip'] username = params['username'] password = params['password'] # vAnalyzer Instance Parameters vanalyzer_ip = params['vanalyzer_ip'] vanalyzer_username = params['vanalyzer_username'] vanalyzer_password = params['vanalyzer_password'] computeNode = params['computeNode'] # Open an SSH Connection to Fuel Node compute_ssh_conn = SshConnection(ip, username, password) # Establish an SSH Connection to the compute node compute_ssh_conn.open_ssh_connection(computeNode, username, password) # Open an SSH Connection to Analyzer Engine vanalyzer_ssh_conn = SshConnection(vanalyzer_ip, vanalyzer_username, vanalyzer_password) params.update({'compute_ssh_conn': compute_ssh_conn}) params.update({'vanalyzer_ssh_conn': vanalyzer_ssh_conn}) # Controller and Compute Nodes controllerNode = get_controller_hostname() params.update({'controllerNode': controllerNode}) logging.debug('Controller Node: %s' % controllerNode) # Stop tracing and kill processes kill_processes(params) # Transfer PCAP files from Compute Node to Controller Node transfer_pcaps_from_compute_to_controller(params) # Transfer PCAP files from Controller node to Analyzer Engine params.update({'extension': 'pcap'}) params.update({'from_folder': '/tmp/'}) params.update({'to_folder': '/home/ubuntu/vanalyzer-inbox/'}) transfer_files_from_controller_to_vanalyzer(params) # Check file integrity res = check_pcap_file_integrity(params) logging.debug('Check result for md5sum: %s' % res) # If integrities match, delete the files if res: delete_pcap_files(params) # Integrities don't match else: log_utils.display_message( logging.ERROR, 'Deleting pcap files from controller node failed: md5 check sum is wrong!' )
def generate_report(parameters):
    '''
    A function which generates JSON report and returns file name.

    For every supported protocol it queries Elasticsearch for the invalid
    and total packet counts, computes the invalid percentage, and writes
    everything (plus the configuration used) to a JSON file.

    Args:
        parameters (dictionary): The dictionary of values

    Returns:
        string: The file name of the JSON report

    Raises:
    '''
    vanalyzer_ip = parameters['vanalyzer_ip']
    # Create Elasticsearch object
    elasticsearch = vElasticsearch(vanalyzer_ip)
    # Get yaml files from protocol folder
    protocols = sys_utils.get_supported_protocols()
    results = list()
    for protocol in protocols:
        protocol_report = {}
        log_utils.display_message(
            logging.INFO, 'Executing query for %s protocol.' % protocol)
        yaml_file = sys_utils.get_protocol_yaml(protocol)
        # Get invalid packets for desired protocols
        packets = elasticsearch.execute_yaml_query(yaml_file)
        invalid_packets = int(packets['hits']['total'])
        log_utils.display_message(
            logging.DEBUG, 'Number of invalid packets: %i' % invalid_packets)
        # Get all packets for desired protocols
        packets = elasticsearch.get_all_packets_by_protocol(protocol)
        all_packets = int(packets['hits']['total'])
        log_utils.display_message(logging.DEBUG,
                                  'Number of all packets: %i' % all_packets)
        # Guard against division by zero when no packets were captured
        percentage = 0.0
        if all_packets != 0:
            percentage = 100.0 * invalid_packets / all_packets
        log_utils.display_message(
            logging.DEBUG, 'Percentage of invalid packets: %.2f%%' % percentage)
        protocol_report.update({
            'invalid packets': invalid_packets,
            'all packets': all_packets,
            'percentage': percentage
        })
        results.append({'protocol': protocol, 'results': protocol_report})
    report_dict = {'report': results, 'configuration': parameters}
    # Write report dictionary to file
    log_utils.display_message(logging.DEBUG, 'Writing results to report.json file.')
    reportFileName = build_json_filename()
    sys_utils.save_as_json(reportFileName, report_dict)
    if log_utils.response_to_question(
            'Do you want to move analyzed packets to archive?', default='no'):
        # Reindex packets-* database as analyzed
        log_utils.display_message(logging.DEBUG, 'Archiving new packets.')
        elasticsearch.reindex_packets_as_analyzed()
        # Delete packets-* database
        log_utils.display_message(logging.DEBUG, 'Deleting analyzed packets.')
        elasticsearch.delete_packets_index()
    return reportFileName