Example #1
    def __init__(self, options, positionals):
        """
            Initialize our controller.  This includes initializing all of the 
            configurations and opening all of the appropriate logfiles.
        """

        # Set our port to bind to
        self._sock = None
        self.PORT_NUM = options.port

        # AMQP host (only used to hand off to analysis)
        self.services_host = options.services_host

        # Sensor config file to load
        sensor_config_file = options.sensor_config_file

        # Machine config file to load
        machine_config_file = options.machine_config_file

        # Disk images config file
        images_config_file = options.images_config_file

        # Import our available sensors
        logger.debug("Importing sensor config file (%s)" % sensor_config_file)
        self.sensor_list = Configs.import_from_config(sensor_config_file,
                                                      "sensor")

        # Import our available machines
        logger.debug("Importing machine config file (%s)" %
                     machine_config_file)
        self.machine_list = Configs.import_from_config(machine_config_file,
                                                       "machine")

        # Import our image mappings
        logger.debug("Importing images config file (%s)" % images_config_file)
        self.images_map = Configs.import_from_config(images_config_file,
                                                     "images")

        # Provision the number of requested VMs
        print "* Initializing %d virtual machines..." % options.vm_count
        for x in range(0, options.vm_count):
            tmp_name = "lophi-%d" % x
            self.machine_list[tmp_name] = VirtualMachine(tmp_name,
                                                         force_new=True)

        # Build our dictionary of queues
        # These queues are handed to the analysis scheduler
        self.MACHINE_QUEUES = {}

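        # One LophiScheduler per machine type (created and started below)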
        self.ANALYSIS_SCHEDULER = {}

        print "* Assigning sensors to machines, and configure machines"
        for m in self.machine_list:

            # Setup their image maps
            self.machine_list[m].add_image_map(
                self.images_map[self.machine_list[m].type])

            # Add sensors and PXE server to physical machines
            if self.machine_list[m].type == G.MACHINE_TYPES.PHYSICAL:
                # Add sensors
                self.machine_list[m].add_sensors(self.sensor_list)
                # Add pxe_server
                from lophinet.pxeserver import PXEServer
                pxe_server = PXEServer(options.pxe_server)
                self.machine_list[m].add_pxe_server(pxe_server)

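        # Use a multiprocessing Manager so the per-type machine queues can be
        # shared with the analysis processes; one queue and one scheduler are
        # created per machine type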
        manager = multiprocessing.Manager()
        for m in self.machine_list:
            # Key queues and schedulers by machine type
            t = self.machine_list[m].type
            # New queue?
            if t not in self.MACHINE_QUEUES:
                machine_queue = manager.Queue()
                self.MACHINE_QUEUES[t] = machine_queue
                self.ANALYSIS_SCHEDULER[t] = LophiScheduler(
                    self.machine_list, machine_queue)
                self.ANALYSIS_SCHEDULER[t].start()
            # Add to queue
            self.MACHINE_QUEUES[t].put(m)

        # Ensure that we can share this list with our analysis threads
#         self.machine_list = multiprocessing.Manager().dict(self.machine_list)

        # Set up our FTP info
        self.ftp_ip_physical = self.ftp_ip_virtual = None
        try:
            self.ftp_ip_physical = NET.get_ip_address(options.ftp_physical)
        except Exception:
            logger.error("Could not find ip for physical FTP interface. (%s)" %
                         options.ftp_physical)
        try:
            self.ftp_ip_virtual = NET.get_ip_address(options.ftp_virtual)
        except Exception:
            logger.error("Could not find ip for virtual FTP interface. (%s)" %
                         options.ftp_virtual)

        # Server run state
        self.RUNNING = True

        # Initialize the multiprocessing.Process base class
        multiprocessing.Process.__init__(self)
        """ 
Example #2
    def run(self):
        logger.info("Started LO-PHI analysis scheduler. (PID: %d)" %
                    os.getpid())

        # Loop forever, consuming analyses and assigning machines to them
        while True:

            # Cleanup any previous analysis
            self._cleanup_pointers()

            # Get an analysis
            (analysis_file, command) = self.analysis_queue.get()

            # Initialize our analysis
            AnalysisClass = self.load_analysis(analysis_file)
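            # running_dict and services_host refer to module-level globals
            # (ANALYSIS_STATUS, SERVICES_HOST) shared with this scheduler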
            analysis = LoPhiAnalysisEngine(running_dict=ANALYSIS_STATUS,
                                           services_host=SERVICES_HOST)

            ANALYSIS_QUEUED.append(analysis.id)

            if command.machine is None:

                logger.debug("Got Analysis: %s" % AnalysisClass)

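                # No machine was requested; block until one is free in our queue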
                machine_name = self.machine_queue.get()
                machine = self.machine_list[machine_name]

                logger.debug("Got Machine: %s" % machine)
                analysis.start(AnalysisClass,
                               lophi_command=command,
                               machine_name=machine_name,
                               machine_list=self.machine_list,
                               machine_queue=self.machine_queue)
            else:
                logger.debug("Got Machine: %s" % command.machine)

                # The user specified a machine; make sure we know about it
                machine_name = command.machine
                if machine_name in self.machine_list.keys():
                    # Get our machine
                    machine = self.machine_list[machine_name]
                    if machine.ALLOCATED >= 0:
                        logger.warning(
                            "Machine (%s) is already allocated to %d." %
                            (machine.config.name, machine.ALLOCATED))

                # Check to see if a vm with this name exists
                elif command.machine_type != G.MACHINE_TYPES.PHYSICAL:

                    # Init a VM object
                    vm = VirtualMachine(command.machine, command.machine_type)
                    if (vm.power_status() ==
                            G.SENSOR_CONTROL.POWER_STATUS.UNKNOWN):
                        logger.error("Virtual machine (%s) does not exist." %
                                     command.machine)
                        return False
                    else:
                        machine = vm
                else:
                    logger.error("Could not find machine: %s" % machine_name)
                    return False

                analysis.start(AnalysisClass,
                               lophi_command=command,
                               machine=machine)

            # Update our dict
            # This is necessary to keep a pointer to the analysis so that the
            # scheduler won't kill the thread
            self.ANALYSIS_DICT.update({analysis.id: analysis})

            # Print some status
            print "* Starting analysis (%s) on machine (%s)." % (
                AnalysisClass.NAME, machine.config.name)

            time.sleep(1)
Example #3
def run_analysis(options):

    # Get our FTP IP
    try:
        ftp_ip = NET.get_ip_address(options.ftp_interface)
    except Exception:
        logger.error("Could not find ip for the given interface. (%s)" %
                     options.ftp_interface)
        # Bail out early; ftp_ip would be undefined below otherwise
        return

    # Add sensors to physical machines if needed
    if options.machine_type == G.MACHINE_TYPES.PHYSICAL:
        has_memory = has_disk = False

        if options.machine_config is None:
            logger.error("No machine config file given.")
            return

        # This isn't the class we use in practice, but fake it here for simplicity
        machines = CONF.import_from_config(options.machine_config, "machine")

        if options.machine not in machines:
            logger.error("%s is not a valid machine from the config file." %
                         options.machine)
            logger.error("Valid targets are: %s" % machines.keys())
            return

        # Get our machine object
        machine = machines[options.machine]

        # Ensure that a sensor config is defined
        if options.sensor_config is None:
            logger.error(
                "A sensor config file must be defined for physical analysis")
            return
        # Get the list of sensors
        sensors = CONF.import_from_config(options.sensor_config, "sensor")

        # Add sensors to our machine
        print "* Trying to find physical sensors for %s..." % options.machine
        added_sensors = machine.add_sensors(sensors)

    else:
        machine = VirtualMachine(options.machine,
                                 vm_type=options.machine_type,
                                 volatility_profile=options.profile)

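    # FTP connection details handed to RemoteAnalysis below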
    ftp_info = {
        'user': G.FTP_USER,
        'pass': G.FTP_PASSWORD,
        'ip': ftp_ip,
        'port': G.FTP_PORT,
        'dir': None
    }

    print "* Machine is: %s" % machine.power_status()

    ra = RemoteAnalysis(options.profile, machine.control, ftp_info)

    parameters = {
        # 1:  'INTmark (write)',
        # 2:  'INTmark (Read)',
        3: 'INTmem',
        # 4:  'FLOATmark (write)',
        # 5:  'FLOATmark (Read)',
        6: 'FLOATmem',
        # 7:  'MMXmark (write)',
        # 8:  'MMXmark (Read)',
        9: 'MMXmem',
        # 10: 'SSEmark (write)',
        # 11: 'SSEmark (Read)',
        12: 'SSEmem'
    }
    # Create a run for all of our parameters (Only *mem will run in batches)
    for b_param in parameters:
        # Should we be reading memory?
        if options.enable_sensor:
            memory_thread = MemoryThread(machine)
            memory_thread.daemon = True
            memory_thread.start()

        param_name = parameters[b_param]

        print "* Running %s test, %d times..." % (param_name,
                                                  options.run_count)

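        # Presumably -b selects the RAMspeed benchmark ID and -l the number of
        # runs, matching the b_param and run_count substitutions below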
        response = ra.run_analysis("ramspeed-win32.exe -b %d -l %d" %
                                   (b_param, options.run_count),
                                   "artifacts-memory",
                                   init_commands=[],
                                   bind_ip=ftp_ip)

        # Now store our results
        results_file = os.path.join(
            options.output_dir,
            "trial_b%d_l%d.txt" % (b_param, options.run_count))
        sensor_file = os.path.join(
            options.output_dir,
            "trial_b%d_l%d_sensor.txt" % (b_param, options.run_count))
        print "* Storing results (%d bytes) in %s." % (len(response),
                                                       results_file)
        f = open(results_file, "w+")
        f.write(response)
        f.close()

        if options.enable_sensor:
            (time_elapsed, bytes_read) = memory_thread.join()
            f = open(sensor_file, "w+")
            f.write(str(time_elapsed) + "\t" + str(bytes_read))
            f.close()
Example #4
def run_analysis(options):

    # Get our FTP IP
    try:
        ftp_ip = NET.get_ip_address(options.ftp_interface)
    except Exception:
        logger.error("Could not find ip for the given interface. (%s)" %
                     options.ftp_interface)
        # Bail out early; ftp_ip would be undefined below otherwise
        return

    # Add sensors to physical machines if needed
    if options.machine_type == G.MACHINE_TYPES.PHYSICAL:
        has_memory = has_disk = False

        if options.machine_config is None:
            logger.error("No machine config file given.")
            return

        # This isn't the class we use in practice, but fake it here for simplicity
        machines = CONF.import_from_config(options.machine_config, "machine")

        if options.machine not in machines:
            logger.error("%s is not a valid machine from the config file." %
                         options.machine)
            logger.error("Valid targets are: %s" % machines.keys())
            return

        # Get our machine object
        machine = machines[options.machine]

        # Ensure that a sensor config is defined
        if options.sensor_config is None:
            logger.error(
                "A sensor config file must be defined for physical analysis")
            return
        # Get the list of sensors
        sensors = CONF.import_from_config(options.sensor_config, "sensor")

        # Add sensors to our machine
        print "Trying to find physical sensors for %s..." % options.machine
        added_sensors = machine.add_sensors(sensors)

        if options.enable_sensor:
            machine.disk.sata_enable_all()

    else:
        machine = VirtualMachine(options.machine,
                                 vm_type=options.machine_type,
                                 volatility_profile=options.profile)

    ftp_info = {
        'user': G.FTP_USER,
        'pass': G.FTP_PASSWORD,
        'ip': ftp_ip,
        'port': G.FTP_PORT,
        'dir': None
    }

    print machine.power_status()

    ra = RemoteAnalysis(options.profile, machine.control, ftp_info)

    for trial in range(44, options.run_count):

        print "Running trial #%d..." % trial

        # Should we fire up our disk sensor?
        #         if options.enable_sensor:
        #             log_dcap_filename = os.path.join(options.output_dir,"trial_%d.dcap"%trial)
        #             log_dcap_queue = multiprocessing.Queue()
        #             log_dcap_writer = CaptureWriter(log_dcap_filename,
        #                                             log_dcap_queue)
        #             log_dcap_writer.start()
        #
        #             dcap_engine = DiskCaptureEngine(machine, log_dcap_queue)
        #             dcap_engine.start()

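        # IOzone flags: -a runs the automatic test matrix, -g 2G caps the
        # maximum file size, and -i 0 -i 1 select the write and read tests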
        iozone_cmd = "iozone.exe -a -g 2G -i 0 -i 1"
        response = ra.run_analysis(
            iozone_cmd,
            None,
            init_commands=["cd C:\Program Files\Benchmarks\Iozone3_414"],
            bind_ip=ftp_ip)

        f = open(os.path.join(options.output_dir, "trial_%d.txt" % trial),
                 "w+")
        f.write(response)
        f.close()

        #         if options.enable_sensor:
        #             dcap_engine.stop()
        #             log_dcap_writer.stop()

        print response
Example #5
def main(options):
    """
        Implement your function here
    """

    # Keep track of the type of analysis that is possible (for physical)
    has_memory = has_disk = True

    # Add sensors to physical machines if needed
    if options.machine_type == G.MACHINE_TYPES.PHYSICAL:
        has_memory = has_disk = False

        if options.machine_config is None:
            logger.error("No machine config file given.")
            return

        # This isn't the class we use in practice, but fake it here for simplicity
        machines = CONF.import_from_config(options.machine_config, "machine")

        if options.machine not in machines:
            logger.error("%s is not a valid machine from the config file." %
                         options.machine)
            logger.error("Valid targets are: %s" % machines.keys())
            return

        # Get our machine object
        machine = machines[options.machine]

        # Ensure that a sensor config is defined
        if options.sensor_config is None:
            logger.error(
                "A sensor config file must be defined for physical analysis")
            return
        # Get the list of sensors
        sensors = CONF.import_from_config(options.sensor_config, "sensor")

        # Add sensors to our machine
        print "Trying to find physical sensors for %s..." % options.machine
        added_sensors = machine.add_sensors(sensors)

        # See which sensors were added
        for sensor in added_sensors:
            print "* Added %s to %s" % (sensor.id, machine.config.name)
            if issubclass(sensor.__class__, MemorySensor):
                has_memory = True
            if issubclass(sensor.__class__, DiskSensor):
                has_disk = True
    else:
        machine = VirtualMachine(options.machine,
                                 vm_type=options.machine_type,
                                 volatility_profile=options.volatility_profile)

    if options.analysis is not None:
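        # analysis_scripts is presumably a module-level map of analysis names
        # to entries whose first element is the analysis class to run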
        analysis = analysis_scripts[options.analysis]

        lae = LoPhiAnalysisEngine()
        lae.start(analysis[0], machine=machine)

        print "Running Analysis (%s)..." % options.analysis
        while True:
            print "* The following commands are available"
            print "   p - Pause, r - Resume, s - Stop"
            command = raw_input('cmd: ')

            if command == "p":
                lae.pause()
                print "Analysis PAUSED."

            elif command == "r":
                lae.resume()
                print "Analysis RESUMED."

            elif command == "s":
                lae.stop()
                print "Analysis STOPPED."
                sys.exit(0)
            else:
                print "Unrecognized command (%s)." % command

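    # NOTE: the "False and" guard below keeps the memory-analysis branch
    # disabled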
    if False and has_memory:
        print "Starting memory analysis"
        # Create a queue and start our analysis
        output_queue = multiprocessing.Queue()
        mem_analysis = MemoryAnalysisEngine(machine,
                                            output_queue,
                                            plugins=['pslist'])
        mem_cap = CaptureWriter("memory.cap", output_queue)
        #         mem_cap.start()
        mem_analysis.start()

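        # Pull and print the first 10 results from the memory analysis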
        for i in range(10):
            print output_queue.get()

#         mem_cap.stop()
        mem_analysis.stop()

    if has_disk:
        print "Starting disk analysis"
        # create a queue and start analysis
        output_queue = multiprocessing.Queue()
        disk_analysis = DiskAnalysisEngine(machine, output_queue)
        disk_cap = CaptureWriter("disk.cap", output_queue)
        #         disk_cap.start()

        disk_analysis.start()

        for i in range(100):
            print output_queue.get()


#         disk_cap.stop()
        disk_analysis.stop()