Example no. 1
def __init__(self,
             default_filename="",
             tick_interval=100,
             capture=["cpu", "mem", "net", "disk"]):
    CSV_File.__init__(self, default_filename)
    self.tick_interval = tick_interval
    self.__running = True
    self.capture = capture
Example no. 2
def after_experiment_run(self, parameter_type):
    if parameter_type == "output":
        self.__running = False
        time.sleep(self.tick_interval / 1000.0)
        CSV_File.after_experiment_run(self, "output")
        self.event_file.after_experiment_run("output")
Example no. 3
def before_experiment_run(self, parameter_type):
    if parameter_type == "output":
        CSV_File.before_experiment_run(self, "output")
        self.event_file = CSV_File(self.path + ".events")
        self.event_file.before_experiment_run("output")
        self.thread = thread.start_new_thread(self.monitor_thread, tuple())
Example no. 4
def inp_extract_cmdline_parser(self, opts, args):
    CSV_File.inp_parser_extract(self, opts, None)
    self.event_file = CSV_File(self.path + ".events")
Example no. 5
import thread
import time

from versuchung.files import CSV_File


class MachineMonitor(CSV_File):
    """Can be used as: **input parameter** and **output parameter**

    With this parameter the systems status during the experiment can
    be monitored. The tick interval can specified on creation and also
    what values should be captured.

    This parameter creates a :class:`~versuchung.files.CSV_File` with
    the given name. When the experiment starts the monitor fires up a
    thread which will every ``tick_interval`` milliseconds capture the
    status of the system and store the information as a row in the
    normal csv.

    A short example::

        class SimpleExperiment(Experiment):
            outputs = {"ps": MachineMonitor("ps_monitor", tick_interval=100)}

            def run(self):
                shell("sleep 1")
                shell("seq 1 100000 | while read a; do echo > /dev/null; done")
                shell("sleep 1")

        experiment = SimpleExperiment()
        experiment(sys.argv)

    >>> experiment.o.ps.extract(["time", "net_send"])
    [[1326548338.701827, 0],
     [1326548338.810422, 3],
     [1326548338.913667, 0],
     [1326548339.016836, 0],
     [1326548339.119982, 2],
     ....

    """
    def __init__(self,
                 default_filename="",
                 tick_interval=100,
                 capture=["cpu", "mem", "net", "disk"]):
        CSV_File.__init__(self, default_filename)
        self.tick_interval = tick_interval
        self.__running = True
        self.capture = capture

    def __get_cpu(self):
        return [self.psutil.cpu_percent()]

    def __get_memory(self):
        # Note: these are old psutil API names; newer psutil versions
        # expose the same data via virtual_memory() and swap_memory().
        phymem = self.psutil.phymem_usage()
        virtmem = self.psutil.virtmem_usage()
        cached = self.psutil.cached_phymem()
        buffers = self.psutil.phymem_buffers()

        return [
            phymem.total, phymem.used, phymem.free, virtmem.total,
            virtmem.used, virtmem.free, cached, buffers
        ]

    def __get_net(self):
        if not hasattr(self, "old_network_stat"):
            self.old_network_stat = self.psutil.network_io_counters()
        stat = self.psutil.network_io_counters()
        ret = [
            stat.bytes_sent - self.old_network_stat.bytes_sent,
            stat.bytes_recv - self.old_network_stat.bytes_recv
        ]
        self.old_network_stat = stat
        return ret

    def __get_disk(self):
        if not hasattr(self, "old_disk_stat"):
            self.old_disk_stat = self.psutil.disk_io_counters()
        stat = self.psutil.disk_io_counters()
        ret = [
            stat.read_bytes - self.old_disk_stat.read_bytes,
            stat.write_bytes - self.old_disk_stat.write_bytes
        ]
        self.old_disk_stat = stat
        return ret

    def monitor_thread(self):
        try:
            import psutil
            self.psutil = psutil
        except ImportError:
            raise RuntimeError("Please install psutil to use PsMonitor")

        while self.__running:
            row = [time.time()]
            if "cpu" in self.capture:
                row += self.__get_cpu()
            else:
                row += [-1]

            if "mem" in self.capture:
                row += self.__get_memory()
            else:
                row += [-1, -1, -1, -1, -1, -1, -1, -1]

            if "net" in self.capture:
                row += self.__get_net()
            else:
                row += [-1, -1]

            if "disk" in self.capture:
                row += self.__get_disk()
            else:
                row += [-1, -1]

            assert len(row) == len(self.sample_keys)
            self.append(row)

            time.sleep(self.tick_interval / 1000.0)

    def inp_extract_cmdline_parser(self, opts, args):
        CSV_File.inp_parser_extract(self, opts, None)
        self.event_file = CSV_File(self.path + ".events")

    def before_experiment_run(self, parameter_type):
        if parameter_type == "output":
            CSV_File.before_experiment_run(self, "output")
            self.event_file = CSV_File(self.path + ".events")
            self.event_file.before_experiment_run("output")
            self.thread = thread.start_new_thread(self.monitor_thread, tuple())

    def after_experiment_run(self, parameter_type):
        if parameter_type == "output":
            self.__running = False
            time.sleep(self.tick_interval / 1000.0)
            CSV_File.after_experiment_run(self, "output")
            self.event_file.after_experiment_run("output")

    sample_keys = [
        "time", "cpu_percentage", "phymem_total", "phymem_used", "phymem_free",
        "virtmem_total", "virtmem_used", "virtmem_free", "cached", "buffers",
        "net_send", "net_recv", "disk_read", "disk_write"
    ]
    """The various fields in the csv file are organized like the
    strings in this list. E.g. The unix time is the first field of the
    csv file."""

    def extract(self, keys=["time", "cpu_percentage"]):
        """Extract single columns from the captured
        information. Useful keys are defined in
        :attr:`~.sample_keys`"""
        indices = [self.sample_keys.index(x) for x in keys]
        ret = []
        for row in self.value:
            r = []
            for index in indices:
                r.append(row[index])
            ret.append(r)
        return ret
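
To make the captured row layout concrete, here is a small post-processing sketch. It assumes the ``SimpleExperiment`` from the docstring above has already been run in the same process, so ``experiment.o.ps`` still holds the appended rows; the ``summarize`` helper is purely illustrative and not part of versuchung. Cells belonging to a disabled capture group are ``-1`` and are filtered out before averaging.

def summarize(monitor):
    # extract() returns one row per sample, with the requested columns
    # in the requested order (valid keys are listed in sample_keys).
    samples = monitor.extract(["cpu_percentage", "net_send"])
    # -1 marks a sample whose capture group was disabled.
    cpu = [float(c) for c, _ in samples if float(c) >= 0]
    net = [int(n) for _, n in samples if int(n) >= 0]
    avg_cpu = sum(cpu) / len(cpu) if cpu else 0.0
    return avg_cpu, sum(net)

avg_cpu, net_bytes = summarize(experiment.o.ps)
print("average cpu: %.1f%%, bytes sent: %d" % (avg_cpu, net_bytes))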
Example no. 6
from __future__ import print_function

from versuchung.experiment import Experiment
from versuchung.files import CSV_File


class CSVExperiment(Experiment):
    inputs = {}
    outputs = {"csv": CSV_File("csv_output")}

    def run(self):
        self.outputs.csv.value.append([1, 2, 3])


if __name__ == "__main__":
    import shutil, sys
    experiment = CSVExperiment()
    dirname = experiment(sys.argv)

    csv = CSV_File(dirname + "/" + "csv_output")

    assert csv.value == [["1", "2", "3"]]

    if dirname:
        shutil.rmtree(dirname)
    print("success")