Example #1
def _summarize_task_completed(end_date, steps, nodes):
    """Create a list of datetime points containing the amounts of
    completed tasks per hour, day, week up to the given
    date"""

    result = {}
    for step_name, step in steps.iteritems():
        past = end_date - datetime.timedelta(
            minutes=step.get("step") * step.get("times")
        )
        max_completed = 0
        result[step_name] = {
            "info": {
                "max_points": step.get("times"),
                "step_size": step.get("step"),
                "start": past.strftime("%Y-%m-%d %H:%M:%S"),
                "finish": end_date.strftime("%Y-%m-%d %H:%M:%S")
            },
            "points": []
        }

        for x in range(step.get("times")):
            later = past + datetime.timedelta(minutes=step.get("step"))

            # Completed tasks in this time bucket: check the cache first,
            # fall back to a database count on a miss
            tasks = StatsCache().get_stat(
                "completed", later, step.get("step")
            )

            if tasks is None:
                tasks = Task.query.filter(
                    Task.completed >= past, Task.completed <= later
                ).count()

                StatsCache().update(
                    "completed", step.get("step"),
                    set_dt=later, set_value=tasks
                )

            if tasks > max_completed:
                max_completed = tasks

            time_key = later.strftime("%Y-%m-%d %H:%M:%S")
            result[step_name]["points"].append({
                "datetime": time_key,
                "value": tasks
            })

            past = later

        result[step_name]["info"]["max"] = max_completed

    return result
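The steps argument drives the whole loop: each entry names a series and gives the bucket width in minutes ("step") and the number of buckets ("times"). Below is a minimal, runnable sketch of that window arithmetic with a hypothetical steps layout; the actual keys and values used by the caller are an assumption.

import datetime

# Hypothetical step definitions: 12 points of 5 minutes (last hour) and
# 24 points of 60 minutes (last day).
steps = {
    "hour": {"step": 5, "times": 12},
    "day": {"step": 60, "times": 24},
}
end_date = datetime.datetime(2017, 5, 15, 16, 0)

for step_name, step in sorted(steps.items()):
    past = end_date - datetime.timedelta(minutes=step["step"] * step["times"])
    for _ in range(step["times"]):
        later = past + datetime.timedelta(minutes=step["step"])
        # Each (past, later] window becomes one point in the series
        print("%s: %s -> %s" % (step_name, past, later))
        past = later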
Example #3
def _summarize_task_uncompleted(end_date, steps, nodes):
    """Create a list of datetime points containing the amounts of
    uncompleted/queued tasks by last hour, day, week up to the given
    date"""

    result = {}
    for step_name, step in steps.iteritems():
        past = end_date - datetime.timedelta(minutes=step.get("step") *
                                             step.get("times"))
        max_uncompleted = 0
        result[step_name] = {
            "info": {
                "max_points": step.get("times"),
                "step_size": step.get("step"),
                "start": past.strftime("%Y-%m-%d %H:%M:%S"),
                "finish": end_date.strftime("%Y-%m-%d %H:%M:%S")
            },
            "points": []
        }

        for x in range(step.get("times")):
            later = past + datetime.timedelta(minutes=step.get("step"))

            # Find all submissions that are not completed yet/still queued.
            # If not in cache, search db
            uncompleted = StatsCache().get_stat("uncompleted", later,
                                                step.get("step"))

            if uncompleted is None:
                uncompleted = Task.query.filter(
                    Task.status < Task.FINISHED,
                    Task.submitted <= later).count()
                StatsCache().update("uncompleted",
                                    step.get("step"),
                                    set_dt=later,
                                    set_value=uncompleted)

            if uncompleted > max_uncompleted:
                max_uncompleted = uncompleted

            time_key = later.strftime("%Y-%m-%d %H:%M:%S")
            result[step_name]["points"].append({
                "datetime": time_key,
                "value": uncompleted
            })

            past = later

        result[step_name]["info"]["max"] = max_uncompleted

    return result
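This function and the previous one share the same read-through caching pattern: look the bucket up in StatsCache, fall back to a database count on a miss, then write the result back so the next request skips the query. A stripped-down sketch of that pattern follows, with a plain dict standing in for the cache and a stubbed count callable (both hypothetical).

import datetime

_cache = {}  # stands in for StatsCache, keyed by (name, bucket end)

def cached_count(name, later, count_fn):
    # Read-through cache: compute on a miss, then store the result.
    key = (name, later.strftime("%Y-%m-%d %H:%M:%S"))
    value = _cache.get(key)
    if value is None:
        value = count_fn()  # e.g. the Task.query.filter(...).count() above
        _cache[key] = value
    return value

later = datetime.datetime(2017, 5, 15, 15, 15)
print(cached_count("completed", later, lambda: 42))  # miss: computes 42
print(cached_count("completed", later, lambda: 99))  # hit: still 42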
Example #5
    def test_round_nearest_step(self):
        now = datetime.datetime(2017, 5, 15, 15, 11, 22)
        five_min = datetime.datetime(2017, 5, 15, 15, 15)
        ten_min = datetime.datetime(2017, 5, 15, 15, 20)
        fifteen_min = datetime.datetime(2017, 5, 15, 15, 15)
        thirty_min = datetime.datetime(2017, 5, 15, 15, 30)
        sixty_min = datetime.datetime(2017, 5, 15, 16)

        sc = StatsCache()
        assert sc.round_nearest_step(now, 5) == five_min
        assert sc.round_nearest_step(now, 10) == ten_min
        assert sc.round_nearest_step(now, 15) == fifteen_min
        assert sc.round_nearest_step(now, 30) == thirty_min
        assert sc.round_nearest_step(now, 60) == sixty_min
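Every expected value above lands on the next step boundary rather than the closest one (15:11:22 with a 10-minute step becomes 15:20, not 15:10), so despite its name the method behaves like a ceiling that also drops seconds. A minimal standalone implementation consistent with this test follows; the real StatsCache method may differ.

import datetime

def round_nearest_step(dt, step_size):
    # Round dt up to the next step_size-minute boundary, discarding seconds.
    base = dt.replace(second=0, microsecond=0)
    remainder = base.minute % step_size
    if remainder or dt.second or dt.microsecond:
        base += datetime.timedelta(minutes=step_size - remainder)
    return base

now = datetime.datetime(2017, 5, 15, 15, 11, 22)
assert round_nearest_step(now, 10) == datetime.datetime(2017, 5, 15, 15, 20)
assert round_nearest_step(now, 60) == datetime.datetime(2017, 5, 15, 16)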
Example #6
    def test_update_increment_changed(self):
        sc = StatsCache()
        sc._init_stats()
        dt = datetime.datetime.now()
        key = sc.round_nearest_step(dt, 15).strftime(sc.dt_ftm)
        sc.update(name="test1", step_size=15)
        sc.update(name="test1", step_size=15, increment_by=1337)

        assert sc.stats["test1"][key] == 1338
Example #11
    def test_update_changed_default(self):
        sc = StatsCache()
        sc._init_stats()
        dt = datetime.datetime(2017, 5, 15, 15, 9, 22)

        sc.update(name="test9", set_dt=dt, step_size=15, default="Doge")
        assert sc.get_stat(name="test9", dt=dt, step_size=15) == "Doge"
Example #12
    def test_update_to_default(self):
        sc = StatsCache()
        sc._init_stats()
        dt = datetime.datetime(2017, 5, 15, 15, 9, 22)

        sc.update(name="test8", set_dt=dt, step_size=15)
        assert sc.get_stat(name="test8", dt=dt, step_size=15) == {}
Example #14
    def test_get_nonexistant(self):
        sc = StatsCache()
        sc._init_stats()
        dt = datetime.datetime(2017, 5, 15, 15, 9, 22)

        assert sc.get_stat(name="test5", dt=dt, step_size=15) is None
        sc.update(
            name="test5", step_size=15,
            set_dt=datetime.datetime(2017, 5, 15, 1, 5, 19)
        )
        assert sc.get_stat(name="test5", dt=dt, step_size=15) is None
Example #16
    def test_update_set_dt_value(self):
        sc = StatsCache()
        sc._init_stats()
        value = os.urandom(64)
        dt1 = datetime.datetime(2017, 5, 15, 15, 9, 22)
        sc.update(name="test2", step_size=15, set_dt=dt1, set_value=value)
        dt2 = datetime.datetime(2017, 5, 15, 15, 13, 42)

        assert sc.get_stat(name="test2", dt=dt2, step_size=15) == value
Example #17
    def test_update_key_prefix(self):
        sc = StatsCache()
        sc._init_stats()
        value = os.urandom(64)
        dt1 = datetime.datetime(2017, 5, 15, 15, 9, 22)
        dt2 = datetime.datetime(2017, 5, 15, 15, 11, 42)
        sc.update(
            name="test3", step_size=15, set_dt=dt1,
            set_value=value, key_prefix="node1"
        )

        key = "node1%s" % sc.round_nearest_step(dt1, 15).strftime(sc.dt_ftm)
        assert sc.get_stat("test3", dt2, 15, key_prefix="node1") == value
        assert sc.stats["test3"][key] == value
Example #18
    def test_get_now_is_none(self):
        sc = StatsCache()
        sc._init_stats()
        value = os.urandom(64)
        sc.update(
            name="test4", step_size=15, set_value=value,
            set_dt=datetime.datetime.now()
        )

        assert sc.get_stat(
            name="test4", step_size=15, dt=datetime.datetime.now()
        ) is None
Example #20
def _summarize_ram_usage(end_date, steps, nodes):
    """Create a list of datetime points containing the amounts of
    RAM usage per node per hour, day, week up to the given
    date"""

    result = {}

    for step_name, step in steps.iteritems():
        past = end_date - datetime.timedelta(
            minutes=step.get("step") * step.get("times")
        )
        result[step_name] = {
            node.name: {
                "info": {
                    "max_points": step.get("times"),
                    "step_size": step.get("step"),
                    "start": past.strftime("%Y-%m-%d %H:%M:%S"),
                    "finish": end_date.strftime("%Y-%m-%d %H:%M:%S"),
                    "max": 100,
                },
                "points": [],
            }
            for node in nodes
        }

        for x in range(step.get("times")):
            later = past + datetime.timedelta(minutes=step.get("step"))

            for node in nodes:
                # Node status for this time bucket: check the cache first;
                # on a miss, take the earliest DB entry at or after `past`
                status = StatsCache().get_stat(
                    "status", later, step.get("step"), key_prefix=node.name
                )

                if status is None:
                    q = NodeStatus.query.filter(
                        NodeStatus.name == node.name,
                        NodeStatus.timestamp >= past
                    ).order_by(NodeStatus.timestamp.asc()).first()

                    if q is not None:
                        status = q.status

                    StatsCache().update(
                        "status", step.get("step"), set_value=status,
                        set_dt=later, key_prefix=node.name
                    )

                if not status or status.get("memory") is None:
                    continue

                try:
                    memory = int(status.get("memory"))
                except ValueError:
                    continue

                time_key = later.strftime("%Y-%m-%d %H:%M:%S")
                result[step_name][node.name]["points"].append({
                    "datetime": time_key,
                    "value": memory
                })

            past = later

    return result
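Compared with the task summaries, the return value is nested one level deeper: step name, then node name, then the usual info/points pair. A hypothetical excerpt of the structure for a single node and a 5-minute step (all values made up):

result = {
    "hour": {
        "node1": {
            "info": {
                "max_points": 12,
                "step_size": 5,
                "start": "2017-05-15 15:00:00",
                "finish": "2017-05-15 16:00:00",
                "max": 100,
            },
            "points": [
                {"datetime": "2017-05-15 15:05:00", "value": 41},
                {"datetime": "2017-05-15 15:10:00", "value": 43},
            ],
        }
    }
}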
Example #23
def _summarize_vms_running(end_date, steps, nodes):
    """Create a list of datetime points containing the amounts of
    running vms by hour, day, week up to the given
    date"""

    results = {}

    vm_count = 0
    # Determine the total current VMs
    for node in nodes:
        node_status = NodeStatus.query.filter(
            NodeStatus.name == node.name).order_by(
                NodeStatus.timestamp.desc()).first()

        if node_status:
            vm_count += node_status.status["machines"].get("total")

    for step_name, step in steps.iteritems():
        past = end_date - datetime.timedelta(minutes=step.get("step") *
                                             step.get("times"))
        results[step_name] = {
            "info": {
                "max": vm_count,
                "max_points": step.get("times"),
                "step_size": step.get("step"),
                "start": past.strftime("%Y-%m-%d %H:%M:%S"),
                "finish": end_date.strftime("%Y-%m-%d %H:%M:%S")
            },
            "points": []
        }
        max_running = 0

        for x in range(step.get("times")):
            later = past + datetime.timedelta(minutes=step.get("step"))
            running = None

            for node in nodes:
                # Node status for this time bucket: check the cache first;
                # on a miss, take the earliest DB entry at or after `past`
                status = StatsCache().get_stat("status",
                                               later,
                                               step.get("step"),
                                               key_prefix=node.name)

                if status is None:
                    q = NodeStatus.query.filter(
                        NodeStatus.name == node.name,
                        NodeStatus.timestamp >= past).order_by(
                            NodeStatus.timestamp.asc()).first()

                    if q is not None:
                        status = q.status

                    StatsCache().update("status",
                                        step.get("step"),
                                        set_value=status,
                                        set_dt=later,
                                        key_prefix=node.name)

                if not status:
                    continue

                if running is None:
                    running = 0

                total_vms = status["machines"].get("total")
                running += total_vms - status["machines"].get("available")

            time_key = later.strftime("%Y-%m-%d %H:%M:%S")

            if running is not None:
                results[step_name]["points"].append({
                    "datetime": time_key,
                    "value": running
                })

                if running > max_running:
                    max_running = running

            past = later

        # Check if the amount of running VMs in the past is higher than current
        # total VMs. This can happen if VMs are removed.
        if max_running > vm_count:
            vm_count = max_running

        results[step_name]["info"]["max"] = vm_count

    return results
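The per-bucket arithmetic treats everything that is not "available" as running and sums it over all nodes; the reported max is then whichever is larger, the current VM total or the historical peak. A small sketch of the summation with made-up status dicts:

statuses = [
    {"machines": {"total": 4, "available": 1}},  # node1: 3 running
    {"machines": {"total": 2, "available": 2}},  # node2: 0 running
]

running = sum(
    s["machines"]["total"] - s["machines"]["available"] for s in statuses
)
assert running == 3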
Example #25
def _summarize_cpu_usage(end_date, steps, nodes):
    """Create a list of datetime points containing the amounts of
    CPU usage in percent per node per hour, day, week up to the given
    date"""

    result = {}

    for step_name, step in steps.iteritems():
        past = end_date - datetime.timedelta(
            minutes=step.get("step") * step.get("times")
        )
        result[step_name] = {
            node.name: {
                "info": {
                    "max_points": step.get("times"),
                    "step_size": step.get("step"),
                    "start": past.strftime("%Y-%m-%d %H:%M:%S"),
                    "finish": end_date.strftime("%Y-%m-%d %H:%M:%S"),
                    "max": 100
                },
                "points": []
            }
            for node in nodes
        }

        for x in range(step.get("times")):
            later = past + datetime.timedelta(minutes=step.get("step"))

            for node in nodes:
                # Node status for this time bucket: check the cache first;
                # on a miss, take the earliest DB entry at or after `past`
                status = StatsCache().get_stat(
                    "status", later, step.get("step"), key_prefix=node.name
                )

                if status is None:
                    q = NodeStatus.query.filter(
                        NodeStatus.name == node.name,
                        NodeStatus.timestamp >= past
                    ).order_by(NodeStatus.timestamp.asc()).first()

                    if q is not None:
                        status = q.status

                    StatsCache().update(
                        "status", step.get("step"), set_value=status,
                        set_dt=later, key_prefix=node.name
                    )

                if not status or not status.get("cpu_count"):
                    continue

                cpu_count = status.get("cpu_count")
                cpu_load = status.get("cpuload")

                # Use average load of last minute. See doc (os.getloadavg)
                load = int(cpu_load[0] / cpu_count * 100)

                time_key = later.strftime("%Y-%m-%d %H:%M:%S")

                result[step_name][node.name]["points"].append({
                    "datetime": time_key,
                    "value": load
                })

            past = later

    return result
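The "cpuload" field appears to carry an os.getloadavg()-style triple (1-, 5- and 15-minute load averages) and "cpu_count" the number of cores; scaling the 1-minute figure by the core count yields a rough utilization percentage. With made-up numbers for a 4-core node:

cpuload = (1.4, 0.9, 0.7)  # as os.getloadavg() would return on that node
cpu_count = 4

load = int(cpuload[0] / cpu_count * 100)
assert load == 35  # i.e. roughly 35% utilization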
    def test_constants(self):
        sc = StatsCache()
        assert sc.dt_ftm == "%Y-%m-%d %H:%M:%S"
        assert sc.max_cache_days == 60
Example #27
    def test_reset(self):
        sc = StatsCache()
        assert len(sc.stats) > 1
        sc._reset_at = datetime.datetime.now() - datetime.timedelta(days=1)
        sc.get_stat(name="test7", dt=datetime.datetime.now(), step_size=15)
        assert len(sc.stats) == 1
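Taken together, the StatsCache tests above pin down a small API: one store shared by every StatsCache() instance, update() that either sets a value, stores a default, or increments a per-bucket counter, get_stat() that never serves the still-open current bucket, and a periodic reset driven by _reset_at and max_cache_days. The dict-backed class below is an inference from those tests, not the project's actual implementation; it is only meant to make the expected behaviour concrete.

import datetime

class StatsCache(object):
    # In-memory sketch of the cache exercised by the tests above.
    dt_ftm = "%Y-%m-%d %H:%M:%S"
    max_cache_days = 60
    _instance = None

    def __new__(cls):
        # Every StatsCache() returns the same object, so state is shared
        # between the summarize helpers and across test methods.
        if cls._instance is None:
            cls._instance = super(StatsCache, cls).__new__(cls)
            cls._instance._init_stats()
        return cls._instance

    def _init_stats(self):
        self.stats = {}
        self._reset_at = datetime.datetime.now() + datetime.timedelta(
            days=self.max_cache_days
        )

    def round_nearest_step(self, dt, step_size):
        # Ceiling to the next step_size-minute boundary (see earlier sketch).
        base = dt.replace(second=0, microsecond=0)
        remainder = base.minute % step_size
        if remainder or dt.second or dt.microsecond:
            base += datetime.timedelta(minutes=step_size - remainder)
        return base

    def update(self, name, step_size, increment_by=None, set_dt=None,
               set_value=None, key_prefix="", default=None):
        if default is None:
            default = {}
        dt = set_dt or datetime.datetime.now()
        key = key_prefix + self.round_nearest_step(dt, step_size).strftime(
            self.dt_ftm)
        bucket = self.stats.setdefault(name, {})
        if set_value is not None:
            bucket[key] = set_value          # explicit value wins
        elif set_dt is not None and increment_by is None:
            bucket[key] = default            # placeholder for an empty bucket
        else:
            # Counter semantics: missing keys start at 0, default step is 1
            bucket[key] = bucket.get(key, 0) + (increment_by or 1)

    def get_stat(self, name, dt, step_size, key_prefix=""):
        if datetime.datetime.now() >= self._reset_at:
            self._init_stats()               # periodic cache wipe
        bucket = self.stats.setdefault(name, {})
        rounded = self.round_nearest_step(dt, step_size)
        if rounded >= datetime.datetime.now():
            return None                      # bucket still open: recompute
        return bucket.get(key_prefix + rounded.strftime(self.dt_ftm))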
Example #28
def _summarize_disk_usage(end_date, steps, nodes):
    """Create a list of datetime points containing the amounts of
    currently used disk space per node by last hour, day, week up to the given
    date"""

    results = {}

    # For each node, determine which storages there are and their total
    # storage volume
    storage_nodes = {}
    for node in nodes:
        node_status = NodeStatus.query.filter(
            NodeStatus.name == node.name
        ).order_by(NodeStatus.timestamp.desc()).first()

        if node_status:
            storage_nodes[node.name] = {
                disk_n: {
                    "total": val["total"]
                }
                for disk_n, val in
                node_status.status.get("diskspace").iteritems()
            }

    for step_name, step in steps.iteritems():
        past = end_date - datetime.timedelta(
            minutes=step.get("step") * step.get("times")
        )
        results[step_name] = {
            node.name: {
                "info": {
                    "disks": storage_nodes[node.name],
                    "max_points": step.get("times"),
                    "step_size": step.get("step"),
                    "start": past.strftime("%Y-%m-%d %H:%M:%S"),
                    "finish": end_date.strftime("%Y-%m-%d %H:%M:%S")
                },
                "points": {}
            }
            for node in nodes
        }

        for x in range(step.get("times")):
            later = past + datetime.timedelta(minutes=step.get("step"))

            for node in nodes:
                # Node status for this time bucket: check the cache first;
                # on a miss, take the earliest DB entry at or after `past`
                status = StatsCache().get_stat(
                    "status", later, step.get("step"), key_prefix=node.name
                )

                if status is None:
                    q = NodeStatus.query.filter(
                        NodeStatus.name == node.name,
                        NodeStatus.timestamp >= past
                    ).order_by(NodeStatus.timestamp.asc()).first()

                    if q is not None:
                        status = q.status

                    StatsCache().update(
                        "status", step.get("step"), set_value=status,
                        set_dt=later, key_prefix=node.name
                    )

                if not status:
                    continue

                time_key = later.strftime("%Y-%m-%d %H:%M:%S")
                current = results[step_name][node.name]["points"]
                for st_name, val in status.get("diskspace").iteritems():
                    storage_name = "%s_used" % st_name

                    if storage_name not in current:
                        current[storage_name] = []

                    current[storage_name].append({
                        "datetime": time_key,
                        "value": val["used"]
                    })

            past = later

    return results
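For the disk summary, per-storage totals live under info["disks"] while the usage series go under "points", keyed as "<storage name>_used". A hypothetical excerpt for one node with a single storage named "analyses" (name and values made up):

results = {
    "hour": {
        "node1": {
            "info": {
                "disks": {"analyses": {"total": 500107862016}},
                "max_points": 12,
                "step_size": 5,
                "start": "2017-05-15 15:00:00",
                "finish": "2017-05-15 16:00:00",
            },
            "points": {
                "analyses_used": [
                    {"datetime": "2017-05-15 15:05:00", "value": 73400320},
                ],
            },
        }
    }
}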