Example 1
File: master.py Project: jul/dsat
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import os
from dsat.state import get_connection, construct_info
import sched
from dsat.message import send_vector
from dsat.linux_mtime import m_time as time
from time import sleep
from threading import Timer
from random import randint

ticker = sched.scheduler(time, sleep)


CFG, L_CFG, ID = construct_info(sys.argv, "master")
cnx = get_connection(CFG, L_CFG)
task_id = int(time())
ev = {
    "seq": 0, "type": "cpu", "when": 0, "event": "INIT",
    "next": "orchester", "job_id": "0",
    "task_id": 0,
    "arg": {"load": 0, "5min": 0}, "where": "localhost",
    "step": "master", "wid": "0", "pid": str(os.getpid()), "retry": 2}
print(ev)
### BUG in construct_info
send_vector(cnx["orchester_out"], ev)
while True:
    sleep(1)
def reschedule(scheduler, vector, socket):
    """push a job on socket at rescheduled interval
Example 2
from functools import wraps
from collections import defaultdict
from configparser import ConfigParser
from multiprocessing import Process, Queue
from repoze.lru import ExpiringLRUCache as expiringCache
from circus.util import DEFAULT_ENDPOINT_SUB, DEFAULT_ENDPOINT_DEALER
# Imports used below but not shown in this excerpt:
import sys
import os
import logging
from logging.config import dictConfig
from zmq.utils import jsonapi
from dsat.state import construct_info

#pyzmq is string agnostic, so we ensure we use bytes
loads = jsonapi.loads
dumps = jsonapi.dumps
SENTINEL = object()  # a unique sentinel instance
#### let's
#time_keeper = scheduler(time, sleep)
if len(sys.argv) < 2:  # at least one argument (presumably the configuration) is required
    raise Exception("Arg")
CONFIG, LOCAL_INFO, ID = construct_info(sys.argv, "orchester")

dictConfig(CONFIG.get("logging",{}))

CONFIG.update(LOCAL_INFO)
import __main__ as main

log = logging.getLogger("orchester")
D = log.debug
D("Started %r" % main.__file__)

LOCAL_INFO = dict(
    where = CONFIG["where"],
    step = "orchester",
    pid = os.getpid(),
    wid = "0",
Example 3
File: ping.py Project: jul/dsat
import sys
from os import path, getpid
from dsat.state import state_wrapper, construct_info
import psutil
from time import sleep

CFG, L_CFG, ID = construct_info(sys.argv, "ping")


def ping(cnx, arg):
    # psutil renamed network_io_counters() to net_io_counters();
    # _asdict() turns the counters namedtuple into a plain dict
    measure = dict(psutil.net_io_counters()._asdict())
    measure["data"] = list(measure.values())
    return measure


state_wrapper(sys.argv, ping)
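ping.py wires a callable taking (cnx, arg) and returning a measure dict into dsat with a single state_wrapper(sys.argv, ...) call. A minimal sketch of another probe following the same visible pattern; the name loadavg and its keys are invented for illustration and are not part of dsat:

import sys
import os
from dsat.state import state_wrapper, construct_info

CFG, L_CFG, ID = construct_info(sys.argv, "loadavg")


def loadavg(cnx, arg):
    # 1-, 5- and 15-minute system load averages (Unix only)
    one, five, fifteen = os.getloadavg()
    return {"1min": one, "5min": five, "15min": fifteen}


state_wrapper(sys.argv, loadavg)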
Example 4
File: proc.py Project: jul/dsat
import sys
from os import path, getpid
from dsat.state import state_wrapper, construct_info
from dsat.carbon import carbon_maker
from archery.bow import Hankyu as dict  # archery's addable mapping stands in for dict below
import psutil
from time import time, sleep

CFG, L_CFG, ID = construct_info(sys.argv, "proc")

send = carbon_maker(**CFG)


def proc(cnx, arg):
    res = dict()

    sum_file = dict()
    procc = dict()
    sum_connection = dict()
    percent_mem = dict()
    # get_process_list() is gone from modern psutil; process_iter() replaces it
    all_proc = list(psutil.process_iter())
    carbon_measure = dict()
    interesting = {
        '/usr/lib/firefox/firefox', '/opt/google/chrome/chrome', 'mysqld',
        'mongod', "ping.py",
        "proc.py", "master.py", "tracker.py", "cpu.py", "rrd.py", "csvw.py"}
    for x in all_proc:
        try:
            cmd = x.cmdline()
            key = "me" if set(cmd) & interesting else "other"
            carbon_key = None
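The Hankyu-as-dict alias is what makes the per-process dictionaries above easy to aggregate: archery's Hankyu is a mapping that supports addition, merging keys and summing values. A small illustrative check of that behaviour, written from the library's stated purpose rather than taken from proc.py itself:

from archery.bow import Hankyu

a = Hankyu({"me": 1, "other": 2})
b = Hankyu({"me": 3})
print(a + b)  # expected: keys merged and values summed, i.e. me=4, other=2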
Example 5
File: tracker.py Project: jul/dsat
from functools import wraps
from collections import defaultdict
from configparser import ConfigParser
from multiprocessing import Process, Queue
from repoze.lru import ExpiringLRUCache as expiringCache
from circus.util import DEFAULT_ENDPOINT_SUB, DEFAULT_ENDPOINT_DEALER
# Imports used below but not shown in this excerpt:
import sys
import logging
from logging.config import dictConfig
from zmq.utils import jsonapi
from dsat.state import construct_info

#pyzmq is string agnostic, so we ensure we use bytes
loads = jsonapi.loads
dumps = jsonapi.dumps
SENTINEL = object()  # a unique sentinel instance
#### let's
#time_keeper = scheduler(time, sleep)
if len(sys.argv) < 2:  # at least one argument (presumably the configuration) is required
    raise Exception("Arg")
CONFIG, LOCAL_INFO, ID = construct_info(sys.argv, "tracker")

dictConfig(CONFIG.get("logging",{}))
log = logging.getLogger("tracker")
CONFIG.update(LOCAL_INFO)
D = log.debug

def q_madd(q, what):
    """Serialize `what` to JSON bytes and push it on the queue."""
    q.put(dumps(what))

def q_mget(q):
    """Pop one message off the queue and deserialize it."""
    return loads(q.get())



D("Started tracker")
Example 6
File: clock.py Project: jul/dsat
import logging
from logging.config import dictConfig
import os
import sched
import sys
from threading import Timer
# Imports used below but not shown in this excerpt (same pattern as master.py above):
from dsat.state import get_connection, construct_info
from dsat.linux_mtime import m_time as time
from time import sleep


smoothing_factor = 0

ticker = sched.scheduler(time, sleep)


CFG, L_CFG, ID = construct_info(sys.argv, "clock")
cnx = get_connection(CFG, L_CFG)
dictConfig(CFG.get("logging",{}))

#task_id = int(time())
log = logging.getLogger("orchester")
D = log.debug
vector = {
    "seq": 0, "type": "cpu", "event": "INIT",
    "next": "cpu", "job_id": "0",
    "task_id": "0",
    "emitter": "clock(%d)" % int(ID),
    "serialization": "simplejson",
    "arg": '{"load" : 0, "5min" : 0}', "where": "localhost",
    "step": "master", "wid": "0", "pid": L_CFG["pid"], "retry": 2}