# NOTE(review): this fragment is truncated -- the loop header that binds
# `item` and the code that opens the `m` munin-node connection sit above the
# visible region, and the indentation of the first line looks mangled by the
# paste. Left byte-identical; confirm against the full file before editing.
mfdict[item] = []
    # Ask munin-node for the current values of this plugin item.
    m.writeline('fetch' + item)
    while True:
        l = m.readline()
        # Stop on EOF ...
        if not l:
            break
        # ... or on the '.' line munin-node sends to terminate a response.
        if l.startswith('.'):
            break
        mfdict[item].append(l.rstrip())

# Close munin-node
m.writeline('quit')

# Init connection to cloudwatch
cw = cloudwatch.connection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)

# Init item old value dictionary.
# On first run, seed the state file with an empty dict so the read below
# always succeeds. Use `with` so the file handles are closed even if
# pickle raises (the original open/close pattern leaked on error).
movalue = {}
if not os.path.exists(STATEFILE):
    with open(STATEFILE, 'w') as f:
        pickle.dump(movalue, f)

# Read old derive value back from the state file.
# NOTE(review): pickle.load on a file we wrote ourselves is fine; never
# point STATEFILE at untrusted data.
with open(STATEFILE) as f:
    movalue = pickle.load(f)

# Init item new value dictionary
mnvalue = {}
# --- Exemplo n.º 2 (scraped sample separator) ---
import cloudwatch
import httplib2, simplejson
import hoover
from hoover import utils

# init our connection to cloudwatch
cw = cloudwatch.connection('AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY_ID') 

# init our connection to loggly
hoover.authorize('geekceo', 'kordless', 'password')

# cloudwatch namespace
namespace = 'Loggly'

# get back the number of events for the website
geekceo = hoover.utils.get_input_by_name('default')
num_results = geekceo.facets(q='*', starttime='NOW-6MINUTES', endtime='NOW-1MINUTE', buckets=1)['data'].items()[0][1]
# push it to cloudwatch
cw.putData(namespace, "WebEventCount", num_results)

# get back the number of 404s for the website
geekceo = hoover.utils.get_input_by_name('loggly_web')
num_results = geekceo.facets(q='GET AND 404', starttime='NOW-6MINUTES', endtime='NOW-1MINUTE', buckets=1)['data'].items()[0][1]
# push it to cloudwatch
cw.putData(namespace, "404Count", num_results)

# print to stdout, which is then piped to logger by cron (you could also just log from here using HTTP via hoover)
print 'cloudwatch: finished run'


# --- Exemplo n.º 3 (scraped sample separator) ---
import cloudwatch
import httplib2, simplejson
import hoover
from hoover import utils

# Init our connection to cloudwatch.
# NOTE(review): the credentials and loggly password below look like
# hard-coded placeholders -- move them to config/env before real use.
cw = cloudwatch.connection('AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY_ID')

# init our connection to loggly
hoover.authorize('geekceo', 'kordless', 'password')

# cloudwatch namespace every metric is reported under
namespace = 'Loggly'


def _push_facet_count(input_name, query, metric):
    # Count events matching `query` on the named loggly input over
    # (roughly) the last five minutes, then push that count to
    # cloudwatch under `metric`.
    source = hoover.utils.get_input_by_name(input_name)
    count = source.facets(q=query,
                          starttime='NOW-6MINUTES',
                          endtime='NOW-1MINUTE',
                          buckets=1)['data'].items()[0][1]
    cw.putData(namespace, metric, count)


# total web events for the website
_push_facet_count('default', '*', 'WebEventCount')
# 404s for the website
_push_facet_count('loggly_web', 'GET AND 404', '404Count')