def main():
    """Entry point: configure logging, then run every grabber in turn."""
    utils.setup_logging()
    # Wipe previously-grabbed .json files so no stale info survives a run.
    utils.clear_directory_contents(utils.grabber_dir())
    buildout.grab_all()
    nginx.grab_all()
    apache.grab_all()
    server.grab_all()
def grab_one(directory): """Grab and write info on one buildout.""" logger.info("Grabbing buildout info from %s", directory) result = {} result["directory"] = directory result["extends"] = extends_info(directory) result["eggs"] = eggs_info(directory) result["vcs"] = vcs_info(directory) result["id"] = id(directory) result["hostname"] = utils.hostname() outfile = os.path.join(utils.grabber_dir(), FILENAME.format(id=id(directory))) open(outfile, "w").write(json.dumps(result, sort_keys=True, indent=4)) logger.debug("Wrote info to %s", outfile)
def grab_one(configfile): """Grab and write info on one apache config.""" logger.info("Grabbing apache info from %s", configfile) result = {} result['configfile'] = configfile contents = open(configfile).readlines() result['contents'] = [line.rstrip() for line in contents] result['hostname'] = utils.hostname() servernames = set() ips = set() ports = set() directories = set() # There should be only one conf per deployment, but we check anyway. for confline in contents: servernames.update(servername_regex.findall(confline)) servernames.update(serveralias_regex.findall(confline)) ips.update(ip_regex.findall(confline)) ports.update(port_regex.findall(confline)) directories.update(directory_regex.findall(confline)) if not servernames: logger.info("No servernames found, probably empty default config.") return logger.debug("Servernames/aliases found: %s", servernames) logger.debug("IP addresses we listen to found: %s", ips) logger.debug("Local ports we redirect to found: %s", ports) result['server_names'] = list(servernames) result['ips'] = list(ips) result['ports'] = list(ports) result['id'] = id(result['server_names']) if directories: directory = list(directories)[0] result['buildout_id'] = directory result['buildout_directory'] = '/srv/' + directory outfile = os.path.join(utils.grabber_dir(), FILENAME.format(id=result['id'])) open(outfile, 'w').write( json.dumps(result, sort_keys=True, indent=4)) logger.debug("Wrote info to %s", outfile)
def grab_one(configfile): """Grab and write info on one nginx config.""" logger.info("Grabbing nginx info from %s", configfile) result = {} result['configfile'] = configfile contents = open(configfile).readlines() result['contents'] = [line.rstrip() for line in contents] settings = {} for line in contents: line = line.strip() parts = line.split(' ', 1) if len(parts) == 1: continue # We treat the first word on the line as a setting name. Good enough # for our purpose. settings[parts[0]] = parts[1].rstrip(';') server_names = settings['server_name'] server_names = server_names.split() server_names = [name for name in server_names if name] result['server_names'] = server_names result['hostname'] = utils.hostname() # Assumption: access log is in the buildout directory where our site is, # so something like /srv/DIRNAME/var/log/access.log. logfile = settings.get('access_log') if logfile is not None: parts = logfile.split('/') result['buildout_id'] = parts[2] result['buildout_directory'] = '/srv/%s' % parts[2] if 'proxy_pass' in settings: proxy_pass = settings['proxy_pass'] result['proxy_pass'] = proxy_pass # Looks like 'proxy_pass http://localhost:9000'. parts = proxy_pass.split(':') port = parts[-1] result['proxy_port'] = port result['id'] = id(server_names) outfile = os.path.join(utils.grabber_dir(), FILENAME.format(id=id(server_names))) open(outfile, 'w').write( json.dumps(result, sort_keys=True, indent=4)) logger.debug("Wrote info to %s", outfile)
# Stdlib imports were missing from the visible module top although the
# code below uses all three names.
import json
import logging
import os

from serverinfo import utils

FILENAME = 'server___{id}.json'

logger = logging.getLogger(__name__)


def grab_all():
    """Grab and write info on the whole server.

    Records hostname, the non-hidden users in /home and the backupninja
    jobs in /etc/backup.d, then dumps everything as JSON into the grabber
    directory.
    """
    logger.info("Grabbing info for whole server.")
    result = {}
    hostname = utils.hostname()
    result['id'] = hostname
    result['hostname'] = hostname
    result['users'] = [d for d in os.listdir('/home')
                       if not d.startswith('.')]
    backupninja_dir = '/etc/backup.d/'
    try:
        if os.path.exists(backupninja_dir):
            result['backup_jobs'] = os.listdir(backupninja_dir)
    # "except OSError, e" is Python-2-only syntax; "as" works on 2.6+ and 3.
    except OSError as e:
        logger.warn(e)
        result['backup_jobs'] = (
            '/etc/backup.d is not accessible to the serverinfo script.')
    outfile = os.path.join(utils.grabber_dir(),
                           FILENAME.format(id=hostname))
    # Context manager closes the output file deterministically; the
    # original left the open file to the garbage collector.
    with open(outfile, 'w') as f:
        f.write(json.dumps(result, sort_keys=True, indent=4))
    logger.debug("Wrote info to %s", outfile)