def do_report(sender, limit):
    """Collect Cinder volume data and emit usage metrics via *sender*.

    Reads OpenStack credentials from the [openstack] section of CONFIG,
    lists volumes (up to *limit*), then reports per-tenant and
    per-AZ-per-tenant breakdowns stamped with the current time.
    """
    # Credentials all come from the shared configuration object.
    user = CONFIG.get('openstack', 'user')
    passwd = CONFIG.get('openstack', 'passwd')
    tenant = CONFIG.get('openstack', 'name')
    endpoint = CONFIG.get('openstack', 'url')

    cinder = client(user, passwd, tenant, endpoint)
    # Materialise the raw metadata dict for every volume returned.
    volume_infos = [vol._info for vol in all_volumes(cinder, limit)]

    timestamp = int(time.time())
    by_tenant(volume_infos, timestamp, sender)
    by_az_by_tenant(volume_infos, timestamp, sender)
    sender.flush()
def get_auth_session():
    """Return a keystoneauth Session built from v3 password credentials.

    Credentials are read from the [openstack] section of CONFIG; both
    the user and project live in the 'default' domain.
    """
    credentials = {
        'username': CONFIG.get('openstack', 'user'),
        'password': CONFIG.get('openstack', 'passwd'),
        'project_name': CONFIG.get('openstack', 'name'),
        'auth_url': CONFIG.get('openstack', 'url'),
    }
    auth = v3.Password(user_domain_id='default',
                      project_domain_id='default',
                      **credentials)
    return session.Session(auth=auth)
def report_metrics(sender, from_time, to_time):
    """Report hourly rcshibboleth user metrics between two datetimes.

    Walks the [from_time, to_time) interval in one-hour steps; for each
    step it fetches the users known at that time, emits a total count
    and a per-IdP breakdown, then flushes the sender once at the end.

    Args:
        sender: metric sender with count()/by_idp()-compatible hooks
            and a flush() method.
        from_time: inclusive start, a datetime (assumed naive local
            time — TODO confirm against callers).
        to_time: exclusive end, a datetime.
    """
    # Local import so this block stays self-contained; harmless if the
    # module already imports time at top level.
    import time

    username = CONFIG.get('rcshibboleth', 'username')
    password = CONFIG.get('rcshibboleth', 'password')
    host = CONFIG.get('rcshibboleth', 'host')
    database = CONFIG.get('rcshibboleth', 'database')
    db = connection(host, database, username, password)

    while from_time < to_time:
        # BUG FIX: strftime("%s") is a non-standard glibc extension —
        # on platforms without it strftime returns the literal "%s" and
        # int() raises ValueError.  time.mktime() gives the same
        # naive-local-time epoch conversion portably.
        now = int(time.mktime(from_time.timetuple()))
        users = list(list_users(db, from_time))
        count(sender, users, now)
        by_idp(sender, users, now)
        from_time = from_time + timedelta(hours=1)
    sender.flush()
def get_auth_session():
    """Create a keystone Session authenticated with v3 password auth.

    All credentials come from the [openstack] section of CONFIG, with
    user and project domains fixed to "default".
    """

    def _opt(key):
        # Shorthand for the repeated [openstack] config lookups.
        return CONFIG.get("openstack", key)

    auth = v3.Password(
        username=_opt("user"),
        password=_opt("passwd"),
        project_name=_opt("name"),
        auth_url=_opt("url"),
        user_domain_id="default",
        project_domain_id="default",
    )
    return session.Session(auth=auth)
def change_over_time(servers_by_az, now, sender):
    """Emit per-AZ instances_created / instances_deleted metrics.

    Compares the current set of server IDs in each availability zone
    with the set recorded by the previous run (persisted as a pickle in
    the metrics working directory), then overwrites that state file.

    Args:
        servers_by_az: mapping of AZ name -> iterable of server dicts,
            each carrying an 'id' key.
        now: epoch timestamp the metrics are stamped with.
        sender: object exposing send_by_az(zone, metric, value, ts).
    """
    current_servers = {az: {server['id'] for server in servers}
                       for az, servers in servers_by_az.items()}
    working_dir = CONFIG.get('metrics', 'working_dir')
    previous_servers_file = path.join(working_dir, "previous_servers.pickle")

    # First run: seed the state file so the load below has data.
    # BUG FIX: pickle streams are binary — the original opened the
    # files in text mode ('w' / default 'r'), which breaks on Python 3;
    # also use context managers so handles are always closed.
    if not os.path.exists(previous_servers_file):
        with open(previous_servers_file, 'wb') as handle:
            pickle.dump(current_servers, handle)
    try:
        with open(previous_servers_file, 'rb') as handle:
            previous_servers = pickle.load(handle)
    except EOFError:
        logger.warning("Invalid data in pickle %s" % previous_servers_file)
        previous_servers = current_servers

    # Overwrite the pickle each time no matter what.  This will
    # prevent massive launch rates if the script fails for a while.
    # Write to a temp file and move it into place so a crash mid-write
    # cannot leave a truncated state file behind.
    with open(previous_servers_file + '.tmp', 'wb') as handle:
        pickle.dump(current_servers, handle)
    shutil.move(previous_servers_file + '.tmp', previous_servers_file)

    for zone, servers in current_servers.items():
        if zone not in previous_servers:
            # A zone with no previous record has no meaningful delta.
            continue
        previous_zone_servers = previous_servers.get(zone)
        intersection = servers.intersection(previous_zone_servers)
        instances_deleted = len(previous_zone_servers) - len(intersection)
        sender.send_by_az(zone, 'instances_deleted', instances_deleted, now)
        instances_created = len(servers) - len(intersection)
        sender.send_by_az(zone, 'instances_created', instances_created, now)
def setup(filename, file_level='INFO', console_level='INFO'):
    """Configure root logging with optional console and file handlers.

    The file handler writes to <metrics.log_dir>/<filename> and is
    omitted entirely when no log_dir is configured; a falsy
    console_level likewise disables console output.  The root logger is
    always set to DEBUG, with the handlers doing the level filtering.
    """
    handlers = {}
    root_handlers = []

    # Console handler is only attached when a level was requested.
    if console_level:
        handlers['console'] = {
            'level': console_level,
            'class': 'logging.StreamHandler',
            'formatter': 'simple',
        }
        root_handlers.append('console')

    log_dir = CONFIG.get('metrics', 'log_dir')
    if log_dir:
        handlers['file'] = {
            'level': file_level,
            'class': 'logging.FileHandler',
            'formatter': 'simple',
            'filename': path.join(log_dir, filename),
        }
        root_handlers.append('file')

    logging.config.dictConfig({
        'version': 1,
        'disable_existing_loggers': False,
        'formatters': {
            'simple': {
                'format': '%(asctime)s %(name)s %(levelname)s %(message)s',
            },
        },
        'handlers': handlers,
        'root': {
            'handlers': root_handlers,
            'level': 'DEBUG',
        },
    })
def __init__(self):
    # Initialise the parent sender first, then build the gnocchi API
    # client via the class's own _get_client() helper and remember the
    # archive policy name new metrics should be created with (read from
    # the [gnocchi] section of CONFIG).
    super(GnocchiSender, self).__init__()
    self.client = self._get_client()
    self.archive_policy = CONFIG.get('gnocchi', 'archive_policy')