def api_function_wrapper(config, http_context, sessions, module, function_name):
    """
    API function wrapper in charge of:
        - instantiate a new logger;
        - check the user session id;
        - call a function named 'function_name' from 'module_name' module and
          return its result.

    HTTPError raised by the session check or the target function is re-raised
    as-is; any other exception is converted into a generic HTTP 500 so
    internal details are not leaked to the client.
    """
    logger = get_logger(config)
    logger.debug("Calling %s.%s()." % (module.__name__, function_name,))
    logger.debug(http_context)
    try:
        # Authentication: raises HTTPError when the session id is
        # missing/invalid.
        username = check_sessionid(http_context['headers'], sessions)
        http_context['username'] = username
        # Dynamic dispatch to the target API function.
        dm = getattr(module, function_name)(config, http_context)
        logger.debug("Done.")
        return dm
    except (Exception, HTTPError) as e:
        logger.traceback(get_tb())
        logger.error(str(e))
        logger.debug("Failed.")
        if isinstance(e, HTTPError):
            raise e
        else:
            raise HTTPError(500, "Internal error.")
def worker_vacuum(commands, command, config):
    """
    Background worker executing a VACUUM on the table described by the
    command's serialized parameters, tracking command state in the shared
    commands array (START -> DONE / ERROR).
    """
    start_time = time.time() * 1000
    set_logger_name("vacuum_worker")
    logger = get_logger(config)
    logger.info("Starting with pid=%s" % (os.getpid()))
    logger.debug("commandid=%s" % (command.commandid, ))
    try:
        # Mark the command as started in shared memory.
        command.state = COMMAND_START
        command.time = time.time()
        commands.update(command)
        # Parameters were serialized by the API side (pickle + base64).
        parameters = pickle.loads(base64.b64decode(command.parameters))
        logger.debug("table=%s, mode=%s, database=%s" % (
            parameters['table'],
            parameters['mode'],
            parameters['database'],
        ))
        conn = connector(host=config.postgresql['host'],
                         port=config.postgresql['port'],
                         user=config.postgresql['user'],
                         password=config.postgresql['password'],
                         database=parameters['database'])
        conn.connect()
        if parameters['mode'] == 'standard':
            query = "VACUUM %s" % (parameters['table'], )
        else:
            # Non-standard modes (e.g. FULL, FREEZE) prefix the statement.
            query = "VACUUM %s %s" % (
                parameters['mode'],
                parameters['table'],
            )
        conn.execute(query)
        conn.close()
    except (error, SharedItem_not_found, Exception) as e:
        # Record the failure on the shared command, then bail out.
        command.state = COMMAND_ERROR
        command.result = str(e)
        command.time = time.time()
        logger.traceback(get_tb())
        logger.error(str(e))
        try:
            # Best-effort: the update/close themselves may fail.
            commands.update(command)
            conn.close()
        except Exception as e:
            pass
        logger.info("Failed.")
        return
    try:
        command.state = COMMAND_DONE
        command.time = time.time()
        commands.update(command)
    except Exception as e:
        logger.traceback(get_tb())
        logger.error(str(e))
    logger.info("Done.")
    logger.debug(" in %s s." % (str( (time.time() * 1000 - start_time) / 1000), ))
def api_function_wrapper(config, http_context, sessions, module, function_name):
    """
    Simple API function wrapper in charge of:
        - instantiate a new logger;
        - check the user session id;
        - call a function named 'function_name' from 'module_name' module and
          return its result.
    """
    logger = get_logger(config)
    logger.info("%s - %s" % (
        module.__name__,
        function_name,
    ))
    # Session check is outside the try block: check_sessionid raises its own
    # HTTPError which must not be converted into a 500 below.
    username = check_sessionid(http_context['headers'], sessions)
    http_context['username'] = username
    try:
        dm = getattr(module, function_name)(config, http_context)
        return dm
    except HTTPError as e:
        logger.error(format_exc())
        # Re-wrap so the payload carries only the plain error message.
        raise HTTPError(e.code, e.message['error'])
    except Exception as e:
        logger.error(format_exc())
        raise HTTPError(500, "Internal error.")
def __init__(self, cmd_queue, commands, config, sessions, *args, **kwargs): """ Constructor. """ # Commands queue. self.cmd_queue = cmd_queue # Commands array in shared memory. self.commands = commands # Sessions array in shared memory. self.sessions = sessions # Configuration instance. self.config = config # Logger. set_logger_name("httpd") self.logger = get_logger(config) # HTTP server version. self.server_version = "temboard-agent/0.0.1" # HTTP request method self.http_method = None # HTTP query. self.query = None # HTTP POST content in json format. self.post_json = None # Call HTTP request handler constructor. BaseHTTPRequestHandler.__init__(self, *args, **kwargs)
def post_pg_ident(conn, config, http_context):
    """
    Overwrite the pg_ident.conf file with the posted 'content', keeping a
    backup of the previous content in '<ident_file>.previous'.

    Raises HTTPError 406 when 'content' is missing and 500 on any
    database or filesystem failure.
    """
    set_logger_name("settings")
    logger = get_logger(config)
    if 'content' not in http_context['post']:
        raise HTTPError(406, "Parameter 'content' not sent.")
    try:
        # Ask PostgreSQL where the ident file lives.
        conn.execute(
            "SELECT setting FROM pg_settings WHERE name = 'ident_file'")
        pg_ident_file = list(conn.get_rows())[0]['setting']
    except error as e:
        logger.error(str(e.message))
        raise HTTPError(500, 'Internal error.')
    # Read the current content; the context manager closes the file, the
    # explicit close() calls the original carried were redundant.
    with open(pg_ident_file, 'r') as fd:
        pg_ident_data = fd.read()
    # Keep a backup of the previous version before overwriting.
    try:
        with open(pg_ident_file + ".previous", 'w') as fdp:
            fdp.write(pg_ident_data)
    except Exception as e:
        # Log the cause instead of silently swallowing it.
        logger.error(str(e))
        raise HTTPError(500, 'Internal error.')
    with open(pg_ident_file, 'w') as fd:
        fd.write(http_context['post']['content'])
    return {'update': True}
def get_hba_versions(conn, config, http_context):
    """Return the pg_hba.conf file path and the list of stored versions."""
    set_logger_name("settings")
    logger = get_logger(config)
    hba_file = get_setting(conn, 'hba_file')
    versions = HBAManager.get_versions(hba_file)
    return {
        'filepath': hba_file,
        'versions': versions
    }
def httpd_run(commands, queue_in, config, sessions):
    """
    Serve HTTP for ever and reload configuration from the conf file on SIGHUP
    signal catch.
    """
    server_address = (config.temboard['address'], config.temboard['port'])
    handler_class = handleRequestsUsing(commands, queue_in, config, sessions)
    httpd = ThreadedHTTPServer(server_address, handler_class)
    # TLS is mandatory: wrap the listening socket with the configured
    # key/certificate pair.
    httpd.socket = ssl.wrap_socket(httpd.socket,
                                   keyfile=config.temboard['ssl_key_file'],
                                   certfile=config.temboard['ssl_cert_file'],
                                   server_side=True)
    # We need a timeout here because the code after httpd.handle_request()
    # call is written to handle configuration re-loading and needs to be ran
    # periodically.
    httpd.timeout = 1
    set_logger_name("httpd")
    logger = get_logger(config)
    while True:
        httpd.handle_request()
        if reload_true():
            # SIGHUP caught
            # Try to load configuration from the configuration file.
            try:
                logger.info("SIGHUP signal caught, trying to reload "
                            "configuration.")
                new_config = Configuration(config.configfile)
                # Prevent any change on plugins list..
                new_config.temboard['plugins'] = config.temboard['plugins']
                new_config.plugins = load_plugins_configurations(new_config)
                # New RequestHandler using the new configuration.
                httpd.RequestHandlerClass = handleRequestsUsing(
                    commands, queue_in, new_config, sessions)
                # Drop the old logger ...
                del logger
                # ... and re-create a new one with the new
                # configuration.
                # NOTE(review): if get_logger(new_config) raised here,
                # 'logger' would be unbound in the except clause below —
                # confirm this cannot happen.
                set_logger_name("httpd")
                logger = get_logger(new_config)
                logger.info("Done.")
            except (ConfigurationError, ImportError) as e:
                logger.traceback(get_tb())
                logger.error(str(e))
                logger.info("Keeping previous configuration.")
            # Reset the global var indicating a SIGHUP signal.
            set_global_reload(False)
def api_vacuum(http_context, queue_in = None, config = None, sessions = None, commands = None):
    """
    Queue a VACUUM command for the vacuum worker.

    Validates the POST parameters ('database', 'table', 'mode'), serializes
    them (pickle + base64), enforces command uniqueness and pushes a Command
    onto the scheduler queue.

    Returns {'cid': <command id>} on success; raises HTTPError 402 when the
    same vacuum is already running, 500 on internal failure.
    """
    set_logger_name("administration")
    worker = b'vacuum'
    # Get a new logger.
    logger = get_logger(config)
    try:
        check_sessionid(http_context['headers'], sessions)
        post = http_context['post']
        # Check POST parameters.
        validate_parameters(post, [
            ('database', T_OBJECTNAME, False),
            ('table', T_OBJECTNAME, False),
            ('mode', T_VACUUMMODE, False)
        ])
        # Serialize parameters.
        parameters = base64.b64encode(
            pickle.dumps({
                'database': post['database'],
                'table': post['table'],
                'mode': post['mode']
            })).decode('utf-8')
    except (Exception, HTTPError) as e:
        logger.traceback(get_tb())
        logger.error(str(e))
        if isinstance(e, HTTPError):
            raise e
        else:
            raise HTTPError(500, "Internal error.")
    # Check command uniqueness.
    try:
        commands.check_uniqueness(worker, parameters)
    except SharedItem_exists as e:
        logger.traceback(get_tb())
        logger.error(str(e))
        # Fixed user-facing typo: "Vaccum" -> "Vacuum".
        raise HTTPError(402,
                        "Vacuum '%s' already running on table '%s'."
                        % (post['mode'], post['table']))
    cid = hash_id(worker + b'-' + parameters.encode('utf-8'))
    command = Command(
        cid.encode('utf-8'), time.time(), 0, worker, parameters, 0, u'')
    try:
        commands.add(command)
        # Put the Command in the command queue
        queue_in.put(command)
        return {"cid": cid}
    except SharedItem_no_free_slot_left as e:
        logger.traceback(get_tb())
        logger.error(str(e))
        raise HTTPError(500, "Internal error.")
def profile(http_context, queue_in = None, config = None, sessions = None, commands = None):
    """
    @api {get} /profile Get current user name.
    @apiVersion 0.0.1
    @apiName Profile
    @apiGroup User

    @apiHeader {String} X-Session Session ID.

    @apiSuccess {String} username Username.

    @apiExample {curl} Example usage:
        curl -k -H "X-Session: fa452548403ac53f2158a65f5eb6db9723d2b07238dd83f5b6d9ca52ce817b63" https://localhost:2345/profile

    @apiSuccessExample Success-Response:
        HTTP/1.0 200 OK
        Server: temboard-agent/0.0.1 Python/2.7.8
        Date: Wed, 22 Apr 2015 12:33:19 GMT
        Content-type: application/json

        {"username": "******"}

    @apiError (500 error) error Internal error.
    @apiError (401 error) error Invalid session ID.
    @apiError (406 error) error Session ID malformed.

    @apiErrorExample 401 error example
        HTTP/1.0 401 Unauthorized
        Server: temboard-agent/0.0.1 Python/2.7.8
        Date: Wed, 22 Apr 2015 12:36:33 GMT
        Content-type: application/json

        {"error": "Invalid session."}

    @apiErrorExample 406 error example
        HTTP/1.0 406 Not Acceptable
        Server: temboard-agent/0.0.1 Python/2.7.8
        Date: Wed, 22 Apr 2015 12:37:23 GMT
        Content-type: application/json

        {"error": "Parameter 'X-Session' is malformed."}
    """
    headers = http_context['headers']
    set_logger_name("api")
    logger = get_logger(config)
    check_sessionid(headers, sessions)
    logger.info("[profile] User session: %s" % (headers['X-Session']))
    try:
        # Session ids are stored as bytes in the shared sessions array.
        session = sessions.get_by_sessionid(
            headers['X-Session'].encode('utf-8'))
        return {'username': session.username}
    except SharedItem_not_found:
        raise HTTPError(401, "Invalid session.")
def supervision_sender_worker(commands, command, config):
    """
    Worker shipping collected metrics from the on-disk metrics queue to the
    collector URL, one message per iteration.

    A message is peeked (shift without delete), sent, then deleted only
    once the send succeeded — or when the collector answered 409, meaning
    the data is already there.
    """
    signal.signal(signal.SIGTERM, supervision_worker_sigterm_handler)
    start_time = time.time() * 1000
    set_logger_name("supervision_sender_worker")
    logger = get_logger(config)
    # TODO: logging methods in supervision plugin must be aligned.
    logging.root = logger
    logger.info("Start pid=%s id=%s" % (
        os.getpid(),
        command.commandid,
    ))
    command.state = COMMAND_START
    command.time = time.time()
    command.pid = os.getpid()
    commands.update(command)
    # Counter bounding the number of messages sent in one run (see below).
    c = 0
    while True:
        # Let's do it smoothly..
        time.sleep(0.5)
        q = Queue('%s/metrics.q' % (config.temboard['home']),
                  max_size=1024 * 1024 * 10,
                  overflow_mode='slide')
        # Peek the next message without removing it from the queue.
        msg = q.shift(delete=False)
        if msg is None:
            break
        try:
            send_output(config.plugins['supervision']['ssl_ca_cert_file'],
                        config.plugins['supervision']['collector_url'],
                        config.plugins['supervision']['agent_key'],
                        msg.content)
        except urllib2.HTTPError as e:
            logger.error("Failed to send data.")
            logger.debug(e.message)
            logger.info("End. Duration: %s."
                        % (str(time.time() * 1000 - start_time)))
            # On an error 409 (DB Integrity) we need to remove the message.
            if int(e.code) != 409:
                return
        except Exception as e:
            logger.error("Failed to send data.")
            logger.debug(str(e))
            logger.info("End. Duration: %s."
                        % (str(time.time() * 1000 - start_time)))
            return
        # Send succeeded (or 409): now actually remove the message.
        _ = q.shift(delete=True, check_msg=msg)
        if c > 60:
            # Cap the run length; remaining messages wait for the next run.
            logger.info("End. Duration: %s."
                        % (str(time.time() * 1000 - start_time)))
            return
        c += 1
    logger.info("End. Duration: %s." % (str(time.time() * 1000 - start_time)))
def post_hba(conn, config, http_context):
    """
    Rebuild and save pg_hba.conf from the structured 'entries' posted,
    optionally keeping a versioned copy when 'new_version' is true.
    """
    new_version = False
    set_logger_name("settings")
    logger = get_logger(config)
    # Push a notification.
    try:
        NotificationMgmt.push(
            config,
            Notification(username=http_context['username'],
                         message="HBA file updated"))
    except NotificationError as e:
        # Notification failure is deliberately non-fatal.
        logger.error(e.message)
    if 'entries' not in http_context['post']:
        raise HTTPError(406, "Parameter 'entries' not sent.")
    if http_context and 'new_version' in http_context['post']:
        # Check parameter 'version'
        validate_parameters(http_context['post'],
                            [('new_version', T_NEW_VERSION, False)])
        if http_context['post']['new_version'] is True:
            new_version = True
    hba_file = get_setting(conn, 'hba_file')
    hba_entries = []
    logger.debug(http_context['post']['entries'])
    for entry in http_context['post']['entries']:
        if 'comment' in entry and len(entry['connection']) == 0:
            # Pure comment line in the hba file.
            new_hba_entry = HBAComment()
            new_hba_entry.comment = entry['comment']
        else:
            new_hba_entry = HBAEntry()
            try:
                # Missing keys default to empty strings.
                new_hba_entry.connection = entry[
                    'connection'] if 'connection' in entry else ''
                new_hba_entry.database = entry[
                    'database'] if 'database' in entry else ''
                new_hba_entry.user = entry['user'] if 'user' in entry else ''
                new_hba_entry.address = entry[
                    'address'] if 'address' in entry else ''
                new_hba_entry.auth_method = entry[
                    'auth_method'] if 'auth_method' in entry else ''
                new_hba_entry.auth_options = entry[
                    'auth_options'] if 'auth_options' in entry else ''
            except Exception as e:
                logger.error(e.message)
                raise HTTPError(406, "Invalid HBA entry.")
            # Sanity check of the assembled entry.
            new_hba_entry.lazy_check()
        hba_entries.append(new_hba_entry)
    return HBAManager.save_entries(hba_file, hba_entries, new_version)
def Worker(commands, command, config):
    """
    Routing function in charge of calling the right worker function,
    looked up by the command's worker name.
    """
    # Add a signal handler on SIGTERM and SIGHUP signals.
    signal.signal(signal.SIGTERM, worker_sigterm_handler)
    signal.signal(signal.SIGHUP, worker_sighup_handler)
    try:
        get_worker(command.worker)(commands, command, config)
    except Exception as e:
        # A single Exception handler suffices: AttributeError (unknown
        # worker name) is a subclass of Exception, so the original
        # "(AttributeError, Exception)" tuple was redundant.
        set_logger_name("scheduler")
        logger = get_logger(config)
        logger.error(str(e))
def get_command(http_context, queue_in = None, config = None, sessions = None, commands = None):
    """
    Return the current state/result of a queued command identified by the
    URL variable. Finished commands (DONE or ERROR) are removed from the
    shared array once read — this is a one-shot read.
    """
    set_logger_name("api")
    logger = get_logger(config)
    check_sessionid(http_context['headers'], sessions)
    cid = http_context['urlvars'][0]
    try:
        command = commands.get_by_commandid(cid.encode('utf-8'))
        c_time = command.time
        c_state = command.state
        c_result = command.result
        # One-shot: drop finished commands from shared memory.
        if c_state == COMMAND_DONE or c_state == COMMAND_ERROR:
            commands.delete(cid.encode('utf-8'))
        return {'cid': cid,
                'time': c_time,
                'state': c_state,
                'result': c_result}
    except SharedItem_not_found:
        raise HTTPError(401, "Invalid command.")
def get_hba(conn, config, http_context):
    """Return pg_hba.conf entries, optionally from a stored file version."""
    set_logger_name("settings")
    logger = get_logger(config)
    version = None
    if http_context and 'version' in http_context['query']:
        # Check parameter 'version'
        validate_parameters(http_context['query'],
                            [('version', T_FILE_VERSION, True)])
        version = http_context['query']['version'][0]
    hba_file = get_setting(conn, 'hba_file')
    entries = [hba_entry.__dict__
               for hba_entry in HBAManager.get_entries(hba_file, version)]
    return {
        'filepath': hba_file,
        'version': version,
        'entries': entries
    }
def get_hba_raw(conn, config, http_context):
    """Return the raw pg_hba.conf content, optionally a stored version."""
    set_logger_name("settings")
    logger = get_logger(config)
    version = None
    if http_context and 'version' in http_context['query']:
        # Check parameter 'version'
        validate_parameters(http_context['query'],
                            [('version', T_FILE_VERSION, True)])
        version = http_context['query']['version'][0]
    hba_file = get_setting(conn, 'hba_file')
    content = HBAManager.get_file_content(hba_file, version)
    return {
        'filepath': hba_file,
        'version': version,
        'content': content
    }
def supervision_probe_blocks(http_context, queue_in=None, config=None,
                             sessions=None, commands=None):
    """
    Run the 'blocks' supervision probe and return its output.

    Raises HTTPError 500 on any probe failure.
    """
    set_logger_name("supervision")
    logger = get_logger(config)
    check_sessionid(http_context['headers'], sessions)
    try:
        output = api_run_probe(probe_blocks(config.plugins['supervision']),
                               config)
        return output
    except (Exception, error) as e:
        # Fixed: log str(e) instead of str(e.message) — '.message' does not
        # exist on Python 3 nor on many exception classes, which would turn
        # the original error into an AttributeError here.
        logger.error(str(e))
        raise HTTPError(500, "Internal error.")
def api_run_probe(probe_instance, config):
    """
    Run a probe instance.

    Builds connection info for the monitored instance from the agent
    configuration, validates it, and gathers the probe data.
    Returns the probe data, or None when host information is unavailable.
    """
    set_logger_name("supervision")
    logger = get_logger(config)
    # TODO: logging methods in supervision_agent code and supervision_agent
    # should be aligned.
    logging.root = logger
    try:
        system_info = host_info(config.plugins['supervision'])
    except ValueError as e:
        logger.error(
            "supervision_worker - unable to get system information: %s\n"
            % str(e))
        # NOTE(review): implicit None return — callers must handle it.
        return
    config.plugins['supervision']['conninfo'] = [{
        'host': config.postgresql['host'],
        'port': config.postgresql['port'],
        'user': config.postgresql['user'],
        'database': config.postgresql['dbname'],
        'password': config.postgresql['password'],
        'dbnames': config.plugins['supervision']['dbnames'],
        'instance': config.postgresql['instance']
    }]
    # Validate connection information from the config, and ensure
    # the instance is available
    instances = []
    for conninfo in config.plugins['supervision']['conninfo']:
        logging.debug("Validate connection information on instance \"%s\"",
                      conninfo['instance'])
        instances.append(instance_info(conninfo, system_info['hostname']))
    # Gather the data from probes
    data = run_probes([probe_instance], system_info['hostname'], instances,
                      delta=False)
    return data
def worker_vacuum(commands, command, config):
    """
    Administration worker running a VACUUM on the table described by the
    command's serialized parameters, tracking command state in the shared
    commands array (START -> DONE / ERROR).
    """
    start_time = time.time() * 1000
    set_logger_name("administration_worker")
    logger = get_logger(config)
    logger.info("[vacuum] Starting: pid=%s commandid=%s"
                % (os.getpid(), command.commandid,))
    command.state = COMMAND_START
    command.time = time.time()
    commands.update(command)
    # Parameters were serialized by the API side (pickle + base64).
    parameters = pickle.loads(base64.b64decode(command.parameters))
    logger.info("[vacuum] table=%s, mode=%s, database=%s"
                % (parameters['table'], parameters['mode'],
                   parameters['database'],))
    conn = connector(
        host = config.postgresql['host'],
        port = config.postgresql['port'],
        user = config.postgresql['user'],
        password = config.postgresql['password'],
        database = parameters['database']
    )
    try:
        conn.connect()
        if parameters['mode'] == 'standard':
            query = "VACUUM %s" % (parameters['table'],)
        else:
            # Non-standard modes (e.g. FULL) prefix the statement.
            query = "VACUUM %s %s" % (parameters['mode'],
                                      parameters['table'],)
        conn.execute(query)
        conn.close()
    except error as e:
        # Record the failure on the shared command before giving up.
        command.state = COMMAND_ERROR
        command.result = str(e.message)
        command.time = time.time()
        commands.update(command)
        logger.error("%s" % (str(e.message)))
        # Best-effort close; the connection may never have been opened.
        try:
            conn.close()
        except error as e:
            pass
        except Exception:
            pass
        return
    command.state = COMMAND_DONE
    command.time = time.time()
    commands.update(command)
    logger.info("[vacuum] done in %s s."
                % (str((time.time()*1000 - start_time)/1000),))
def __init__(self, config, *args, **kwargs):
    """
    Dashboard plugin configuration loader.

    Reads 'scheduler_interval' (seconds, 0 < v < 86400) and
    'history_length' (0 < v < 300) from the plugin's config-file section,
    keeping the defaults below when the section or an option is missing
    or out of range.
    """
    PluginConfiguration.__init__(self, config.configfile, *args, **kwargs)
    # Defaults used when the section/option is absent or invalid.
    self.plugin_configuration = {
        'scheduler_interval': 2,
        'history_length': 20
    }
    set_logger_name("dashboard")
    logger = get_logger(config)
    try:
        self.check_section(__name__)
    except ConfigurationError:
        # No section for this plugin: keep defaults.
        return
    try:
        if not (0 < self.getint(__name__, 'scheduler_interval') < 86400):
            raise ValueError()
        self.plugin_configuration['scheduler_interval'] = \
            self.getint(__name__, 'scheduler_interval')
    except ValueError:
        # Fixed: the original message had a missing space ("must bean")
        # and passed the section/file arguments in the wrong order.
        logger.error(
            "%s - configuration error: 'scheduler_interval' must be "
            "an integer between 0 and 86400 in '%s' section in %s."
            % (__name__, __name__, self.configfile))
    except NoOptionError:
        pass
    try:
        if not (0 < self.getint(__name__, 'history_length') < 300):
            raise ValueError()
        self.plugin_configuration['history_length'] = \
            self.getint(__name__, 'history_length')
    except ValueError:
        # Fixed: "histor_length" typo, missing space and argument order.
        logger.error(
            "%s - configuration error: 'history_length' must be "
            "an integer between 0 and 300 in '%s' section in %s."
            % (__name__, __name__, self.configfile))
    except NoOptionError:
        pass
def post_hba_raw(conn, config, http_context):
    """
    Overwrite pg_hba.conf with the raw 'content' posted, optionally
    keeping a versioned copy when 'new_version' is true.
    """
    set_logger_name("settings")
    logger = get_logger(config)
    post = http_context['post']
    if 'content' not in post:
        raise HTTPError(406, "Parameter 'content' not sent.")
    logger.debug(post)
    new_version = False
    if http_context and 'new_version' in post:
        # Check parameter 'version'
        validate_parameters(post, [('new_version', T_NEW_VERSION, False)])
        if post['new_version'] is True:
            new_version = True
    hba_file = get_setting(conn, 'hba_file')
    return HBAManager.save_file_content(hba_file, post['content'],
                                        new_version)
def scheduler(queue_in, config, commands):
    """
    Queue a dashboard_collector command, making sure only one instance of
    it exists at a time.
    """
    logger = get_logger(config)
    worker = b'dashboard_collector'
    parameters = ''
    # Abort silently when the same command is already queued or running.
    try:
        commands.check_uniqueness(worker, parameters)
    except SharedItem_exists:
        return
    cid = hash_id(worker)
    new_command = Command(cid.encode('utf-8'), time.time(), 0, worker,
                          parameters, 0, u'')
    try:
        commands.add(new_command)
        # Put the Command in the command queue
        queue_in.put(new_command)
    except SharedItem_no_free_slot_left:
        # No room left in the shared commands array: skip this round.
        pass
    return
def get_pg_ident(conn, config, http_context):
    """
    Return the path and content of the pg_ident.conf file.

    Raises HTTPError 500 when the ident file path cannot be read from
    PostgreSQL.
    """
    set_logger_name("settings")
    logger = get_logger(config)
    ret = {'filepath': None, 'content': ''}
    try:
        # Ask PostgreSQL where the ident file lives.
        conn.execute(
            "SELECT setting FROM pg_settings WHERE name = 'ident_file'")
        pg_ident_file = list(conn.get_rows())[0]['setting']
    except error as e:
        logger.error(str(e.message))
        raise HTTPError(500, 'Internal error.')
    ret['filepath'] = pg_ident_file
    # The context manager closes the file; the explicit close() and the
    # unused p_pg_ident_lines local the original carried were removed.
    with open(pg_ident_file, 'r') as fd:
        ret['content'] = fd.read()
    return ret
def api_function_wrapper_pg(config, http_context, sessions, module, function_name):
    """
    API function wrapper in charge of:
        - instantiate a new logger;
        - check the user session id;
        - start a new PostgreSQL connexion;
        - call a function named 'function_name' from 'module_name' module and
          return its result;
        - close the PG connexion.

    HTTPError is re-raised as-is; any other exception becomes an HTTP 500.
    """
    logger = get_logger(config)
    logger.debug("Calling %s.%s()." % (module.__name__, function_name,))
    logger.debug(http_context)
    try:
        # Authentication: raises HTTPError when the session id is invalid.
        username = check_sessionid(http_context['headers'], sessions)
        http_context['username'] = username
        conn = connector(
            host = config.postgresql['host'],
            port = config.postgresql['port'],
            user = config.postgresql['user'],
            password = config.postgresql['password'],
            database = config.postgresql['dbname']
        )
        conn.connect()
        dm = getattr(module, function_name)(conn, config, http_context)
        conn.close()
        logger.debug("Done.")
        return dm
    except (error, Exception, HTTPError) as e:
        logger.traceback(get_tb())
        logger.error(str(e))
        logger.debug("Failed.")
        # Best-effort close: 'conn' may not exist or not be connected.
        try:
            conn.close()
        except Exception:
            pass
        if isinstance(e, HTTPError):
            raise e
        else:
            raise HTTPError(500, "Internal error.")
def dashboard_collector_worker(commands, command, config):
    """
    Worker collecting dashboard metrics from PostgreSQL and pushing them,
    JSON-encoded, onto the on-disk dashboard queue. Exits with status 1 on
    any failure.
    """
    try:
        signal.signal(signal.SIGTERM, dashboard_worker_sigterm_handler)
        start_time = time.time() * 1000
        set_logger_name("dashboard_collector")
        logger = get_logger(config)
        logger.debug("Starting with pid=%s" % (getpid()))
        logger.debug("commandid=%s" % (command.commandid))
        command.state = COMMAND_START
        command.time = time.time()
        command.pid = getpid()
        commands.update(command)
        conn = connector(host=config.postgresql['host'],
                         port=config.postgresql['port'],
                         user=config.postgresql['user'],
                         password=config.postgresql['password'],
                         database=config.postgresql['dbname'])
        conn.connect()
        db_metrics = metrics.get_metrics(conn, config)
        # We don't want to store notifications in the history.
        db_metrics.pop('notifications', None)
        conn.close()
        # 'slide' overflow drops the oldest snapshot so the queue keeps at
        # most history_length (+1) entries.
        q = Queue('%s/dashboard.q' % (config.temboard['home']),
                  max_length=(config.plugins['dashboard']['history_length']
                              + 1),
                  overflow_mode='slide')
        q.push(Message(content=json.dumps(db_metrics)))
        logger.debug("Duration: %s." % (str(time.time() * 1000 - start_time)))
        logger.debug("Done.")
    except (error, Exception) as e:
        logger.traceback(get_tb())
        logger.error(str(e))
        logger.debug("Failed.")
        # Best-effort close: 'conn' may not exist or not be connected.
        try:
            conn.close()
        except Exception:
            pass
        sys.exit(1)
def api_function_wrapper_pg(config, http_context, sessions, module, function_name):
    """
    Simple API function wrapper in charge of:
        - instantiate a new logger;
        - check the user session id;
        - start a new PostgreSQL connexion;
        - call a function named 'function_name' from 'module_name' module and
          return its result;
        - close the PG connexion.
    """
    logger = get_logger(config)
    logger.info("%s - %s" % (
        module.__name__,
        function_name,
    ))
    # Session check happens before the try block: check_sessionid raises
    # its own HTTPError which must propagate untouched.
    username = check_sessionid(http_context['headers'], sessions)
    http_context['username'] = username
    conn = connector(host=config.postgresql['host'],
                     port=config.postgresql['port'],
                     user=config.postgresql['user'],
                     password=config.postgresql['password'],
                     database=config.postgresql['dbname'])
    try:
        conn.connect()
        dm = getattr(module, function_name)(conn, config, http_context)
        conn.close()
        return dm
    except (error, Exception, HTTPError) as e:
        logger.error(format_exc())
        # Best-effort close: the connection may not be open.
        try:
            conn.close()
        except Exception:
            pass
        if isinstance(e, HTTPError):
            # Re-wrap so the payload carries only the plain error message.
            raise HTTPError(e.code, e.message['error'])
        else:
            raise HTTPError(500, "Internal error.")
def __init__(self, config, *args, **kwargs):
    """
    Administration plugin configuration loader: reads the optional
    'pg_ctl' command template from the plugin's config-file section.
    """
    PluginConfiguration.__init__(self, config.configfile, *args, **kwargs)
    self.plugin_configuration = {
        'pg_ctl': None,
    }
    set_logger_name("administration")
    logger = get_logger(config)
    try:
        self.check_section(__name__)
    except ConfigurationError:
        # Section missing from the config file: keep the defaults.
        return
    try:
        raw = self.get(__name__, 'pg_ctl')
        # Strip surrounding quotes, single or double.
        for quote in ('"', "'"):
            if raw.startswith(quote) and raw.endswith(quote):
                raw = raw[1:-1]
        self.plugin_configuration['pg_ctl'] = raw
    except configparser.NoOptionError:
        pass
def api_run_probe(probe_instance, config):
    """
    Run a probe instance.

    Builds connection info for the monitored instance from the agent
    configuration, validates it, and gathers the probe data.
    """
    set_logger_name("supervision")
    logger = get_logger(config)
    # TODO: logging methods in supervision_agent code and supervision_agent
    # should be aligned.
    logging.root = logger
    config.plugins['supervision']['conninfo'] = [{
        'host': config.postgresql['host'],
        'port': config.postgresql['port'],
        'user': config.postgresql['user'],
        'database': config.postgresql['dbname'],
        'password': config.postgresql['password'],
        'dbnames': config.plugins['supervision']['dbnames'],
        'instance': config.postgresql['instance']
    }]
    # Validate connection information from the config, and ensure
    # the instance is available
    instances = []
    sysinfo = SysInfo()
    hostname = sysinfo.hostname(config.temboard['hostname'])
    for conninfo in config.plugins['supervision']['conninfo']:
        logging.debug("Validate connection information on instance \"%s\"",
                      conninfo['instance'])
        instances.append(instance_info(conninfo, hostname))
    # Gather the data from probes
    data = run_probes([probe_instance], instances, delta=False)
    return data
def get_command(http_context, queue_in=None, config=None, sessions=None,
                commands=None):
    """
    Return the current state/result of a queued command identified by the
    URL variable. Finished commands (DONE or ERROR) are removed from the
    shared array once read — this is a one-shot read.
    """
    headers = http_context['headers']
    set_logger_name("api")
    logger = get_logger(config)
    logger.info("Get command status.")
    try:
        check_sessionid(headers, sessions)
    except HTTPError as e:
        logger.traceback(get_tb())
        logger.error(e.message)
        logger.info("Invalid session.")
        raise e
    cid = http_context['urlvars'][0]
    try:
        command = commands.get_by_commandid(cid.encode('utf-8'))
        c_time = command.time
        c_state = command.state
        c_result = command.result
        # One-shot: drop finished commands from shared memory.
        if c_state == COMMAND_DONE or c_state == COMMAND_ERROR:
            commands.delete(cid.encode('utf-8'))
        logger.info("Done.")
        return {
            'cid': cid,
            'time': c_time,
            'state': c_state,
            'result': c_result
        }
    except SharedItem_not_found as e:
        logger.traceback(get_tb())
        logger.error(e.message)
        logger.info("Failed.")
        raise HTTPError(401, "Invalid command.")
def delete_hba_version(conn, config, http_context):
    """
    Remove a stored pg_hba.conf file version. The 'version' query
    parameter is mandatory (HTTPError 406 otherwise).
    """
    version = None
    set_logger_name("settings")
    logger = get_logger(config)
    if http_context and 'version' in http_context['query']:
        # Check parameter 'version'
        validate_parameters(http_context['query'],
                            [('version', T_FILE_VERSION, True)])
        version = http_context['query']['version'][0]
    if version is None:
        raise HTTPError(406, "HBA version number must be specified.")
    hba_file = get_setting(conn, 'hba_file')
    # Push a notification.
    try:
        NotificationMgmt.push(
            config,
            Notification(username=http_context['username'],
                         message="HBA file version '%s' removed."
                         % (version)))
    except NotificationError as e:
        # Notification failure is deliberately non-fatal.
        logger.error(e.message)
    return HBAManager.remove_version(hba_file, version)
def post_settings(conn, config, http_context):
    """
    Apply posted PostgreSQL settings via ALTER SYSTEM, then reload the
    server configuration.

    Each posted setting is validated against pg_settings metadata
    (vartype, min/max, enumvals) before being applied; settings whose
    value did not change are skipped unless 'force' is 'true'.
    Returns the list of applied settings with their previous values and
    whether a restart is required.

    NOTE(review): control flow relies on raising bare Exception() inside
    the nested loops to break out once a setting has been checked — the
    outer handlers depend on this exact ordering, so the code is kept
    byte-identical.
    """
    set_logger_name("settings")
    logger = get_logger(config)
    if http_context and 'filter' in http_context['query']:
        # Check 'filter' parameters.
        validate_parameters(http_context['query'],
                            [('filter', T_PGSETTINGS_FILTER, True)])
    pg_config_categories = get_settings(conn, config, None)
    if 'settings' not in http_context['post']:
        raise HTTPError(406, "Parameter 'settings' not sent.")
    settings = http_context['post']['settings']
    ret = {'settings': []}
    # These settings hold octal file modes and must not be range-checked.
    do_not_check_names = ['unix_socket_permissions', 'log_file_mode']
    logger.debug(settings)
    for setting in settings:
        if 'name' not in setting \
                or 'setting' not in setting:
            raise HTTPError(406, "setting item malformed.")
        checked = False
        try:
            # Find the matching pg_settings row and validate by vartype;
            # raising Exception() is used as a multi-level 'break'.
            for pg_config_category in pg_config_categories:
                for pg_config_item in pg_config_category['rows']:
                    if pg_config_item['name'] == setting['name']:
                        if pg_config_item['name'] in do_not_check_names:
                            checked = True
                            raise Exception()
                        if pg_config_item['vartype'] == u'integer':
                            # Integers handling.
                            if pg_config_item['min_val'] \
                                and pg_config_item['unit'] and \
                                (int(human_to_number(
                                    setting['setting'],
                                    pg_config_item['unit']))
                                 < int(pg_config_item['min_val'])):
                                raise HTTPError(
                                    406, "%s: Invalid setting."
                                    % (pg_config_item['name']))
                            if pg_config_item['max_val'] \
                                and pg_config_item['unit'] and \
                                (int(human_to_number(
                                    setting['setting'],
                                    pg_config_item['unit']))
                                 > int(pg_config_item['max_val'])):
                                raise HTTPError(
                                    406, "%s: Invalid setting."
                                    % (pg_config_item['name']))
                            setting['setting'] = pg_escape(setting['setting'])
                            # Strip one pair of surrounding quotes.
                            if ((setting['setting'].startswith("'")
                                 and setting['setting'].endswith("'")) or \
                                (setting['setting'].startswith('"')
                                 and setting['setting'].endswith('"'))):
                                setting['setting'] = setting['setting'][1:-1]
                            if setting['setting'] == '':
                                setting['setting'] = None
                            checked = True
                        if pg_config_item['vartype'] == u'real':
                            # Real handling.
                            if pg_config_item['min_val'] and \
                                (float(setting['setting'])
                                 < float(pg_config_item['min_val'])):
                                raise HTTPError(
                                    406, "%s: Invalid setting."
                                    % (pg_config_item['name']))
                            if pg_config_item['max_val'] and \
                                (float(setting['setting'])
                                 > float(pg_config_item['max_val'])):
                                raise HTTPError(
                                    406, "%s: Invalid setting."
                                    % (pg_config_item['name']))
                            setting['setting'] = float(setting['setting'])
                            checked = True
                        if pg_config_item['vartype'] == u'bool':
                            # Boolean handling.
                            if setting['setting'].lower() not in [
                                u'on', u'off'
                            ]:
                                raise HTTPError(
                                    406, 'Invalid setting: %s.'
                                    % (setting['setting'].lower()))
                            checked = True
                        if pg_config_item['vartype'] == u'enum':
                            # Enum handling.
                            if len(pg_config_item['enumvals']) > 0:
                                # enumvals arrives as a '{a,b,c}'-style
                                # string; strip braces, split, unquote.
                                # NOTE(review): the character class below
                                # contains an escaped space ("[\"\ ']") —
                                # looks like it may be damaged; confirm
                                # against upstream before relying on it.
                                enumvals = [
                                    re.sub(r"^[\"\'](.+)[\"\ ']$", r"\1",
                                           enumval)
                                    for enumval in
                                    pg_config_item['enumvals'][1:-1].split(',')
                                ]
                                if ((setting['setting'].startswith("'")
                                     and setting['setting'].endswith("'")) or \
                                    (setting['setting'].startswith('"')
                                     and setting['setting'].endswith('"'))):
                                    setting['setting'] = setting['setting'][
                                        1:-1]
                                if setting['setting'] not in enumvals:
                                    raise HTTPError(
                                        406, 'Invalid setting: %s.'
                                        % (setting['setting']))
                                checked = True
                        if pg_config_item['vartype'] == u'string':
                            # String handling.
                            # setting must be escaped.
                            setting['setting'] = pg_escape(
                                str(setting['setting']))
                            if ((setting['setting'].startswith("'")
                                 and setting['setting'].endswith("'")) or \
                                (setting['setting'].startswith('"')
                                 and setting['setting'].endswith('"'))):
                                setting['setting'] = setting['setting'][1:-1]
                            if setting['setting'] == '':
                                setting['setting'] = None
                            checked = True
                        # Found and processed the item: leave both loops.
                        raise Exception()
        except HTTPError as e:
            # Validation error: surface it to the client.
            raise HTTPError(e.code, e.message['error'])
        except Exception as e:
            # Bare Exception() used as loop-break above: not an error.
            pass
        if not checked:
            raise HTTPError(
                406, 'Parameter %s can\'t be checked.'
                % (setting['name']))
        if 'force' not in setting:
            setting['force'] = 'false'
        # Apply only when the value actually changed, or when forced.
        if ((pg_config_item['vartype'] == u'integer'
             and setting['setting'] != pg_config_item['setting_raw']) or \
            (pg_config_item['vartype'] == u'real'
             and float(setting['setting'])
             != float(pg_config_item['setting'])) or \
            (pg_config_item['vartype'] not in [u'integer', u'real']
             and setting['setting'] != pg_config_item['setting'])) or \
            (setting['force'] == 'true'):
            # At this point, all incoming parameters have been checked.
            if setting['setting']:
                query = "ALTER SYSTEM SET %s TO '%s'" % (setting['name'],
                                                         setting['setting'])
            else:
                # Empty value means: reset to default.
                query = "ALTER SYSTEM RESET %s;" % (setting['name'])
            logger.debug(query)
            # Push a notification on setting change.
            try:
                NotificationMgmt.push(
                    config,
                    Notification(
                        username=http_context['username'],
                        message="Setting '%s' changed: '%s' -> '%s'"
                        % (pg_config_item['name'],
                           pg_config_item['setting_raw'],
                           setting['setting'])))
            except NotificationError as e:
                logger.error(e.message)
            try:
                conn.execute(query)
            except error as e:
                raise HTTPError(408, "%s: %s" % (setting['name'], e.message))
            ret['settings'].append({
                'name': pg_config_item['name'],
                'setting': setting['setting'],
                'previous_setting': pg_config_item['setting_raw'],
                'restart': True if pg_config_item['context']
                in ['internal', 'postmaster'] else False
            })
    # Reload PG configuration.
    conn.execute("SELECT pg_reload_conf()")
    # Push a notification.
    try:
        NotificationMgmt.push(
            config,
            Notification(username=http_context['username'],
                         message="PostgreSQL reload"))
    except NotificationError as e:
        logger.error(e.message)
    return ret