def get_db_host():
    """Return the host part of the configured database URL.

    Prefers 'database_admin_url' over 'database_url'; falls back to
    "localhost" when neither option is configured.
    """
    for option in ('database_admin_url', 'database_url'):
        if cfg.exists(option):
            return osdb.get_url_host(cfg.get(option))
    return "localhost"
def update_instance(self, instance):
    """Re-initialize the CLI state for the given configuration instance.

    Reloads logging, the prompt strings, the communication handler and
    rebuilds the set of loadable modules found under
    'opensipscli.modules' (honoring the 'skip_modules' option).
    """
    # first of all, let's handle logging
    self.current_instance = instance
    self.update_logger()

    # Update the intro and prompt
    self.intro = cfg.get('prompt_intro')
    self.prompt = '(%s): ' % cfg.get('prompt_name')

    # initialize communication handler
    self.handler = comm.initialize()

    # remove all loaded modules
    self.modules = {}
    if not self.execute:
        print(self.intro)
    # add the built-in modules and commands list
    for mod in ['clear', 'help', 'history', 'exit', 'quit']:
        self.modules[mod] = (self, None)

    if not cfg.exists('skip_modules'):
        skip_modules = []
    else:
        skip_modules = cfg.get('skip_modules')

    # key[20:] strips the "opensipscli.modules." prefix (20 characters)
    available_modules = {
        key[20:]: sys.modules[key]
        for key in sys.modules.keys()
        if key.startswith("opensipscli.modules.") and
        key[20:] not in skip_modules
    }
    for name, module in available_modules.items():
        m = importlib.import_module("opensipscli.modules.{}".format(name))
        # a loadable module file must define a 'Module' base symbol
        if not hasattr(m, "Module"):
            logger.debug(
                "Skipping module '{}' - does not extend Module".format(
                    name))
            continue
        # ... and a class named exactly like the module itself
        if not hasattr(m, name):
            logger.debug(
                "Skipping module '{}' - module implementation not found".
                format(name))
            continue
        mod = getattr(module, name)
        # the class must implement the Module contract
        if not hasattr(mod, '__exclude__') or not hasattr(
                mod, '__get_methods__'):
            logger.debug(
                "Skipping module '{}' - module does not implement Module".
                format(name))
            continue
        # NOTE: __exclude__ is invoked unbound, with the class as 'self'
        if mod.__exclude__(mod):
            logger.debug(
                "Skipping module '{}' - excluded on purpose".format(name))
            continue
        logger.debug("Loaded module '{}'".format(name))
        imod = mod()
        self.modules[name] = (imod, mod.__get_methods__(imod))
def __exclude__(self):
    """Decide whether this module must be excluded from the CLI.

    :return: True when SQL backend support for the configured database
        URL is missing (or SQLAlchemy itself is unavailable)
    """
    # BUGFIX: the option name was misspelled "dababase_user_url" in the
    # exists() check, so 'database_user_url' was never honored
    if cfg.exists("database_user_url"):
        db_url = cfg.get("database_user_url")
    elif cfg.exists("database_url"):
        db_url = cfg.get("database_url")
    else:
        return not osdb.has_sqlalchemy()
    return not osdb.has_dialect(osdb.get_dialect(db_url))
def preloop(self):
    """Preload the readline history file before entering the loop.

    Registers history_write() as an atexit hook exactly once.
    """
    history_file = cfg.get('history_file')
    # BUGFIX: expand '~' so the existence check and the read target the
    # same path history_write() saves to
    expanded = os.path.expanduser(history_file)
    if readline and os.path.exists(expanded):
        readline.read_history_file(expanded)
        logger.debug("using history file {}".format(history_file))
    readline.set_history_length(int(cfg.get('history_file_size')))
    if not self.registered_atexit:
        atexit.register(self.history_write)
def get_db_url(self, db_name=None):
    """Build the regular-user database URL.

    :param db_name: database name; defaults to the configured
        'database_name' (resolved at call time)
    :return: the DB URL string, or None when no engine is available
    """
    # BUGFIX: the default used to be cfg.get('database_name') in the
    # signature, which froze the value at module-import time (before
    # the configuration may even be parsed)
    if db_name is None:
        db_name = cfg.get('database_name')
    engine = osdb.get_db_engine()
    if not engine:
        return None
    # make sure to inherit the 'database_admin_url' engine
    db_url = osdb.set_url_driver(cfg.get("database_url"), engine)
    logger.debug("DB URL: '{}'".format(db_url))
    return db_url
def preloop(self):
    """Restore CLI history from the configured history file."""
    history_file = cfg.get('history_file')
    logger.debug("using history file {}".format(history_file))
    path = os.path.expanduser(history_file)
    try:
        readline.read_history_file(path)
    except FileNotFoundError:
        # first run: nothing saved yet
        pass
    readline.set_history_length(int(cfg.get('history_file_size')))
    if self.registered_atexit:
        return
    atexit.register(self.history_write)
def get_db_engine():
    """Return the configured database engine, or None if unsupported.

    The engine is derived from 'database_admin_url', then
    'database_url', defaulting to "mysql" when neither exists.
    """
    url_option = None
    for candidate in ('database_admin_url', 'database_url'):
        if cfg.exists(candidate):
            url_option = candidate
            break

    if url_option is None:
        engine = "mysql"
    else:
        engine = osdb.get_url_driver(cfg.get(url_option))

    if engine not in SUPPORTED_BACKENDS:
        logger.error("bad database engine ({}), supported: {}".format(
            engine, " ".join(SUPPORTED_BACKENDS)))
        return None
    return engine
def execute(method, params):
    """Send a JSON-RPC command over HTTP and return the parsed reply."""
    jsoncmd = jsonrpc_helper.get_command(method, params)
    request = urllib.request.Request(
        cfg.get('url'),
        jsoncmd.encode(),
        {'Content-Type': 'application/json'})
    response = urllib.request.urlopen(request)
    replycmd = response.read().decode()
    return jsonrpc_helper.get_reply(replycmd)
def get_admin_db_url(self, db_name):
    """Determine the privileged (admin) database URL.

    :param db_name: database name the caller intends to administer
    :return: the admin DB URL string, or None on error
    """
    engine = osdb.get_db_engine()
    if not engine:
        return None
    if cfg.exists('database_admin_url'):
        admin_url = cfg.get("database_admin_url")
        if engine == "postgres":
            admin_url = osdb.set_url_db(admin_url, 'postgres')
    else:
        if engine == 'postgres':
            if getuser() != "postgres":
                # BUGFIX: this message was mangled by credential
                # scrubbing ("..."******"..." is invalid syntax);
                # restored as plain implicit string concatenation
                logger.error("Command must be run as 'postgres' user: "
                             "sudo -u postgres opensips-cli ...")
                return None
            # for PG, do the initial setup using 'postgres' as role + DB
            admin_url = "postgres://postgres@localhost/postgres"
        else:
            admin_url = "{}://root@localhost".format(engine)
    if osdb.get_url_pswd(admin_url) is None:
        pswd = getpass("Password for admin {} user ({}): ".format(
            osdb.get_url_driver(admin_url, capitalize=True),
            osdb.get_url_user(admin_url)))
        # SECURITY: never log the plaintext password (the original
        # debug call also had no format placeholder for its argument)
        logger.debug("admin password read from terminal")
        admin_url = osdb.set_url_password(admin_url, pswd)
    # NOTE(review): admin_url may embed the password at this point —
    # confirm logging it at debug level is acceptable
    logger.debug("admin DB URL: '{}'".format(admin_url))
    return admin_url
def valid():
    """Check that the OpenSIPS fifo file exists.

    :return: tuple (ok, extra) where extra is a list of hint messages
        or None on success
    """
    opensips_fifo = cfg.get('fifo_file')
    if os.path.exists(opensips_fifo):
        return (True, None)
    msg = "fifo file {} does not exist!".format(opensips_fifo)
    logger.debug(msg)
    return (False, [msg, 'Is OpenSIPS running?'])
def do_trap(self, params):
    """Collect gdb backtraces of all OpenSIPS processes into a trap file.

    :param params: optional list of PIDs to trap; when absent, PIDs are
        discovered via MI and, as a fallback, via pidof(8)
    :return: -1 on error, None on success
    """
    self.pids = []
    self.gdb_outputs = {}
    self.process_info = ""
    trap_file = cfg.get("trap_file")
    logger.info("Trapping {} in {}".format(PROCESS_NAME, trap_file))
    if params and len(params) > 0:
        self.pids = params
    else:
        # try to fetch the pids through MI, but don't block for long
        thread = Thread(target=self.get_pids)
        thread.start()
        thread.join(timeout=1)
        if len(self.pids) == 0:
            logger.warning("could not get OpenSIPS pids through MI!")
            try:
                ps_pids = subprocess.check_output(["pidof", PROCESS_NAME])
                self.pids = ps_pids.decode().split()
            except (subprocess.CalledProcessError, OSError):
                # BUGFIX: was a bare 'except:' (also swallowed
                # KeyboardInterrupt); only pidof failures are expected
                logger.warning("could not find any OpenSIPS running!")
                self.pids = []
    if len(self.pids) < 1:
        logger.error("could not find OpenSIPS' pids")
        return -1
    logger.debug("Dumping PIDs: {}".format(", ".join(self.pids)))
    # gather gdb output for each pid in parallel
    threads = []
    for pid in self.pids:
        thread = Thread(target=self.get_gdb_output, args=(pid,))
        thread.start()
        threads.append(thread)
    for thread in threads:
        thread.join()
    if len(self.gdb_outputs) == 0:
        logger.error("could not get output of gdb")
        return -1
    with open(trap_file, "w") as tf:
        tf.write(self.process_info)
        for pid in self.pids:
            if pid not in self.gdb_outputs:
                logger.warning("No output from pid {}".format(pid))
                continue
            try:
                procinfo = subprocess.check_output(
                    ["ps", "--no-headers", "-ww", "-fp",
                     pid]).decode()[:-1]
            except (subprocess.CalledProcessError, OSError):
                # BUGFIX: was a bare 'except:'
                procinfo = "UNKNOWN"
            tf.write("\n\n---start {} ({})\n{}".
                     format(pid, procinfo, self.gdb_outputs[pid]))
    print("Trap file: {}".format(trap_file))
def do_history(self, line):
    """Print the saved CLI history, one numbered line at a time."""
    if line:
        # an argument was given: nothing to list
        return
    path = os.path.expanduser(cfg.get('history_file'))
    try:
        with open(path) as hf:
            for num, entry in enumerate(hf, 1):
                print(num, entry, end='')
    except FileNotFoundError:
        # no history file yet - nothing to show
        pass
def update_logger(self):
    """Apply the configured log level ('DEBUG' wins when self.debug)."""
    level = "DEBUG" if self.debug else cfg.get("log_level")
    logger.setLevel(level)
def preloop(self):
    """ preload a history file """
    history_file = cfg.get('history_file')
    logger.debug("using history file {}".format(history_file))
    try:
        readline.read_history_file(os.path.expanduser(history_file))
    except PermissionError:
        # BUGFIX: format() used to bind only to the second string
        # fragment, so the message contained a literal '{}' instead of
        # the file name
        logger.warning("failed to read CLI history from {} "
                       "(no permission)".format(history_file))
    except FileNotFoundError:
        pass
    readline.set_history_length(int(cfg.get('history_file_size')))
    if not self.registered_atexit:
        atexit.register(self.history_write)
def initialize():
    """Load the communication handler named by 'communication_type'."""
    global comm_handler
    comm_type = cfg.get('communication_type')
    module_name = 'opensipscli.communication.{}'.format(comm_type)
    try:
        comm_handler = __import__(module_name, fromlist=[comm_type])
    except ImportError as ie:
        comm_handler = None
        logger.error("cannot import '{}' handler: {}".format(comm_type, ie))
def __exclude__(self):
    """ method exclusion list """
    if not cfg.exists("database_url"):
        return not osdb.has_sqlalchemy()
    db_url = cfg.get("database_url")
    return not osdb.has_dialect(osdb.get_dialect(db_url))
def history_write(self):
    """ save history file """
    history_file = cfg.get('history_file')
    logger.debug("saving history in {}".format(history_file))
    try:
        os.makedirs(os.path.expanduser(os.path.dirname(history_file)),
                    exist_ok=True)
        readline.write_history_file(os.path.expanduser(history_file))
    except PermissionError:
        # runs as an atexit hook: never let it raise at interpreter
        # shutdown (matches the sibling history_write implementation)
        logger.warning("failed to write CLI history to {} "
                       "(no permission)".format(history_file))
def execute(method, params):
    """Send a JSON-RPC command over HTTP(S) and return the parsed reply."""
    url = cfg.get('url')
    jsoncmd = jsonrpc_helper.get_command(method, params)
    headers = {'Content-Type': 'application/json'}
    request = urllib.request.Request(url, jsoncmd.encode(), headers)
    scheme = urllib.parse.urlparse(url).scheme
    if scheme == 'https':
        # NOTE(review): certificate verification is disabled here —
        # presumably for self-signed MI endpoints; confirm intentional
        ctx = ssl._create_unverified_context()
        response = urllib.request.urlopen(request, context=ctx)
    else:
        response = urllib.request.urlopen(request)
    replycmd = response.read().decode()
    return jsonrpc_helper.get_reply(replycmd)
def execute(method, params):
    """Send a JSON-RPC command over the OpenSIPS MI fifo and read the reply.

    Creates a randomly named reply fifo, writes the command to the
    OpenSIPS fifo, then blocks reading a single reply line.

    :raises jsonrpc_helper.JSONRPCException: on any fifo setup error
    """
    jsoncmd = jsonrpc_helper.get_command(method, params)
    reply_fifo_file_name = REPLY_FIFO_FILE_TEMPLATE.format(
        random.randrange(32767))
    reply_dir = cfg.get('fifo_reply_dir')
    reply_fifo_file = "{}/{}".format(reply_dir, reply_fifo_file_name)
    # make sure fifo file does not exist
    try:
        os.unlink(reply_fifo_file)
        logger.debug("removed reply fifo '{}'".format(reply_fifo_file))
    except OSError as ex:
        # only fail when the file actually still exists
        if os.path.exists(reply_fifo_file):
            raise jsonrpc_helper.JSONRPCException(
                "cannot remove repl file {}: {}!".format(
                    reply_fifo_file, ex))
    try:
        os.mkfifo(reply_fifo_file)
        # world-writable so the OpenSIPS process (any user) can answer
        os.chmod(reply_fifo_file, 0o666)
    except OSError as ex:
        raise jsonrpc_helper.JSONRPCException(
            "cannot create reply file {}: {}!".format(reply_fifo_file, ex))
    opensips_fifo = cfg.get('fifo_file')
    if not os.path.exists(opensips_fifo):
        raise jsonrpc_helper.JSONRPCException(
            "fifo file {} does not exist!".format(opensips_fifo))
    # MI fifo protocol: ":<reply_fifo_name>:<json command>"
    fifocmd = ":{}:{}".format(reply_fifo_file_name, jsoncmd)
    with open(opensips_fifo, 'w') as fifo:
        fifo.write(fifocmd)
    logger.debug("sent command '{}'".format(fifocmd))
    # blocks until OpenSIPS writes one reply line into the reply fifo
    with open(reply_fifo_file, 'r') as reply_fifo:
        replycmd = reply_fifo.readline()
    #logger.debug("received reply '{}'".format(replycmd))
    # TODO: should we add this in a loop?
    os.unlink(reply_fifo_file)
    return jsonrpc_helper.get_reply(replycmd)
def history_write(self):
    """ save history file """
    history_file = cfg.get('history_file')
    logger.debug("saving history in {}".format(history_file))
    try:
        # makedirs inside the try: it may also hit PermissionError, and
        # this runs as an atexit hook which must never raise
        os.makedirs(os.path.expanduser(os.path.dirname(history_file)),
                    exist_ok=True)
        readline.write_history_file(os.path.expanduser(history_file))
    except PermissionError:
        # BUGFIX: format() used to bind only to the second string
        # fragment, printing a literal '{}' instead of the file name
        logger.warning("failed to write CLI history to {} "
                       "(no permission)".format(history_file))
def valid():
    """Validate write access to the OpenSIPS fifo file (with fallback).

    Tries 'fifo_file' then 'fifo_file_fallback', verifies the chosen
    fifo can be opened for writing, and stores it in the module-level
    'fifo_file'.

    :return: tuple (ok, extra) where extra is a list of hint messages
        or None on success
    """
    global fifo_file
    opensips_fifo = cfg.get('fifo_file')
    if not os.path.exists(opensips_fifo):
        opensips_fifo_bk = opensips_fifo
        opensips_fifo = cfg.get('fifo_file_fallback')
        if not opensips_fifo or not os.path.exists(opensips_fifo):
            # BUGFIX: removed a stray logger.debug("test") leftover
            msg = "fifo file {} does not exist!".format(opensips_fifo)
            logger.debug(msg)
            return (False, [msg, 'Is OpenSIPS running?'])
        logger.debug("switched fifo from '{}' to fallback '{}'".
                     format(opensips_fifo_bk, opensips_fifo))
    try:
        open(opensips_fifo, 'w').close()
    except OSError as ex:
        extra = []
        if ex.errno == errno.EACCES:
            # explain the kernel >= 4.19 protected-fifos behavior
            sticky = get_sticky(os.path.dirname(opensips_fifo))
            if sticky:
                extra = ["starting with Linux kernel 4.19, processes can " +
                         "no longer read from FIFO files ",
                         "that are saved in directories with sticky " +
                         "bits (such as {})".format(sticky),
                         "and are not owned by the same user the " +
                         "process runs with. ",
                         "To fix this, either store the file in a non-sticky " +
                         "bit directory (such as /var/run/opensips), ",
                         "or disable fifo file protection using " +
                         "'sysctl fs.protected_fifos = 0' (NOT RECOMMENDED)"]
        msg = "cannot access fifo file {}: {}".format(opensips_fifo, ex)
        logger.debug(msg)
        return (False, [msg] + extra)
    fifo_file = opensips_fifo
    return (True, None)
def valid():
    """Check that a TCP connection to the configured MI URL is possible.

    :return: True when host:port accepts connections, False otherwise
    """
    # check to see if there is an open connection
    url = cfg.get('url')
    try:
        url_parsed = urllib.parse.urlparse(url)
        # BUGFIX: ParseResult is an immutable namedtuple, so assigning
        # url_parsed.port raised AttributeError (swallowed by the bare
        # except) and the check always failed for URLs without an
        # explicit port; use a local variable instead
        port = url_parsed.port
        if not port:
            port = 80 if url_parsed.scheme == 'http' else 443
        s = socket.socket()
        s.connect((url_parsed.hostname, port))
        s.close()
        return True
    except Exception:
        return False
def valid():
    """Check that a TCP connection to the configured MI URL is possible.

    :return: True when host:port accepts connections, False otherwise
    """
    # check to see if there is an open connection
    url = cfg.get('url')
    try:
        url_parsed = urllib.parse.urlparse(url)
        # BUGFIX: ParseResult attributes are read-only; derive the
        # default port into a local instead of assigning url_parsed.port
        port = url_parsed.port
        if not port:
            port = 80 if url_parsed.scheme == 'http' else 443
        s = socket.socket()
        s.connect((url_parsed.hostname, port))
        s.close()
        return True
    except Exception as e:
        logger.debug("could not connect to {} ({})".format(url, e))
        return False
def valid():
    """Check connectivity to the configured MI URL.

    :return: tuple (ok, extra) where extra is a list of hint messages
        or None on success
    """
    # check to see if there is an open connection
    url = cfg.get('url')
    try:
        url_parsed = urllib.parse.urlparse(url)
        # BUGFIX: ParseResult is immutable; assigning url_parsed.port
        # raised AttributeError and the check always failed for URLs
        # without an explicit port
        port = url_parsed.port
        if not port:
            port = 80 if url_parsed.scheme == 'http' else 443
        s = socket.socket()
        s.connect((url_parsed.hostname, port))
        s.close()
        return (True, None)
    except Exception as e:
        msg = "could not connect to {} ({})".format(url, e)
        logger.debug(msg)
        return (False, [msg, 'Is OpenSIPS running?'])
def get_schema_path(self, backend="mysql"):
    """ helper function: get the path defining the root path holding sql schema template """
    # strip any SQLAlchemy driver suffix, e.g. "mysql+pymysql" -> "mysql"
    if '+' in backend:
        backend = backend[0:backend.index('+')]
    # self.db_path caches the root from a previous successful lookup
    if self.db_path is not None:
        return os.path.join(self.db_path, backend)
    db_path = os.path.expanduser(cfg.get("database_schema_path"))
    if db_path.endswith('/'):
        db_path = db_path[:-1]
    # tolerate a configured path that already ends in the backend dir
    if os.path.basename(db_path) == backend:
        db_path = os.path.dirname(db_path)
    if not os.path.exists(db_path):
        logger.error(
            "path '{}' to OpenSIPS DB scripts does not exist!".format(
                db_path))
        return None
    if not os.path.isdir(db_path):
        logger.error(
            "path '{}' to OpenSIPS DB scripts is not a directory!".format(
                db_path))
        return None
    schema_path = os.path.join(db_path, backend)
    if not os.path.isdir(schema_path):
        logger.error(
            "invalid OpenSIPS DB scripts dir: '{}'!".format(schema_path))
        return None
    # sanity check: the standard schema file must be present
    std_tables = os.path.join(schema_path, 'standard-create.sql')
    if not os.path.isfile(std_tables):
        logger.error(
            "standard tables file not found ({})".format(std_tables))
        return None
    self.db_path = db_path
    return schema_path
def __invoke__(self, cmd, params=None):
    """Run an MI command and print its reply in the configured format.

    :return: 0 on success, -1 when the command produced no reply
    """
    params = self.parse_params(cmd, params)

    # the MI module works over the JSON communication layer
    logger.debug("running command '{}' '{}'".format(cmd, params))
    res = comm.execute(cmd, params)
    if res is None:
        return -1

    printers = {
        "pretty-print": self.print_pretty_print,
        "dictionary": self.print_dictionary,
        "lines": self.print_lines,
        "yaml": self.print_yaml,
    }
    output_type = cfg.get('output_type')
    if output_type in printers:
        printers[output_type](res)
    elif output_type != "none":
        # "none" means no one is interested in the reply
        logger.error("unknown output_type='{}'! Dropping output!"
                     .format(output_type))
    return 0
def _do_create(self, db, db_name=None, do_all_tables=False):
    """Create a new database and deploy the selected table schemas.

    :param db: connected osdb instance (admin connection)
    :param db_name: database to create; defaults to db.db_name
    :param do_all_tables: kept for interface compatibility; the table
        selection is prompted interactively below
    :return: negative error code on failure, None on success
    """
    if db_name is None:
        db_name = db.db_name

    # check to see if the database has already been created
    if db.exists(db_name):
        logger.error("database '{}' already exists!".format(db_name))
        return -2
    db_schema = db.db_url.split(":")[0]
    schema_path = self.get_schema_path(db_schema)
    if schema_path is None:
        return -1

    standard_file_path = os.path.join(schema_path, "standard-create.sql")
    if not os.path.isfile(standard_file_path):
        # BUGFIX: message used to read 'stardard'
        logger.error("cannot find standard OpenSIPS DB file: '{}'!".
                     format(standard_file_path))
        return -1
    tables_files = [standard_file_path]

    # all good now - check to see what tables we shall deploy
    if cfg.read_param(
            None,
            "Create [a]ll tables or just the [c]urrently configured ones?",
            default="a").lower() == "a":
        print("Creating all tables ...")
        tables = [f.replace('-create.sql', '')
                  for f in os.listdir(schema_path)
                  if os.path.isfile(os.path.join(schema_path, f)) and
                  f.endswith('-create.sql')]
    else:
        print("Creating the currently configured set of tables ...")
        if cfg.exists("database_modules"):
            tables = cfg.get("database_modules").split(" ")
        else:
            tables = STANDARD_DB_MODULES

    logger.debug("deploying tables {}".format(" ".join(tables)))
    for table in tables:
        if table == "standard":
            # already checked for it
            continue
        table_file_path = os.path.join(schema_path,
                                       "{}-create.sql".format(table))
        if not os.path.isfile(table_file_path):
            # logger.warn() is deprecated in favor of logger.warning()
            logger.warning("cannot find file to create {}: {}".
                           format(table, table_file_path))
        else:
            tables_files.append(table_file_path)

    db.create(db_name)
    db.use(db_name)

    for table_file in tables_files:
        print("Running {}...".format(os.path.basename(table_file)))
        try:
            db.create_module(table_file)
        except osdbError as ex:
            logger.error("cannot import: {}".format(ex))

    print("The '{}' database has been successfully created!".format(db_name))
def create_tables(self, db_name, db_url, admin_db, tables=None,
                  create_std=True):
    """ create database tables """
    # BUGFIX: 'tables' default changed from the mutable [] to None;
    # both are falsy in the checks below, so behavior is unchanged
    if tables is None:
        tables = []
    db_url = osdb.set_url_db(db_url, db_name)

    # 2) prepare new object store database instance
    # use it to connect to the created database
    db = self.get_db(db_url, db_name)
    if db is None:
        return -1

    if not db.exists():
        logger.warning("database '{}' does not exist!".format(db_name))
        return -1

    schema_path = self.get_schema_path(db.dialect)
    if schema_path is None:
        return -1

    if create_std:
        standard_file_path = os.path.join(schema_path,
                                          "standard-create.sql")
        if not os.path.isfile(standard_file_path):
            # BUGFIX: message used to read 'stardard'
            logger.error(
                "cannot find standard OpenSIPS DB file: '{}'!".format(
                    standard_file_path))
            return -1
        table_files = {'standard': standard_file_path}
    else:
        table_files = {}

    # check to see what tables we shall deploy
    if tables:
        pass
    elif cfg.exists("database_modules"):
        # we know exactly what modules we want to install
        tables_line = cfg.get("database_modules").strip().lower()
        if tables_line == "all":
            logger.debug("Creating all tables")
            tables = [f.replace('-create.sql', '')
                      for f in os.listdir(schema_path)
                      if os.path.isfile(os.path.join(schema_path, f)) and
                      f.endswith('-create.sql')]
        else:
            logger.debug("Creating custom tables")
            tables = tables_line.split(" ")
    else:
        logger.debug("Creating standard tables")
        tables = STANDARD_DB_MODULES

    # check for corresponding SQL schemas files in system path
    logger.debug("checking tables: {}".format(" ".join(tables)))
    for table in tables:
        if table == "standard":
            # already checked for it
            continue
        table_file_path = os.path.join(schema_path,
                                       "{}-create.sql".format(table))
        if not os.path.isfile(table_file_path):
            # logger.warn() is deprecated in favor of logger.warning()
            logger.warning("cannot find SQL file for module {}: {}".format(
                table, table_file_path))
        else:
            table_files[table] = table_file_path

    username = osdb.get_url_user(db_url)
    admin_db.connect(db_name)

    # create tables from SQL schemas
    for module, table_file in table_files.items():
        # BUGFIX: the original string literal was broken mid-call;
        # reconstructed as a single "Running {}..." message
        logger.info("Running {}...".format(os.path.basename(table_file)))
        try:
            db.create_module(table_file)
            if db.dialect == "postgres":
                self.pg_grant_table_access(table_file, username, admin_db)
        except osdbModuleAlreadyExistsError:
            logger.error("{} table(s) are already created!".format(module))
        except osdbError as ex:
            logger.error("cannot import: {}".format(ex))

    # terminate active database connection
    db.destroy()
    return 0
def do_trace(self, params):
    """Live-trace SIP traffic through a temporary local HEP listener.

    Prompts for caller/callee/source-IP filters when none are given,
    starts an OpenSIPS 'trace_start' pointed at a local TCP HEP socket
    and prints incoming packets until the connection closes or the user
    interrupts (Ctrl-C also issues 'trace_stop').
    """
    filters = []
    if params is None:
        caller_f = input("Caller filter: ")
        if caller_f != "":
            filters.append("caller={}".format(caller_f))
        callee_f = input("Callee filter: ")
        if callee_f != "":
            filters.append("callee={}".format(callee_f))
        ip_f = input("Source IP filter: ")
        if ip_f != "":
            filters.append("ip={}".format(ip_f))
        if len(filters) == 0:
            ans = cfg.read_param(None, "No filter specified! "\
                    "Continue without a filter?", False, True)
            if not ans:
                return False
            filters = None
    else:
        filters = params
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    trace_ip = cfg.get("trace_listen_ip")
    trace_port = int(cfg.get("trace_listen_port"))
    s.bind((trace_ip, trace_port))
    # port 0 lets the OS pick a free port; read back the actual one
    if trace_port == 0:
        trace_port = s.getsockname()[1]
    s.listen(1)
    conn = None
    # random id so concurrent CLI trace sessions don't clash
    trace_name = "opensips-cli.{}".format(random.randint(0, 65536))
    trace_socket = "hep:{}:{};transport=tcp;version=3".format(
        trace_ip, trace_port)
    args = {
        'id': trace_name,
        'uri': trace_socket,
    }
    if filters:
        args['filter'] = filters
        logger.debug("filters are {}".format(filters))
    trace_started = comm.execute('trace_start', args)
    if not trace_started:
        return False
    try:
        conn, addr = s.accept()
        logger.debug("New TCP connection from {}:{}".format(
            addr[0], addr[1]))
        remaining = b''
        while True:
            data = conn.recv(TRACE_BUFFER_SIZE)
            if not data:
                break
            # __print_hep returns unparsed trailing bytes, or None to stop
            remaining = self.__print_hep(remaining + data)
            if remaining is None:
                break
    except KeyboardInterrupt:
        comm.execute('trace_stop', {'id': trace_name}, True)
    if conn is not None:
        conn.close()
def do_rootCA(self, params):
    """Generate a self-signed root CA certificate and private key.

    Output paths and certificate fields come from configuration,
    prompting interactively for any missing values.
    """
    global cfg
    logger.info("Preparing to generate CA cert + key...")
    # TODO
    # separate cli.cfg files for TLS are fully deprecated, this if block is
    # only kept for backwards-compatibility. Remove starting from v3.2! <3
    if cfg.exists('tls_ca_config'):
        tls_cfg = cfg.get('tls_ca_config')
        # NOTE: rebinds the module-level 'cfg' to the TLS-specific config
        cfg = OpenSIPSCLIConfig()
        cfg.parse(tls_cfg)

    ca_dir = cfg.read_param("tls_ca_dir", "Output directory",
                            "/etc/opensips/tls/rootCA/")
    cert_file = cfg.read_param("tls_ca_cert_file", "Output cert file",
                               "cacert.pem")
    key_file = cfg.read_param("tls_ca_key_file", "Output key file",
                              "private/cakey.pem")
    c_f = join(ca_dir, cert_file)
    k_f = join(ca_dir, key_file)

    # refuse to overwrite an existing CA unless explicitly confirmed
    if (exists(c_f) or exists(k_f)) and not cfg.read_param(
            "tls_ca_overwrite",
            "CA certificate or key already exists, overwrite?",
            "yes", True):
        return

    # create a self-signed cert
    cert = crypto.X509()
    cert.get_subject().CN = cfg.read_param("tls_ca_common_name",
                                           "Website address (CN)",
                                           "opensips.org")
    cert.get_subject().C = cfg.read_param("tls_ca_country", "Country (C)",
                                          "RO")
    cert.get_subject().ST = cfg.read_param("tls_ca_state", "State (ST)",
                                           "Bucharest")
    cert.get_subject().L = cfg.read_param("tls_ca_locality",
                                          "Locality (L)", "Bucharest")
    cert.get_subject().O = cfg.read_param("tls_ca_organisation",
                                          "Organization (O)", "OpenSIPS")
    cert.get_subject().OU = cfg.read_param("tls_ca_organisational_unit",
                                           "Organisational Unit (OU)",
                                           "Project")
    cert.set_serial_number(randrange(100000))
    cert.gmtime_adj_notBefore(0)
    notafter = int(cfg.read_param("tls_ca_notafter",
                                  "Certificate validity (seconds)",
                                  315360000))
    cert.gmtime_adj_notAfter(notafter)
    # self-signed: issuer == subject
    cert.set_issuer(cert.get_subject())

    # create a key pair
    key = crypto.PKey()
    key_size = int(cfg.read_param("tls_ca_key_size",
                                  "RSA key size (bits)", 4096))
    key.generate_key(crypto.TYPE_RSA, key_size)
    cert.set_pubkey(key)
    # NOTE(review): SHA1 is a weak default digest — consider "SHA256"
    md = cfg.read_param("tls_ca_md", "Digest Algorithm", "SHA1")
    cert.sign(key, md)

    try:
        if not exists(dirname(c_f)):
            makedirs(dirname(c_f))
        open(c_f, "wt").write(
            crypto.dump_certificate(crypto.FILETYPE_PEM,
                                    cert).decode('utf-8'))
    except Exception as e:
        logger.exception(e)
        logger.error("Failed to write to %s", c_f)
        return
    try:
        if not exists(dirname(k_f)):
            makedirs(dirname(k_f))
        open(k_f, "wt").write(
            crypto.dump_privatekey(crypto.FILETYPE_PEM,
                                   key).decode('utf-8'))
    except Exception as e:
        logger.exception(e)
        logger.error("Failed to write to %s", k_f)
        return
    logger.info("CA certificate created in " + c_f)
    logger.info("CA private key created in " + k_f)