def check_utility_exists(sid):
    """
    Check that the 'restore' utility binary exists for the given server.

    Args:
        sid: Server ID

    Returns:
        A JSON response: success=1 when the utility is available,
        otherwise success=0 with an error message.
    """
    # Fetch the server details like hostname, port, roles etc
    server = get_server(sid)
    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Could not find the specified server.")
        )

    from pgadmin.utils.driver import get_driver
    manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(server.id)

    error_msg = does_utility_exist(manager.utility('restore'))
    if error_msg:
        return make_json_response(success=0, errormsg=error_msg)

    return make_json_response(success=1)
def check_utility_exists(sid):
    """
    Check that the 'sql' (psql) utility binary exists for the given server.

    Args:
        sid: Server ID

    Returns:
        A JSON response: success=1 when the utility is available,
        otherwise success=0 with an error message.
    """
    # Look up the server for the logged-in user only.
    server = Server.query.filter_by(id=sid, user_id=current_user.id).first()
    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Could not find the specified server.")
        )

    from pgadmin.utils.driver import get_driver
    manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(server.id)

    error_msg = does_utility_exist(manager.utility('sql'))
    if error_msg:
        return make_json_response(success=0, errormsg=error_msg)

    return make_json_response(success=1)
def check_utility_exists(sid, backup_obj_type):
    """
    Check that the backup utility binary exists for the given server.

    Args:
        sid: Server ID
        backup_obj_type: Type of the object being backed up; 'objects'
            selects the per-database backup utility, anything else the
            server-level backup utility.

    Returns:
        A JSON response: success=1 when the utility is available,
        otherwise success=0 with an error message.
    """
    server = get_server(sid)
    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Could not find the specified server.")
        )

    from pgadmin.utils.driver import get_driver
    manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(server.id)

    utility_kind = 'backup' if backup_obj_type == 'objects' \
        else 'backup_server'
    error_msg = does_utility_exist(manager.utility(utility_kind))
    if error_msg:
        return make_json_response(success=0, errormsg=error_msg)

    return make_json_response(success=1)
def setUp(self):
    """Prepare export test data and skip the test if psql is unavailable."""
    import_export_utils.setup_export_data(self)

    self.params.update({
        'database': self.db_name,
        'schema': self.schema_name,
        'table': self.table_name,
        'columns': [self.column_name, self.column_name_1],
    })

    binary_paths = self.server.get('default_binary_paths')
    if not binary_paths or \
            binary_paths.get(self.server['type'], '') == '':
        self.skipTest(
            "default_binary_paths is not set for the server {0}".format(
                self.server['name']
            )
        )

    psql_path = os.path.join(binary_paths[self.server['type']], 'psql')
    # Windows binaries carry an .exe suffix.
    if os.name == 'nt':
        psql_path += '.exe'

    error_msg = does_utility_exist(psql_path)
    if error_msg is not None:
        self.skipTest(error_msg)
def create_restore_job(sid):
    """
    Create and start a background job that runs the restore utility.

    Args:
        sid: Server ID

    Returns:
        A JSON response containing the new job id and description on
        success, or an error response describing the failure.
    """
    is_error, errmsg, data, _file = _get_create_req_data()
    if is_error:
        return errmsg

    is_error, errmsg, driver, manager, conn, \
        connected, server = _connect_server(sid)
    if is_error:
        return errmsg

    utility = manager.utility('restore')
    utility_err = does_utility_exist(utility)
    if utility_err:
        return make_json_response(success=0, errormsg=utility_err)

    args = _set_args_param_values(data, manager, server, driver, conn, _file)

    try:
        restore_file = data['file']
        if hasattr(restore_file, 'encode'):
            restore_file = restore_file.encode('utf-8')

        process = BatchProcess(
            desc=RestoreMessage(
                server.id, restore_file, *args, database=data['database']),
            cmd=utility,
            args=args)
        manager.export_password_env(process.id)

        # A positive connection timeout is forwarded to the utility via
        # the PGCONNECT_TIMEOUT environment variable.
        if manager.connect_timeout > 0:
            process.set_env_variables(
                server,
                env={'PGCONNECT_TIMEOUT': str(manager.connect_timeout)})
        else:
            process.set_env_variables(server)

        process.start()
        jid = process.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(status=410, success=0, errormsg=str(e))

    # Return response
    return make_json_response(data={
        'job_id': jid,
        'desc': process.desc.message,
        'Success': 1
    })
def check_binary_path_or_skip_test(cls, utility_name):
    """
    Skip the given test when the server has no usable default binary
    path configured, or when the requested utility binary is missing.
    """
    binary_paths = cls.server.get('default_binary_paths')
    if not binary_paths or \
            binary_paths.get(cls.server['type'], '') == '':
        cls.skipTest(
            "default_binary_paths is not set for the server {0}".format(
                cls.server['name']))

    from pgadmin.utils import does_utility_exist
    full_path = os.path.join(
        binary_paths[cls.server['type']], utility_name)

    error_msg = does_utility_exist(full_path)
    if error_msg is not None:
        cls.skipTest(error_msg)
def setUp(self):
    """Skip the test if the psql binary is not available for the server."""
    binary_paths = self.server.get('default_binary_paths')
    if not binary_paths or \
            binary_paths.get(self.server['type'], '') == '':
        self.skipTest(
            "default_binary_paths is not set for the server {0}".format(
                self.server['name']))

    psql_path = os.path.join(binary_paths[self.server['type']], 'psql')
    # Windows binaries carry an .exe suffix.
    if os.name == 'nt':
        psql_path += '.exe'

    error_msg = does_utility_exist(psql_path)
    if error_msg is not None:
        self.skipTest(error_msg)
def create_import_export_job(sid):
    """
    Create and start a background job for the import/export table data
    functionality (COPY FROM/TO via psql).

    Args:
        sid: Server ID

    Returns:
        A JSON response containing the new job id on success, or an
        error response describing the failure.
    """
    # NOTE: json.loads() must not be passed encoding= — the keyword was
    # ignored/deprecated since Python 3.1 and raises TypeError on 3.9+.
    if request.form:
        data = json.loads(request.form['data'])
    else:
        data = json.loads(request.data)

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(id=sid).first()
    if server is None:
        return bad_request(errormsg=_("Could not find the given server"))

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return bad_request(errormsg=_("Please connect to the server first..."))

    # Get the utility path from the connection manager
    utility = manager.utility('sql')
    ret_val = does_utility_exist(utility)
    if ret_val:
        return make_json_response(success=0, errormsg=ret_val)

    # Get the storage path from preference
    storage_dir = get_storage_directory()

    if 'filename' in data:
        try:
            _file = filename_with_file_manager_path(data['filename'],
                                                    data['is_import'])
        except Exception as e:
            return bad_request(errormsg=str(e))

        if not _file:
            return bad_request(errormsg=_('Please specify a valid file'))

        # Normalize path separators for the utility on Windows.
        if IS_WIN:
            _file = _file.replace('\\', '/')

        data['filename'] = _file
    else:
        return bad_request(errormsg=_('Please specify a valid file'))

    # Get required and ignored column list
    icols = _get_ignored_column_list(data, driver, conn)
    cols = _get_required_column_list(data, driver, conn)

    # Create the COPY FROM/TO from template
    query = render_template(
        'import_export/sql/cmd.sql',
        conn=conn,
        data=data,
        columns=cols,
        ignore_column_list=icols
    )

    args = ['--command', query]

    try:
        p = BatchProcess(
            desc=IEMessage(
                sid,
                data['schema'],
                data['table'],
                data['database'],
                storage_dir,
                utility, *args
            ),
            cmd=utility, args=args
        )
        manager.export_password_env(p.id)

        # The connection details are passed via libpq environment
        # variables rather than on the command line.
        env = dict()
        env['PGHOST'] = server.host
        env['PGPORT'] = str(server.port)
        env['PGUSER'] = server.username
        env['PGDATABASE'] = data['database']
        p.set_env_variables(server, env=env)
        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return bad_request(errormsg=str(e))

    # Return response
    return make_json_response(
        data={'job_id': jid, 'success': 1}
    )
def create_backup_objects_job(sid):
    """
    Create and start a background job for a backup task
    (Backup Database(s)/Schema(s)/Table(s) or server/globals backup).

    Args:
        sid: Server ID

    Returns:
        A JSON response containing the new job id and description on
        success, or an error response describing the failure.
    """
    # NOTE: json.loads() must not be passed encoding= — the keyword was
    # ignored/deprecated since Python 3.1 and raises TypeError on 3.9+.
    data = json.loads(request.data)
    backup_obj_type = data.get('type', 'objects')

    try:
        # 'directory' format backups target a directory, not a file.
        backup_file = filename_with_file_manager_path(
            data['file'], (data.get('format', '') != 'directory'))
    except Exception as e:
        return bad_request(errormsg=str(e))

    # Fetch the server details like hostname, port, roles etc
    server = get_server(sid)

    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Could not find the specified server.")
        )

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()
    if not connected:
        return make_json_response(
            success=0,
            errormsg=_("Please connect to the server first.")
        )

    utility = manager.utility('backup') if backup_obj_type == 'objects' \
        else manager.utility('backup_server')

    ret_val = does_utility_exist(utility)
    if ret_val:
        return make_json_response(success=0, errormsg=ret_val)

    args = _get_args_params_values(
        data, conn, backup_obj_type, backup_file, server, manager)

    escaped_args = [escape_dquotes_process_arg(arg) for arg in args]
    try:
        bfile = data['file'].encode('utf-8') \
            if hasattr(data['file'], 'encode') else data['file']
        if backup_obj_type == 'objects':
            args.append(data['database'])
            escaped_args.append(data['database'])
            p = BatchProcess(
                desc=BackupMessage(
                    BACKUP.OBJECT, server.id, bfile,
                    *args,
                    database=data['database']
                ),
                cmd=utility, args=escaped_args
            )
        else:
            p = BatchProcess(
                desc=BackupMessage(
                    BACKUP.SERVER if backup_obj_type != 'globals'
                    else BACKUP.GLOBALS,
                    server.id, bfile,
                    *args
                ),
                cmd=utility, args=escaped_args
            )

        manager.export_password_env(p.id)
        # Check for connection timeout and if it is greater than 0 then
        # set the environment variable PGCONNECT_TIMEOUT.
        if manager.connect_timeout > 0:
            env = dict()
            env['PGCONNECT_TIMEOUT'] = str(manager.connect_timeout)
            p.set_env_variables(server, env=env)
        else:
            p.set_env_variables(server)

        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(
            status=410,
            success=0,
            errormsg=str(e)
        )

    # Return response
    return make_json_response(data={
        'job_id': jid,
        'desc': p.desc.message,
        'Success': 1
    })
def create_maintenance_job(sid, did):
    """
    Create and start a background job for a maintenance (VACUUM/ANALYZE/
    REINDEX/CLUSTER) operation executed through psql.

    Args:
        sid: Server ID
        did: Database ID

    Returns:
        A JSON response containing the new job id on success, or an
        error response describing the failure.
    """
    # NOTE: json.loads() must not be passed encoding= — the keyword was
    # ignored/deprecated since Python 3.1 and raises TypeError on 3.9+.
    if request.form:
        data = json.loads(request.form['data'])
    else:
        data = json.loads(request.data)

    index_name = get_index_name(data)

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(id=sid).first()
    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Could not find the given server")
        )

    # To fetch MetaData for the server
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()
    if not connected:
        return make_json_response(
            success=0,
            errormsg=_("Please connect to the server first.")
        )

    utility = manager.utility('sql')
    ret_val = does_utility_exist(utility)
    if ret_val:
        return make_json_response(success=0, errormsg=ret_val)

    # Create the command for the vacuum operation
    query = render_template(
        'maintenance/sql/command.sql', conn=conn, data=data,
        index_name=index_name
    )

    # When an SSH tunnel is in use, connect through the local bind
    # address/port instead of the server's own host/port.
    args = [
        '--host',
        manager.local_bind_host if manager.use_ssh_tunnel else server.host,
        '--port',
        str(manager.local_bind_port) if manager.use_ssh_tunnel
        else str(server.port),
        '--username', server.username, '--dbname',
        data['database'],
        '--command', query
    ]

    try:
        p = BatchProcess(
            desc=Message(sid, data, query),
            cmd=utility, args=args
        )
        manager.export_password_env(p.id)
        # Check for connection timeout and if it is greater than 0 then
        # set the environment variable PGCONNECT_TIMEOUT.
        if manager.connect_timeout > 0:
            env = dict()
            env['PGCONNECT_TIMEOUT'] = str(manager.connect_timeout)
            p.set_env_variables(server, env=env)
        else:
            p.set_env_variables(server)

        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(
            status=410,
            success=0,
            errormsg=str(e)
        )

    # Return response
    return make_json_response(data={
        'job_id': jid,
        'status': True,
        'info': _('Maintenance job created.')
    })
def create_restore_job(sid):
    """
    Create and start a background job for a restore task, building the
    pg_restore argument list from the request payload.

    Args:
        sid: Server ID

    Returns:
        A JSON response containing the new job id on success, or an
        error response describing the failure.
    """
    # NOTE: json.loads() must not be passed encoding= — the keyword was
    # ignored/deprecated since Python 3.1 and raises TypeError on 3.9+.
    if request.form:
        data = json.loads(request.form['data'])
    else:
        data = json.loads(request.data)

    try:
        _file = filename_with_file_manager_path(data['file'])
    except Exception as e:
        return bad_request(errormsg=str(e))

    if _file is None:
        return make_json_response(
            status=410,
            success=0,
            errormsg=_("File could not be found.")
        )

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(id=sid).first()
    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Could not find the specified server.")
        )

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0,
            errormsg=_("Please connect to the server first.")
        )

    utility = manager.utility('restore')
    ret_val = does_utility_exist(utility)
    if ret_val:
        return make_json_response(success=0, errormsg=ret_val)

    args = []

    if 'list' in data:
        args.append('--list')
    else:
        # Helper: append a flag when the payload marks it truthy.
        def set_param(key, param):
            if key in data and data[key]:
                args.append(param)
                return True
            return False

        # Helper: append an option with its value (or a default).
        def set_value(key, param, default_value=None):
            if key in data and data[key] is not None and data[key] != '':
                args.append(param)
                args.append(data[key])
            elif default_value is not None:
                args.append(param)
                args.append(default_value)

        # Helper: append a repeated option for each listed object,
        # optionally qualified with its schema.
        def set_multiple(key, param, with_schema=True):
            if key in data:
                if len(data[key]) > 0:
                    if with_schema:
                        # TODO:// This is temporary
                        # Once object tree is implemented then we will use
                        # list of tuples 'else' part
                        if isinstance(data[key], list):
                            s, t = data[key]
                            args.extend([
                                param,
                                driver.qtIdent(conn, s) + '.' +
                                driver.qtIdent(conn, t)
                            ])
                        else:
                            for s, o in data[key]:
                                args.extend([
                                    param,
                                    driver.qtIdent(conn, s) + '.' +
                                    driver.qtIdent(conn, o)
                                ])
                    else:
                        for o in data[key]:
                            args.extend([param, o])
                    return True
            return False

        # When an SSH tunnel is in use, connect through the local bind
        # address/port instead of the server's own host/port.
        args.extend([
            '--host',
            manager.local_bind_host if manager.use_ssh_tunnel
            else server.host,
            '--port',
            str(manager.local_bind_port) if manager.use_ssh_tunnel
            else str(server.port),
            '--username', server.username, '--no-password'
        ])

        set_value('role', '--role')
        set_value('database', '--dbname')

        if data['format'] == 'directory':
            args.extend(['--format=d'])

        set_param('pre_data', '--section=pre-data')
        set_param('data', '--section=data')
        set_param('post_data', '--section=post-data')

        if not set_param('only_data', '--data-only'):
            set_param('dns_owner', '--no-owner')
            set_param('dns_privilege', '--no-privileges')
            set_param('dns_tablespace', '--no-tablespaces')

        if not set_param('only_schema', '--schema-only'):
            set_param('disable_trigger', '--disable-triggers')

        set_param('include_create_database', '--create')
        set_param('clean', '--clean')
        set_param('single_transaction', '--single-transaction')
        set_param('no_data_fail_table', '--no-data-for-failed-tables')
        set_param('use_set_session_auth', '--use-set-session-authorization')
        set_param('exit_on_error', '--exit-on-error')

        # --no-comments is only supported by pg_restore 11+.
        if manager.version >= 110000:
            set_param('no_comments', '--no-comments')

        set_value('no_of_jobs', '--jobs')
        set_param('verbose', '--verbose')

        set_multiple('schemas', '--schema', False)
        set_multiple('tables', '--table', False)
        set_multiple('functions', '--function', False)
        set_multiple('triggers', '--trigger', False)
        set_multiple('trigger_funcs', '--function', False)
        set_multiple('indexes', '--index', False)

    args.append(fs_short_path(_file))

    try:
        p = BatchProcess(
            desc=RestoreMessage(
                sid,
                data['file'].encode('utf-8') if hasattr(
                    data['file'], 'encode') else data['file'],
                *args
            ),
            cmd=utility, args=args
        )
        manager.export_password_env(p.id)
        # Check for connection timeout and if it is greater than 0 then
        # set the environment variable PGCONNECT_TIMEOUT.
        if manager.connect_timeout > 0:
            env = dict()
            env['PGCONNECT_TIMEOUT'] = str(manager.connect_timeout)
            p.set_env_variables(server, env=env)
        else:
            p.set_env_variables(server)

        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(
            status=410,
            success=0,
            errormsg=str(e)
        )

    # Return response
    return make_json_response(
        data={'job_id': jid, 'Success': 1}
    )
def create_backup_objects_job(sid):
    """
    Create and start a background job for a backup task
    (Backup Database(s)/Schema(s)/Table(s) or server/globals backup),
    building the pg_dump/pg_dumpall argument list from the payload.

    Args:
        sid: Server ID

    Returns:
        A JSON response containing the new job id on success, or an
        error response describing the failure.
    """
    # NOTE: json.loads() must not be passed encoding= — the keyword was
    # ignored/deprecated since Python 3.1 and raises TypeError on 3.9+.
    if request.form:
        data = json.loads(request.form['data'])
    else:
        data = json.loads(request.data)

    backup_obj_type = 'objects'
    if 'type' in data:
        backup_obj_type = data['type']

    try:
        # 'directory' format backups target a directory, not a file.
        if 'format' in data and data['format'] == 'directory':
            backup_file = filename_with_file_manager_path(data['file'], False)
        else:
            backup_file = filename_with_file_manager_path(data['file'])
    except Exception as e:
        return bad_request(errormsg=str(e))

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(
        id=sid, user_id=current_user.id
    ).first()

    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Could not find the specified server.")
        )

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()
    if not connected:
        return make_json_response(
            success=0,
            errormsg=_("Please connect to the server first.")
        )

    utility = manager.utility('backup') if backup_obj_type == 'objects' \
        else manager.utility('backup_server')

    ret_val = does_utility_exist(utility)
    if ret_val:
        return make_json_response(success=0, errormsg=ret_val)

    # When an SSH tunnel is in use, connect through the local bind
    # address/port instead of the server's own host/port.
    args = [
        '--file',
        backup_file,
        '--host',
        manager.local_bind_host if manager.use_ssh_tunnel else server.host,
        '--port',
        str(manager.local_bind_port) if manager.use_ssh_tunnel
        else str(server.port),
        '--username',
        server.username,
        '--no-password'
    ]

    if backup_obj_type != 'objects':
        args.append('--database')
        args.append(server.maintenance_db)

    if backup_obj_type == 'globals':
        args.append('--globals-only')

    # Helper: append a flag when the payload marks it truthy.
    def set_param(key, param):
        if key in data and data[key]:
            args.append(param)

    # Helper: append an option with its value (or a default).
    def set_value(key, param, default_value=None):
        if key in data and data[key] is not None and data[key] != '':
            args.append(param)
            args.append(data[key])
        elif default_value is not None:
            args.append(param)
            args.append(default_value)

    set_param('verbose', '--verbose')
    set_param('dqoute', '--quote-all-identifiers')
    set_value('role', '--role')

    if backup_obj_type == 'objects' and \
            'format' in data and data['format'] is not None:
        if data['format'] == 'custom':
            args.extend(['--format=c'])
            set_param('blobs', '--blobs')
            set_value('ratio', '--compress')
        elif data['format'] == 'tar':
            args.extend(['--format=t'])
            set_param('blobs', '--blobs')
        elif data['format'] == 'plain':
            args.extend(['--format=p'])
        elif data['format'] == 'directory':
            args.extend(['--format=d'])

    if 'only_data' in data and data['only_data']:
        set_param('only_data', '--data-only')
        # --disable-triggers only applies to plain-format data dumps.
        if 'format' in data and data['format'] == 'plain':
            set_param('disable_trigger', '--disable-triggers')
    elif 'only_schema' in data and data['only_schema']:
        set_param('only_schema', '--schema-only')

    set_param('dns_owner', '--no-owner')
    set_param('include_create_database', '--create')
    set_param('include_drop_database', '--clean')
    set_param('pre_data', '--section=pre-data')
    set_param('data', '--section=data')
    set_param('post_data', '--section=post-data')
    set_param('dns_privilege', '--no-privileges')
    set_param('dns_tablespace', '--no-tablespaces')
    set_param('dns_unlogged_tbl_data', '--no-unlogged-table-data')
    set_param('use_insert_commands', '--inserts')
    set_param('use_column_inserts', '--column-inserts')
    set_param('disable_quoting', '--disable-dollar-quoting')
    set_param('with_oids', '--oids')
    set_param('use_set_session_auth', '--use-set-session-authorization')

    # These options are only supported by pg_dump 11+.
    if manager.version >= 110000:
        set_param('no_comments', '--no-comments')
        set_param('load_via_partition_root', '--load-via-partition-root')

    set_value('encoding', '--encoding')
    set_value('no_of_jobs', '--jobs')

    if 'schemas' in data:
        for s in data['schemas']:
            args.extend(['--schema', s])

    if 'tables' in data:
        for s, t in data['tables']:
            args.extend([
                '--table', driver.qtIdent(conn, s, t)
            ])

    try:
        if backup_obj_type == 'objects':
            args.append(data['database'])
            p = BatchProcess(
                desc=BackupMessage(
                    BACKUP.OBJECT, sid,
                    data['file'].encode('utf-8') if hasattr(
                        data['file'], 'encode') else data['file'],
                    *args,
                    database=data['database']
                ),
                cmd=utility, args=args
            )
        else:
            p = BatchProcess(
                desc=BackupMessage(
                    BACKUP.SERVER if backup_obj_type != 'globals'
                    else BACKUP.GLOBALS,
                    sid,
                    data['file'].encode('utf-8') if hasattr(
                        data['file'], 'encode') else data['file'],
                    *args
                ),
                cmd=utility, args=args
            )

        manager.export_password_env(p.id)
        # Check for connection timeout and if it is greater than 0 then
        # set the environment variable PGCONNECT_TIMEOUT.
        if manager.connect_timeout > 0:
            env = dict()
            env['PGCONNECT_TIMEOUT'] = str(manager.connect_timeout)
            p.set_env_variables(server, env=env)
        else:
            p.set_env_variables(server)

        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(
            status=410,
            success=0,
            errormsg=str(e)
        )

    # Return response
    return make_json_response(
        data={'job_id': jid, 'Success': 1}
    )