def wrap(*args, **kwargs):
    """Bind the driver connection and the FTS-dictionary template path
    to the view instance, then invoke the wrapped view.

    args[0] is the view instance; kwargs carries gid, sid and did.
    """
    view = args[0]
    view.manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(
        kwargs['sid'])
    view.conn = view.manager.connection(did=kwargs['did'])
    view.qtIdent = get_driver(PG_DEFAULT_DRIVER).qtIdent

    # SQL templates are grouped by server version.
    view.template_path = 'fts_dictionary/sql/#{0}#'.format(
        view.manager.version)

    return f(*args, **kwargs)
def check_transaction_status(trans_id):
    """
    This function is used to check the transaction id
    is available in the session object and connection status.

    Args:
        trans_id: unique transaction id

    Returns:
        (status, error/response, conn, trans_obj, session_obj)
    """
    # Guard against a missing 'gridData' key -- previously this raised
    # KeyError instead of returning the not-found response (the other
    # variant of this function already had this check).
    if 'gridData' not in session:
        return False, gettext(
            'Transaction ID not found in the session.'
        ), None, None, None

    grid_data = session['gridData']

    # Return from the function if transaction id not found
    if str(trans_id) not in grid_data:
        return False, gettext(
            'Transaction ID not found in the session.'
        ), None, None, None

    # Fetch the object for the specified transaction id.
    # Use pickle.loads function to get the command object
    session_obj = grid_data[str(trans_id)]
    trans_obj = pickle.loads(session_obj['command_obj'])

    try:
        manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(
            trans_obj.sid)
        conn = manager.connection(did=trans_obj.did,
                                  conn_id=trans_obj.conn_id)
    except Exception as e:
        return False, internal_server_error(errormsg=str(e)), \
            None, None, None

    if conn.connected():
        return True, None, conn, trans_obj, session_obj
    else:
        return False, gettext(
            'Not connected to server or connection with the server '
            'has been closed.'), None, trans_obj, session_obj
def wrap(*args, **kwargs):
    """Set up the driver connection for the tablespace node before
    invoking the wrapped view function.
    """
    # Here args[0] will hold self & kwargs will hold gid,sid,tsid
    self = args[0]
    self.manager = get_driver(
        PG_DEFAULT_DRIVER
    ).connection_manager(
        kwargs['sid']
    )
    # Tablespaces are server-level objects: connect without a
    # specific database id.
    self.conn = self.manager.connection()

    # If DB not connected then return error to browser
    if not self.conn.connected():
        current_app.logger.warning(
            "Connection to the server has been lost."
        )
        return precondition_required(
            gettext(
                "Connection to the server has been lost."
            )
        )

    # Version-qualified template directory for the SQL scripts.
    self.template_path = 'tablespaces/sql/#{0}#'.format(
        self.manager.version
    )
    current_app.logger.debug(
        "Using the template path: %s", self.template_path
    )
    # Allowed ACL on tablespace
    self.acl = ['C']

    return f(*args, **kwargs)
def wrap(*args, **kwargs):
    """Populate the module-level server_info dict with connection
    details and the grant-wizard template path for sid/did.
    """
    # Here args[0] will hold self & kwargs will hold sid,did
    server_info.clear()
    server_info['manager'] = get_driver(PG_DEFAULT_DRIVER)\
        .connection_manager(kwargs['sid'])
    server_info['conn'] = server_info['manager'].connection(
        did=kwargs['did']
    )

    # If DB not connected then return error to browser
    if not server_info['conn'].connected():
        return precondition_required(
            gettext("Connection to the server has been lost.")
        )

    # Set template path for sql scripts
    server_info['server_type'] = server_info['manager'].server_type
    server_info['version'] = server_info['manager'].version
    if server_info['server_type'] == 'pg':
        server_info['template_path'] = 'grant_wizard/pg/#{0}#'.format(
            server_info['version'])
    elif server_info['server_type'] == 'ppas':
        server_info['template_path'] = 'grant_wizard/ppas/#{0}#'.format(
            server_info['version'])
    # NOTE(review): template_path stays unset for any other server
    # type -- presumably only 'pg'/'ppas' occur here; confirm upstream.

    return f(*args, **kwargs)
def wrap(*args, **kwargs):
    """Initialise connection state, ACL defaults and the template path
    for the type node before invoking the wrapped view.
    """
    # Here args[0] will hold self & kwargs will hold gid,sid,did
    self = args[0]
    self.manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(
        kwargs['sid'])
    self.conn = self.manager.connection(did=kwargs['did'])

    # We need datlastsysoid to check if current type is system type
    self.datlastsysoid = 0
    if (
        self.manager.db_info is not None and
        kwargs['did'] in self.manager.db_info
    ):
        self.datlastsysoid = self.manager.db_info[kwargs['did']][
            'datlastsysoid']

    # Declare allows acl on type
    self.acl = ['U']

    ver = self.manager.version
    server_type = self.manager.server_type

    # Set the template path for the SQL scripts
    # (gpdb templates are additionally keyed by server type).
    self.template_path = 'type/sql/' + (
        '#{0}#{1}#'.format(server_type, ver)
        if server_type == 'gpdb' else
        '#{0}#'.format(ver)
    )

    return f(*args, **kwargs)
def has_oids(self, default_conn=None):
    """
    This function checks whether the table has oids or not.

    Args:
        default_conn: optional existing connection to reuse; when
            None a connection is acquired from the driver.

    Raises:
        Exception: when the query fails or the connection is closed.
    """
    driver = get_driver(PG_DEFAULT_DRIVER)
    if default_conn is None:
        manager = driver.connection_manager(self.sid)
        conn = manager.connection(did=self.did, conn_id=self.conn_id)
    else:
        conn = default_conn

    if conn.connected():
        # Fetch the table oids status
        query = render_template(
            "/".join([self.sql_path, 'has_oids.sql']),
            obj_id=self.obj_id)

        status, has_oids = conn.execute_scalar(query)
        if not status:
            # On failure the scalar result carries the error message.
            raise Exception(has_oids)

    else:
        raise Exception(
            gettext('Not connected to server or connection with the '
                    'server has been closed.')
        )

    return has_oids
def connect_status(self, gid, sid):
    """Report whether the server's default connection is live."""
    from pgadmin.utils.driver import get_driver

    driver = get_driver(PG_DEFAULT_DRIVER)
    connection = driver.connection_manager(sid).connection()
    is_connected = connection.connected()
    return make_json_response(data={'connected': is_connected})
def validate_filter(self, row_filter):
    """
    This function validates the given filter.

    Args:
        row_filter: sql syntax to validate

    Returns:
        (status, result) -- on failure, result carries only the first
        line of the error message.
    """
    status = True
    result = None

    # An empty filter is rejected outright.
    if row_filter is None or row_filter == '':
        return False, gettext('Filter string is empty.')

    manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(self.sid)
    conn = manager.connection(did=self.did)

    if conn.connected():
        # Let the server validate the filter expression.
        sql = render_template("/".join([self.sql_path, 'validate.sql']),
                              nsp_name=self.nsp_name,
                              object_name=self.object_name,
                              row_filter=row_filter)

        status, result = conn.execute_scalar(sql)
        if not status:
            # Keep only the first line of the error message.
            result = result.partition("\n")[0]
    # NOTE(review): when the connection is down this returns
    # (True, None) -- confirm callers treat that as "not validated".

    return status, result
def get_primary_keys(self):
    """Fetch the primary key column names and types for the table.

    Returns:
        tuple: (pk_names, primary_keys) where pk_names is a
        comma-separated string of column names and primary_keys maps
        each column name to its type name.

    Raises:
        Exception: if the query fails or the connection is closed.
    """
    manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(self.sid)
    conn = manager.connection(did=self.did, conn_id=self.conn_id)

    primary_keys = dict()

    # Guard clause instead of wrapping the whole body in the if.
    if not conn.connected():
        raise Exception(
            gettext('Not connected to server or connection with the '
                    'server has been closed.'))

    # Fetch the primary key column names
    query = render_template(
        "/".join([self.sql_path, 'primary_keys.sql']),
        obj_id=self.obj_id)

    status, result = conn.execute_dict(query)
    if not status:
        raise Exception(result)

    for row in result['rows']:
        primary_keys[row['attname']] = row['typname']

    # Join the names instead of concatenating with a trailing comma
    # and trimming it off -- avoids quadratic string building.
    # (dict preserves insertion order, so row order is kept.)
    pk_names = ','.join(primary_keys)

    return pk_names, primary_keys
def wrap(*args, **kwargs):
    """Prepare connection state for the index-constraint node and
    cache the parent table's schema/name on the instance.
    """
    # Here args[0] will hold self & kwargs will hold gid,sid,did
    self = args[0]
    self.manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(
        kwargs['sid']
    )
    self.conn = self.manager.connection(did=kwargs['did'])

    # If DB not connected then return error to browser
    if not self.conn.connected():
        return precondition_required(
            _(
                "Connection to the server has been lost!"
            )
        )

    self.template_path = 'index_constraint/sql'

    # We need parent's name eg table name and schema name
    SQL = render_template("/".join([self.template_path,
                                    'get_parent.sql']),
                          tid=kwargs['tid'])
    status, rset = self.conn.execute_2darray(SQL)
    if not status:
        return internal_server_error(errormsg=rset)

    # Expected to yield a single row; the loop keeps the last row's
    # values either way.
    for row in rset['rows']:
        self.schema = row['schema']
        self.table = row['table']

    return f(*args, **kwargs)
def wrap(*args, **kwargs):
    """Bind driver helpers and version-dependent template paths to the
    view instance before calling the wrapped function.
    """
    self = args[0]
    driver = get_driver(PG_DEFAULT_DRIVER)
    self.manager = driver.connection_manager(kwargs['sid'])

    # Get database connection
    self.conn = self.manager.connection(did=kwargs['did'])
    self.qtIdent = driver.qtIdent
    self.qtLiteral = driver.qtLiteral

    if not self.conn.connected():
        return precondition_required(
            gettext(
                "Connection to the server has been lost!"
            )
        )

    ver = self.manager.version

    # Set template path for sql scripts depending
    # on the server version.
    self.template_path = "/".join([
        self.node_type
    ])
    # Layout: <node_type>/<server_type>/sql/<version bucket>
    self.sql_template_path = "/".join([
        self.template_path,
        self.manager.server_type,
        'sql',
        # Pick the newest template set the server supports.
        '9.5_plus' if ver >= 90500 else
        '9.2_plus' if ver >= 90200 else
        '9.1_plus'
    ])

    return f(*args, **kwargs)
def wrapped(self, *args, **kwargs):
    """Resolve the server's connection manager and an appropriate
    database connection before invoking the wrapped view.
    """
    self.manager = get_driver(
        PG_DEFAULT_DRIVER
    ).connection_manager(
        kwargs['sid']
    )
    if self.manager is None:
        return gone(errormsg="Could not find the server.")

    if action and action in ["drop"]:
        # Drop uses the generic (non-target) connection -- presumably
        # because the target database cannot be dropped over a
        # connection to itself; confirm.
        self.conn = self.manager.connection()
    elif 'did' in kwargs:
        self.conn = self.manager.connection(did=kwargs['did'])
        self.db_allow_connection = True
        # If connection to database is not allowed then
        # provide generic connection
        if kwargs['did'] in self.manager.db_info:
            self._db = self.manager.db_info[kwargs['did']]
            if self._db['datallowconn'] is False:
                self.conn = self.manager.connection()
                self.db_allow_connection = False
    else:
        self.conn = self.manager.connection()

    # set template path for sql scripts
    self.template_path = 'databases/sql/#{0}#'.format(
        self.manager.version
    )

    return f(self, *args, **kwargs)
def connect(self, gid, sid, did):
    """Connect the Database.

    Establishes (with auto-reconnect) a connection to the database
    identified by did and returns a JSON response describing the
    outcome.
    """
    from pgadmin.utils.driver import get_driver

    manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(sid)
    conn = manager.connection(did=did, auto_reconnect=True)
    status, errmsg = conn.connect()

    if not status:
        # Fixed log message grammar ("Could not connected").
        current_app.logger.error(
            "Could not connect to database(#{0}).\nError: {1}".format(
                did, errmsg
            )
        )
        return internal_server_error(errmsg)
    else:
        # Fixed: the original string used a backslash continuation
        # inside the literal, injecting stray whitespace into the log.
        current_app.logger.info(
            'Connection Established for Database Id: %s' % did)

        return make_json_response(
            success=1,
            info=_("Database connected."),
            data={
                'icon': 'pg-icon-database',
                'connected': True
            }
        )
def __init__(self):
    """Build a minimal app whose Jinja environment mirrors the real
    application's: driver quoting filters plus template search paths
    for this module and its parents.
    """
    super(FakeApp, self).__init__('')
    driver = get_driver(PG_DEFAULT_DRIVER, self)
    # Expose the driver's quoting helpers as Jinja filters, matching
    # the real application's environment.
    self.jinja_env.filters['qtLiteral'] = driver.qtLiteral
    self.jinja_env.filters['qtIdent'] = driver.qtIdent
    self.jinja_env.filters['qtTypeIdent'] = driver.qtTypeIdent
    # Search templates, in order: this module's parent, the
    # grandparent, the sibling 'types' module, and four levels up.
    self.jinja_loader = ChoiceLoader([
        FileSystemLoader(
            os.path.join(os.path.dirname(
                os.path.realpath(__file__)
            ), os.pardir, 'templates')
        ),
        FileSystemLoader(
            os.path.join(
                os.path.dirname(
                    os.path.realpath(__file__)
                ),
                os.pardir, os.pardir, 'templates')
        ),
        FileSystemLoader(
            os.path.join(os.path.dirname(
                os.path.realpath(__file__)),
                os.pardir, os.pardir,
                'types', 'templates')
        ),
        FileSystemLoader(
            os.path.join(os.path.dirname(
                os.path.realpath(__file__)),
                os.pardir, os.pardir,
                os.pardir, os.pardir,
                'templates')
        ),
    ]
    )
def wrap(*args, **kwargs):
    """Bind driver helpers and a version-specific template path to the
    view instance; bail out early if the DB connection is gone.
    """
    self = args[0]
    driver = get_driver(PG_DEFAULT_DRIVER)
    self.manager = driver.connection_manager(kwargs['sid'])

    # Get database connection
    self.conn = self.manager.connection(did=kwargs['did'])
    self.qtIdent = driver.qtIdent
    self.qtLiteral = driver.qtLiteral

    if not self.conn.connected():
        return precondition_required(
            gettext(
                "Connection to the server has been lost."
            )
        )

    # Set template path for sql scripts depending
    # on the server version.
    # Layout: <node_type>/<server_type>/#<version>#
    self.sql_template_path = "/".join([
        self.node_type,
        self.manager.server_type,
        '#{0}#'
    ]).format(self.manager.version)

    return f(*args, **kwargs)
def wrapped(self, *args, **kwargs):
    """Initialise connection/template state for the package node and
    resolve the schema name for the given scid before calling f.
    """
    driver = get_driver(PG_DEFAULT_DRIVER)
    self.manager = driver.connection_manager(kwargs['sid'])
    self.qtIdent = driver.qtIdent

    # Use the database-specific connection when a did is supplied,
    # otherwise fall back to the default connection.
    if 'did' in kwargs:
        self.conn = self.manager.connection(did=kwargs['did'])
    else:
        self.conn = self.manager.connection()

    # If DB not connected then return error to browser
    if not self.conn.connected():
        return precondition_required(
            _(
                "Connection to the server has been lost."
            )
        )

    # Package templates are ppas-specific and keyed by version.
    self.template_path = 'package/ppas/#{0}#'.format(
        self.manager.version)

    # Resolve the schema name for the supplied schema id.
    SQL = render_template(
        "/".join([self.template_path, 'get_schema.sql']),
        scid=kwargs['scid']
    )
    status, rset = self.conn.execute_scalar(SQL)
    if not status:
        return internal_server_error(errormsg=rset)

    self.schema = rset

    # Allowed ACL on package
    self.acl = ['X']
    return f(self, *args, **kwargs)
def wrap(*args, **kwargs):
    """Set up connection state for the check-constraint node and cache
    the parent table's schema/name on the view instance.
    """
    self = args[0]
    driver = get_driver(PG_DEFAULT_DRIVER)
    self.manager = driver.connection_manager(kwargs['sid'])
    self.conn = self.manager.connection(did=kwargs['did'])
    self.qtIdent = driver.qtIdent

    # If DB not connected then return error to browser
    if not self.conn.connected():
        return precondition_required(
            _("Connection to the server has been lost!")
        )

    ver = self.manager.version

    # we will set template path for sql scripts
    # NOTE(review): template_path is left unset when ver < 90100 --
    # presumably such servers never reach this node; confirm.
    if ver >= 90200:
        self.template_path = 'check_constraint/sql/9.2_plus'
    elif ver >= 90100:
        self.template_path = 'check_constraint/sql/9.1_plus'

    # Fetch the parent table's schema and name for later use.
    SQL = render_template("/".join([self.template_path,
                                    'get_parent.sql']),
                          tid=kwargs['tid'])
    status, rset = self.conn.execute_2darray(SQL)
    if not status:
        return internal_server_error(errormsg=rset)

    self.schema = rset['rows'][0]['schema']
    self.table = rset['rows'][0]['table']

    return f(*args, **kwargs)
def statistics(self, gid, sid):
    """Return server-level statistics as JSON.

    Renders stats.sql (template directory chosen by server version)
    against the server's default connection.
    """
    from pgadmin.utils.driver import get_driver
    manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(sid)
    conn = manager.connection()

    if conn.connected():
        status, res = conn.execute_dict(
            render_template(
                "/".join([
                    'servers/sql',
                    '9.2_plus' if manager.version >= 90200 else
                    '9.1_plus',
                    'stats.sql'
                ]),
                conn=conn, _=gettext
            )
        )

        if not status:
            return internal_server_error(errormsg=res)

        return make_json_response(data=res)

    return make_json_response(
        info=gettext(
            "Server has no active connection for generating statistics."
        )
    )
def wrap(*args, **kwargs):
    """Set up connection and version-specific template path for the
    foreign-table node before calling the wrapped view.

    args[0] is the view instance; kwargs carries sid and did.
    """
    self = args[0]
    driver = get_driver(PG_DEFAULT_DRIVER)
    self.manager = driver.connection_manager(kwargs['sid'])

    # Get database connection
    self.conn = self.manager.connection(did=kwargs['did'])
    self.qtIdent = driver.qtIdent

    if not self.conn.connected():
        return precondition_required(
            gettext(
                "Connection to the server has been lost!"
            )
        )

    ver = self.manager.version
    # Removed unused local: server_type was assigned but never read.

    # Set template path for sql scripts depending
    # on the server version.
    if ver >= 90500:
        self.template_path = 'foreign_tables/sql/9.5_plus'
    elif ver >= 90200:
        self.template_path = 'foreign_tables/sql/9.2_plus'
    else:
        self.template_path = 'foreign_tables/sql/9.1_plus'

    return f(*args, **kwargs)
def initialize_datagrid(cmd_type, obj_type, sid, did, obj_id):
    """
    This method is responsible for creating an asynchronous connection.
    After creating the connection it will instantiate and initialize
    the object as per the object type. It will also create a unique
    transaction id and store the information into session variable.

    Args:
        cmd_type: Contains value for which menu item is clicked.
        obj_type: Contains type of selected object for which data grid
            to be render
        sid: Server Id
        did: Database Id
        obj_id: Id of currently selected object
    """
    # The filter may arrive as a JSON body or as query/form args.
    if request.data:
        filter_sql = json.loads(request.data.decode())
    else:
        filter_sql = request.args or request.form

    # Create asynchronous connection using random connection id.
    conn_id = str(random.randint(1, 9999999))
    try:
        manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(sid)
        conn = manager.connection(did=did, conn_id=conn_id)
    except Exception as e:
        return internal_server_error(errormsg=str(e))

    # Connect the Server
    status, msg = conn.connect()
    if not status:
        return internal_server_error(errormsg=str(msg))

    try:
        # Get the object as per the object type
        command_obj = ObjectRegistry.get_object(obj_type,
                                                conn_id=conn_id,
                                                sid=sid,
                                                did=did,
                                                obj_id=obj_id,
                                                cmd_type=cmd_type,
                                                sql_filter=filter_sql)
    except Exception as e:
        return internal_server_error(errormsg=str(e))

    # Create a unique id for the transaction
    trans_id = str(random.randint(1, 9999999))

    if 'gridData' not in session:
        sql_grid_data = dict()
    else:
        sql_grid_data = session['gridData']

    # Use pickle to store the command object which will be used
    # later by the sql grid module.
    sql_grid_data[trans_id] = {
        # -1 specify the highest protocol version available
        'command_obj': pickle.dumps(command_obj, -1)
    }

    # Store the grid dictionary into the session variable
    session['gridData'] = sql_grid_data

    return make_json_response(data={'gridTransId': trans_id})
def node(self, gid, sid):
    """Return a JSON browser-node document for one server.

    Looks up the server owned by the current user, probes its
    connection and (when connected) its recovery/WAL-replay state,
    and renders the node for the browser tree.
    """
    server = Server.query.filter_by(user_id=current_user.id,
                                    servergroup_id=gid,
                                    id=sid).first()

    if server is None:
        return make_json_response(
            status=410,
            success=0,
            # Fixed: was gettext(gettext("... id# %s!").format(sid)) --
            # a doubled translation call plus a %s placeholder that
            # str.format never substituted, so the id was never shown.
            errormsg=gettext(
                "Couldn't find the server with id# {0}!"
            ).format(sid)
        )

    from pgadmin.utils.driver import get_driver
    manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if connected:
        # Recovery state is only probed for superusers (see the
        # usesuper CASE guards in the SQL).
        status, result = conn.execute_dict("""
            SELECT CASE WHEN usesuper
                   THEN pg_is_in_recovery()
                   ELSE FALSE
                   END as inrecovery,
                   CASE WHEN usesuper AND pg_is_in_recovery()
                   THEN pg_is_xlog_replay_paused()
                   ELSE FALSE
                   END as isreplaypaused
            FROM pg_user WHERE usename=current_user""")

        in_recovery = result['rows'][0]['inrecovery']
        wal_paused = result['rows'][0]['isreplaypaused']
    else:
        in_recovery = None
        wal_paused = None

    return make_json_response(
        result=self.blueprint.generate_browser_node(
            "%d" % (server.id),
            gid,
            server.name,
            "icon-server-not-connected" if not connected
            else "icon-{0}".format(manager.server_type),
            True,
            self.node_type,
            connected=connected,
            server_type=manager.server_type if connected else 'pg',
            version=manager.version,
            db=manager.db,
            user=manager.user_info if connected else None,
            in_recovery=in_recovery,
            wal_pause=wal_paused
        )
    )
def wrap(*args, **kwargs):
    """Attach a driver connection and the extension SQL template
    directory to the view instance, then run the wrapped view.
    """
    view = args[0]
    view.manager = get_driver(
        PG_DEFAULT_DRIVER
    ).connection_manager(kwargs['sid'])
    view.conn = view.manager.connection(did=kwargs['did'])
    # Extension templates are not version-specific.
    view.template_path = 'extensions/sql'
    return f(*args, **kwargs)
def wrap(*args, **kwargs):
    """Prepare driver/connection state for the pgAgent schedule node.

    args[0] is the view instance; kwargs carries gid, sid and jid.
    """
    view = args[0]
    view.driver = get_driver(PG_DEFAULT_DRIVER)
    view.manager = view.driver.connection_manager(kwargs['sid'])
    # Connect without a specific database id.
    view.conn = view.manager.connection()
    view.template_path = 'pga_schedule/sql/pre3.4'
    return f(*args, **kwargs)
def wrap(*args, **kwargs):
    """Bind connection state and the FTS-template SQL path to the view
    instance before delegating to the wrapped function.
    """
    view = args[0]
    view.manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(
        kwargs['sid'])
    view.conn = view.manager.connection(did=kwargs['did'])
    # Template directory is qualified by the server version.
    view.template_path = 'fts_template/sql/#{0}#'.format(
        view.manager.version)
    return f(*args, **kwargs)
def get_all_columns_with_order(self, default_conn=None):
    """
    It is overridden method specially for Table because we all have to
    fetch primary keys and rest of the columns both.

    Args:
        default_conn: Connection object

    Returns:
        all_sorted_columns: Sorted columns for the Grid
        all_columns: List of columns for the select2 options

    Raises:
        Exception: when either column query fails.
    """
    driver = get_driver(PG_DEFAULT_DRIVER)
    if default_conn is None:
        manager = driver.connection_manager(self.sid)
        conn = manager.connection(did=self.did, conn_id=self.conn_id)
    else:
        conn = default_conn

    all_sorted_columns = []
    data_sorting = self.get_data_sorting()
    all_columns = []

    # Fetch the primary key column names
    query = render_template(
        "/".join([self.sql_path, 'primary_keys.sql']),
        obj_id=self.obj_id
    )

    status, result = conn.execute_dict(query)
    if not status:
        raise Exception(result)

    # Primary key columns come first in the list.
    for row in result['rows']:
        all_columns.append(row['attname'])

    # Fetch the rest of the column names
    query = render_template(
        "/".join([self.sql_path, 'get_columns.sql']),
        obj_id=self.obj_id
    )
    status, result = conn.execute_dict(query)
    if not status:
        raise Exception(result)

    for row in result['rows']:
        # Only append if not already present in the list
        if row['attname'] not in all_columns:
            all_columns.append(row['attname'])

    # If user has custom data sorting then pass as it as it is
    if data_sorting and len(data_sorting) > 0:
        all_sorted_columns = data_sorting

    return all_sorted_columns, all_columns
def check_transaction_status(trans_id):
    """
    This function is used to check the transaction id is available in
    the session object and connection status.

    Args:
        trans_id: unique transaction id

    Returns:
        (status, error/response, conn, trans_obj, session_obj)
    """
    if 'gridData' not in session:
        return False, gettext(
            'Transaction ID not found in the session.'
        ), None, None, None

    grid_data = session['gridData']

    # Return from the function if transaction id not found
    if str(trans_id) not in grid_data:
        return False, gettext(
            'Transaction ID not found in the session.'
        ), None, None, None

    # Fetch the object for the specified transaction id.
    # Use pickle.loads function to get the command object
    session_obj = grid_data[str(trans_id)]
    trans_obj = pickle.loads(session_obj['command_obj'])

    try:
        manager = get_driver(
            PG_DEFAULT_DRIVER).connection_manager(trans_obj.sid)
        conn = manager.connection(
            did=trans_obj.did,
            conn_id=trans_obj.conn_id,
            auto_reconnect=False,
            use_binary_placeholder=True,
            array_to_string=True
        )
    except (ConnectionLost, SSHTunnelConnectionLost) as e:
        # Propagate lost-connection errors so the caller can prompt
        # for a reconnect.
        raise
    except Exception as e:
        current_app.logger.error(e)
        return False, internal_server_error(errormsg=str(e)), \
            None, None, None

    # Only connect when the caller explicitly asked via ?connect=1.
    connect = True if 'connect' in request.args and \
        request.args['connect'] == '1' else False

    if connect:
        conn.connect()

    return True, None, conn, trans_obj, session_obj
def nodes(self, gid):
    """
    Return a JSON document listing the servers under this server group
    for the user.
    """
    res = []

    servers = Server.query.filter_by(user_id=current_user.id,
                                     servergroup_id=gid)

    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)

    for server in servers:
        manager = driver.connection_manager(server.id)
        conn = manager.connection()
        connected = conn.connected()

        if connected:
            # Recovery state is only probed for superusers (see the
            # usesuper CASE guards in the SQL).
            status, result = conn.execute_dict("""
                SELECT CASE WHEN usesuper
                       THEN pg_is_in_recovery()
                       ELSE FALSE
                       END as inrecovery,
                       CASE WHEN usesuper AND pg_is_in_recovery()
                       THEN pg_is_xlog_replay_paused()
                       ELSE FALSE
                       END as isreplaypaused
                FROM pg_user WHERE usename=current_user""")

            in_recovery = result['rows'][0]['inrecovery'];
            wal_paused = result['rows'][0]['isreplaypaused']
        else:
            in_recovery = None
            wal_paused = None

        res.append(
            self.blueprint.generate_browser_node(
                "%d" % (server.id),
                gid,
                server.name,
                "icon-server-not-connected" if not connected
                else "icon-{0}".format(manager.server_type),
                True,
                self.node_type,
                connected=connected,
                server_type=manager.server_type if connected else 'pg',
                version=manager.version,
                db=manager.db,
                user=manager.user_info if connected else None,
                in_recovery=in_recovery,
                wal_pause=wal_paused
            )
        )

    return make_json_response(result=res)
def wrap(*args, **kwargs):
    """Prepare driver, connection and template path for the domain
    constraint node, then invoke the wrapped view.
    """
    view = args[0]
    pg_driver = get_driver(PG_DEFAULT_DRIVER)
    view.manager = pg_driver.connection_manager(kwargs['sid'])
    view.conn = view.manager.connection(did=kwargs['did'])
    view.qtIdent = pg_driver.qtIdent

    # Version-qualified SQL template directory.
    view.template_path = 'domain_constraints/sql/#{0}#'.format(
        view.manager.version)

    return f(*args, **kwargs)
def wrap(*args, **kwargs):
    """Attach connection state for the event-trigger node (gid, sid
    and did arrive via kwargs) and call the wrapped view.
    """
    view = args[0]
    view.manager = get_driver(
        PG_DEFAULT_DRIVER
    ).connection_manager(kwargs['sid'])
    view.conn = view.manager.connection(did=kwargs['did'])
    # Single fixed template directory for this node.
    view.template_path = 'event_triggers/sql/9.3_plus'
    return f(*args, **kwargs)
def wrap(*args, **kwargs):
    """Set up the catalog-object view: a driver connection plus a
    template path qualified by server flavour and version.
    """
    view = args[0]
    view.manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(
        kwargs['sid']
    )
    view.conn = view.manager.connection(did=kwargs['did'])
    flavour = 'ppas' if view.manager.server_type == 'ppas' else 'pg'
    view.template_path = 'catalog_object/sql/{0}/#{1}#'.format(
        flavour, view.manager.version
    )
    return f(*args, **kwargs)
def execute(self, sql, trans_id, http_session, connect=False):
    """Execute sql within an existing query-tool transaction.

    Retrieves the pickled command object for trans_id, re-acquires the
    associated connection, optionally connects, runs the query
    asynchronously and returns a JSON response describing the result.
    """
    session_obj = StartRunningQuery.retrieve_session_information(
        http_session,
        trans_id
    )
    # retrieve_session_information returns a Response on failure.
    if type(session_obj) is Response:
        return session_obj

    transaction_object = pickle.loads(session_obj['command_obj'])
    can_edit = False
    can_filter = False
    notifies = None
    if transaction_object is not None and session_obj is not None:
        # set fetched row count to 0 as we are executing query again.
        transaction_object.update_fetched_row_cnt(0)
        self.__retrieve_connection_id(transaction_object)

        try:
            manager = get_driver(
                PG_DEFAULT_DRIVER).connection_manager(
                transaction_object.sid)
            conn = manager.connection(did=transaction_object.did,
                                      conn_id=self.connection_id,
                                      auto_reconnect=False,
                                      use_binary_placeholder=True,
                                      array_to_string=True)
        except (ConnectionLost, SSHTunnelConnectionLost):
            # Let the caller handle reconnect prompts.
            raise
        except Exception as e:
            self.logger.error(e)
            return internal_server_error(errormsg=str(e))

        # Connect to the Server if not connected.
        if connect and not conn.connected():
            status, msg = conn.connect()
            if not status:
                self.logger.error(msg)
                return internal_server_error(errormsg=str(msg))

        # The helper may rewrite the statement (e.g. for an EXPLAIN
        # plan) depending on state it tracks -- see its definition.
        effective_sql_statement = apply_explain_plan_wrapper_if_needed(
            manager, sql)

        result, status = self.__execute_query(
            conn,
            session_obj,
            effective_sql_statement,
            trans_id,
            transaction_object
        )

        can_edit = transaction_object.can_edit()
        can_filter = transaction_object.can_filter()

        # Get the notifies
        notifies = conn.get_notifies()
    else:
        status = False
        result = gettext(
            'Either transaction object or session object not found.')

    return make_json_response(
        data={
            'status': status,
            'result': result,
            'can_edit': can_edit,
            'can_filter': can_filter,
            'info_notifier_timeout':
                self.blueprint_object.info_notifier_timeout.get(),
            'notifies': notifies
        }
    )
def properties(sid, did, node_id, node_type):
    """It fetches the properties of object types
    and render into selection page of wizard
    """
    get_schema_sql_url = '/sql/get_schemas.sql'

    # unquote encoded url parameter
    node_type = unquote(node_type)

    server_prop = server_info

    res_data = []
    failed_objects = []
    manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(sid)
    conn = manager.connection(did=did)

    show_sysobj = blueprint.show_system_objects().get()
    if node_type == 'database':
        # Database level: enumerate schemas, then walk their objects.
        sql = render_template("/".join(
            [server_prop['template_path'], get_schema_sql_url]),
            show_sysobj=show_sysobj)
        ntype = 'schema'
    else:
        # Below database level: restrict to the given namespace.
        sql = render_template("/".join(
            [server_prop['template_path'], get_schema_sql_url]),
            show_sysobj=show_sysobj, nspid=node_id)
        ntype = node_type

    status, res = conn.execute_dict(sql)

    if not status:
        return internal_server_error(errormsg=res)
    node_types = res['rows']

    def _append_rows(status, res, disp_type):
        # Collect rows on success; otherwise remember the failed
        # display type so a partial-failure message can be produced.
        if not status:
            current_app.logger.error(res)
            failed_objects.append(disp_type)
        else:
            res_data.extend(res['rows'])

    for row in node_types:
        if 'oid' in row:
            node_id = row['oid']

        if ntype == 'schema':
            # Fetch every supported object type within the schema.
            status, res = _get_rows_for_type(
                conn, 'function', server_prop, node_id)
            _append_rows(status, res, 'function')

            status, res = _get_rows_for_type(
                conn, 'procedure', server_prop, node_id)
            _append_rows(status, res, 'procedure')

            status, res = _get_rows_for_type(
                conn, 'trigger_function', server_prop, node_id)
            _append_rows(status, res, 'trigger function')

            status, res = _get_rows_for_type(
                conn, 'sequence', server_prop, node_id)
            _append_rows(status, res, 'sequence')

            status, res = _get_rows_for_type(
                conn, 'table', server_prop, node_id)
            _append_rows(status, res, 'table')

            status, res = _get_rows_for_type(
                conn, 'view', server_prop, node_id)
            _append_rows(status, res, 'view')

            status, res = _get_rows_for_type(
                conn, 'mview', server_prop, node_id)
            _append_rows(status, res, 'materialized view')
        else:
            status, res = _get_rows_for_type(
                conn, ntype, server_prop, node_id)
            # NOTE(review): the display type here is hard-coded to
            # 'function' although ntype may be any object type --
            # looks like it should be ntype; confirm before changing.
            _append_rows(status, res, 'function')

    msg = None
    if len(failed_objects) > 0:
        msg = gettext('Unable to fetch the {} objects'.format(
            ", ".join(failed_objects)))

    return make_json_response(result=res_data, info=msg, status=200)
def save(sid, did):
    """
    Apply the selected privileges to the chosen database objects.

    Builds GRANT statements for function, sequence and table
    privileges from the posted data and executes them in one batch.

    Args:
        sid: Server Id
        did: Database Id
    """
    server_prop = server_info
    data = request.form if request.form else json.loads(
        request.data.decode())

    # Form db connection and we use conn to execute sql
    manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(sid)
    conn = manager.connection(did=did)

    acls = []
    try:
        acls = render_template(
            "/".join([server_prop['template_path'], 'acl.json']),
        )
        acls = json.loads(acls)
    except Exception as e:
        # Best-effort: a missing/invalid acl.json is logged, and the
        # KeyError below is reported by the outer handler.
        current_app.logger.exception(e)

    try:
        # Parse privileges
        data['priv'] = {}
        if 'acl' in data:
            # Get function acls
            data['priv']['function'] = parse_priv_to_db(
                data['acl'],
                acls['function']['acl'])

            data['priv']['sequence'] = parse_priv_to_db(
                data['acl'],
                acls['sequence']['acl'])

            data['priv']['table'] = parse_priv_to_db(
                data['acl'],
                acls['table']['acl'])

        # Pass database objects and get SQL for privileges
        sql_data = ''
        data_func = {'objects': data['objects'],
                     'priv': data['priv']['function']}
        sql = render_template(
            "/".join([server_prop['template_path'],
                      '/sql/grant_function.sql']),
            data=data_func, conn=conn)
        if sql and sql.strip('\n') != '':
            sql_data += sql

        data_seq = {'objects': data['objects'],
                    'priv': data['priv']['sequence']}
        sql = render_template(
            "/".join([server_prop['template_path'],
                      '/sql/grant_sequence.sql']),
            data=data_seq, conn=conn)
        if sql and sql.strip('\n') != '':
            sql_data += sql

        data_table = {'objects': data['objects'],
                      'priv': data['priv']['table']}
        sql = render_template(
            "/".join([server_prop['template_path'],
                      '/sql/grant_table.sql']),
            data=data_table, conn=conn)
        if sql and sql.strip('\n') != '':
            sql_data += sql

        status, res = conn.execute_dict(sql_data)
        if not status:
            return internal_server_error(errormsg=res)

        return make_json_response(success=1,
                                  info="Privileges applied")

    except Exception as e:
        # Fixed: Exception.message does not exist on Python 3 --
        # accessing it raised AttributeError and masked the real error.
        return internal_server_error(errormsg=str(e))
def initialize_query_tool(sgid, sid, did=None):
    """
    This method is responsible for instantiating and initializing the
    query tool object. It will also create a unique transaction id and
    store the information into session variable.

    Args:
        sgid: Server group Id
        sid: Server Id
        did: Database Id
    """
    # Skip the initial connect when the tab is being recreated
    # (?recreate=1).
    connect = True
    if ('recreate' in request.args and
            request.args['recreate'] == '1'):
        connect = False
    # Create asynchronous connection using random connection id.
    conn_id = str(random.randint(1, 9999999))

    # Use Maintenance database OID
    manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(sid)
    if did is None:
        did = manager.did
    try:
        command_obj = ObjectRegistry.get_object(
            'query_tool', conn_id=conn_id, sgid=sgid, sid=sid, did=did
        )
    except Exception as e:
        app.logger.error(e)
        return internal_server_error(errormsg=str(e))

    try:
        conn = manager.connection(did=did, conn_id=conn_id,
                                  auto_reconnect=False,
                                  use_binary_placeholder=True,
                                  array_to_string=True)
        if connect:
            conn.connect()
    except ConnectionLost as e:
        # Bubble up so the client can offer a reconnect.
        raise
    except Exception as e:
        app.logger.error(e)
        return internal_server_error(errormsg=str(e))

    # Create a unique id for the transaction
    trans_id = str(random.randint(1, 9999999))

    if 'gridData' not in session:
        sql_grid_data = dict()
    else:
        sql_grid_data = session['gridData']

    # Use pickle to store the command object which will be used
    # later by the sql grid module.
    sql_grid_data[trans_id] = {
        # -1 specify the highest protocol version available
        'command_obj': pickle.dumps(command_obj, -1)
    }

    # Store the grid dictionary into the session variable
    session['gridData'] = sql_grid_data

    pref = Preferences.module('sqleditor')
    new_browser_tab = pref.preference('new_browser_tab').get()

    return make_json_response(
        data={
            'gridTransId': trans_id,
            'newBrowserTab': new_browser_tab
        }
    )
def start_view_data(trans_id):
    """
    This method is used to execute query using asynchronous connection.

    Args:
        trans_id: unique transaction id
    """
    limit = -1

    # Check the transaction and connection status
    status, error_msg, conn, trans_obj, session_obj = \
        check_transaction_status(trans_id)

    if error_msg == gettext('Transaction ID not found in the session.'):
        return make_json_response(success=0, errormsg=error_msg,
                                  info='DATAGRID_TRANSACTION_REQUIRED',
                                  status=404)

    # get the default connection as current connection which is attached to
    # trans id holds the cursor which has query result so we cannot use that
    # connection to execute another query otherwise we'll lose query result.
    try:
        manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(
            trans_obj.sid)
        default_conn = manager.connection(did=trans_obj.did)
    except (ConnectionLost, SSHTunnelConnectionLost) as e:
        # Let the global handler prompt a reconnect.
        raise
    except Exception as e:
        current_app.logger.error(e)
        return internal_server_error(errormsg=str(e))

    # Connect to the Server if not connected.
    if not default_conn.connected():
        status, msg = default_conn.connect()
        if not status:
            return make_json_response(
                data={'status': status, 'result': u"{}".format(msg)}
            )

    if status and conn is not None and \
            trans_obj is not None and session_obj is not None:
        # set fetched row count to 0 as we are executing query again.
        trans_obj.update_fetched_row_cnt(0)

        # Fetch the sql and primary_keys from the object
        sql = trans_obj.get_sql(default_conn)
        pk_names, primary_keys = trans_obj.get_primary_keys(default_conn)

        # Re-serialize the command object so later requests see the
        # reset row count.
        session_obj['command_obj'] = pickle.dumps(trans_obj, -1)

        has_oids = False
        if trans_obj.object_type == 'table':
            # Fetch OIDs status
            has_oids = trans_obj.has_oids(default_conn)

        # Fetch the applied filter.
        filter_applied = trans_obj.is_filter_applied()

        # Fetch the limit for the SQL query
        limit = trans_obj.get_limit()

        can_edit = trans_obj.can_edit()
        can_filter = trans_obj.can_filter()

        # Store the primary keys to the session object
        session_obj['primary_keys'] = primary_keys

        # Store the OIDs status into session object
        session_obj['has_oids'] = has_oids

        update_session_grid_transaction(trans_id, session_obj)

        # Execute sql asynchronously on the transaction's own
        # connection (its cursor will hold the result set).
        try:
            status, result = conn.execute_async(sql)
        except (ConnectionLost, SSHTunnelConnectionLost) as e:
            raise
    else:
        status = False
        result = error_msg
        filter_applied = False
        can_edit = False
        can_filter = False
        sql = None

    return make_json_response(
        data={
            'status': status, 'result': result,
            'filter_applied': filter_applied,
            'limit': limit, 'can_edit': can_edit,
            'can_filter': can_filter, 'sql': sql,
            'info_notifier_timeout': blueprint.info_notifier_timeout.get()
        }
    )
def cancel_transaction(trans_id):
    """
    Cancel the query running under the given transaction.

    Args:
        trans_id: unique transaction id
    """
    key = str(trans_id)

    # Both "no grid data at all" and "unknown id" yield the same
    # 404 response.
    if 'gridData' not in session or key not in session['gridData']:
        return make_json_response(
            success=0,
            errormsg=gettext('Transaction ID not found in the session.'),
            info='DATAGRID_TRANSACTION_REQUIRED', status=404)

    # Unpickle the command object stored for this transaction.
    sess_obj = session['gridData'][key]
    cmd_obj = pickle.loads(sess_obj['command_obj'])

    if cmd_obj is None or sess_obj is None:
        return make_json_response(
            data={
                'status': False,
                'result': gettext(
                    'Either transaction object or session object '
                    'not found.')
            }
        )

    # Fetch the main connection object for the database.
    try:
        mgr = get_driver(
            PG_DEFAULT_DRIVER).connection_manager(cmd_obj.sid)
        db_conn = mgr.connection(did=cmd_obj.did)
    except Exception as e:
        return internal_server_error(errormsg=str(e))

    # Connect to the Server if not connected; remember that we created
    # this connection so we can release it afterwards.
    created_here = False
    if not db_conn.connected():
        ok, msg = db_conn.connect()
        if not ok:
            return internal_server_error(errormsg=str(msg))
        created_here = True

    if db_conn.connected():
        # on successful connection cancel the running transaction
        status, result = db_conn.cancel_transaction(
            cmd_obj.conn_id, cmd_obj.did)

        # Delete connection if we have created it to cancel the
        # transaction
        if created_here:
            mgr.release(did=cmd_obj.did)
    else:
        status = False
        result = gettext(
            'Not connected to server or connection with the server has '
            'been closed.'
        )

    return make_json_response(
        data={
            'status': status, 'result': result
        }
    )
def utils():
    """
    Render the browser utility script (browser/js/utils.js) with the
    user's current preferences and environment details substituted in.
    """
    layout = get_setting('Browser/Layout', default='')
    snippets = []

    prefs = Preferences.module('paths')

    pg_help_path_pref = prefs.preference('pg_help_path')
    pg_help_path = pg_help_path_pref.get()

    edbas_help_path_pref = prefs.preference('edbas_help_path')
    edbas_help_path = edbas_help_path_pref.get()

    # Get sqleditor options
    prefs = Preferences.module('sqleditor')

    editor_tab_size_pref = prefs.preference('tab_size')
    editor_tab_size = editor_tab_size_pref.get()

    editor_use_spaces_pref = prefs.preference('use_spaces')
    editor_use_spaces = editor_use_spaces_pref.get()

    editor_wrap_code_pref = prefs.preference('wrap_code')
    editor_wrap_code = editor_wrap_code_pref.get()

    brace_matching_pref = prefs.preference('brace_matching')
    brace_matching = brace_matching_pref.get()

    insert_pair_brackets_perf = prefs.preference('insert_pair_brackets')
    insert_pair_brackets = insert_pair_brackets_perf.get()

    # This will be opposite of use_space option
    editor_indent_with_tabs = False if editor_use_spaces else True

    # Try to fetch current libpq version from the driver
    try:
        from config import PG_DEFAULT_DRIVER
        from pgadmin.utils.driver import get_driver
        driver = get_driver(PG_DEFAULT_DRIVER)
        pg_libpq_version = driver.libpq_version()
    except Exception as e:
        # Best-effort: fall back to 0 when the driver cannot report it.
        pg_libpq_version = 0

    # Collect JS snippets contributed by browser submodules.
    for submodule in current_blueprint.submodules:
        snippets.extend(submodule.jssnippets)

    return make_response(
        render_template(
            'browser/js/utils.js',
            layout=layout,
            jssnippets=snippets,
            pg_help_path=pg_help_path,
            edbas_help_path=edbas_help_path,
            editor_tab_size=editor_tab_size,
            editor_use_spaces=editor_use_spaces,
            editor_wrap_code=editor_wrap_code,
            editor_brace_matching=brace_matching,
            editor_insert_pair_brackets=insert_pair_brackets,
            editor_indent_with_tabs=editor_indent_with_tabs,
            app_name=config.APP_NAME,
            pg_libpq_version=pg_libpq_version,
            support_ssh_tunnel=config.SUPPORT_SSH_TUNNEL,
            logout_url=_get_logout_url()),
        200, {'Content-Type': 'application/javascript'})
def create_maintenance_job(sid, did):
    """
    Creates a new background job for a maintenance (vacuum) operation.

    Args:
        sid: Server ID
        did: Database ID

    Returns:
        JSON response containing the new job id, or an error response.
    """
    if request.form:
        data = json.loads(request.form['data'], encoding='utf-8')
    else:
        data = json.loads(request.data, encoding='utf-8')

    index_name = get_index_name(data)

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(
        id=sid).first()

    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Could not find the given server")
        )

    # To fetch MetaData for the server
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0,
            errormsg=_("Please connect to the server first.")
        )

    # Locate the 'sql' client utility (psql) used to run the command.
    utility = manager.utility('sql')
    ret_val = does_utility_exist(utility)
    if ret_val:
        return make_json_response(
            success=0,
            errormsg=ret_val
        )

    # Create the command for the vacuum operation
    query = render_template(
        'maintenance/sql/command.sql', conn=conn, data=data,
        index_name=index_name
    )

    # When an SSH tunnel is in use, connect through the local forward
    # instead of the server's real host/port.
    args = [
        '--host',
        manager.local_bind_host if manager.use_ssh_tunnel
        else server.host,
        '--port',
        str(manager.local_bind_port) if manager.use_ssh_tunnel
        else str(server.port),
        '--username', server.username,
        '--dbname', data['database'],
        '--command', query
    ]

    try:
        p = BatchProcess(
            desc=Message(sid, data, query),
            cmd=utility, args=args
        )
        manager.export_password_env(p.id)
        # Check for connection timeout and if it is greater than 0 then
        # set the environment variable PGCONNECT_TIMEOUT.
        if manager.connect_timeout > 0:
            env = dict()
            env['PGCONNECT_TIMEOUT'] = str(manager.connect_timeout)
            p.set_env_variables(server, env=env)
        else:
            p.set_env_variables(server)

        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(
            status=410,
            success=0,
            errormsg=str(e)
        )

    # Return response
    return make_json_response(
        data={'job_id': jid, 'status': True,
              'info': _('Maintenance job created.')}
    )
def wrapped(self, **kwargs):
    # Decorator body for role views: sets up connection/template state,
    # enforces role-management permissions for the enclosing 'action'
    # (a closure variable from the outer decorator — not visible here),
    # and optionally pre-fetches the target role's name/flags before
    # delegating to the wrapped view 'f'.
    self.manager = get_driver(
        PG_DEFAULT_DRIVER).connection_manager(kwargs['sid'])
    self.conn = self.manager.connection()

    driver = get_driver(PG_DEFAULT_DRIVER)
    self.qtIdent = driver.qtIdent

    if not self.conn.connected():
        return precondition_required(
            _("Connection to the server has been lost."))

    # Version-specific SQL template directory.
    self.sql_path = 'roles/sql/#{0}#'.format(self.manager.version)

    # Columns allowed in ALTER ROLE; 'rolreplication' exists only on
    # PostgreSQL 9.2 and above.
    self.alterKeys = [
        u'rolcanlogin', u'rolsuper', u'rolcreatedb',
        u'rolcreaterole', u'rolinherit', u'rolreplication',
        u'rolconnlimit', u'rolvaliduntil', u'rolpassword'
    ] if self.manager.version >= 90200 else [
        u'rolcanlogin', u'rolsuper', u'rolcreatedb',
        u'rolcreaterole', u'rolinherit', u'rolconnlimit',
        u'rolvaliduntil', u'rolpassword'
    ]

    check_permission = False
    fetch_name = False
    forbidden_msg = None

    if action in ['drop', 'update']:
        if 'rid' in kwargs:
            fetch_name = True
            check_permission = True

        if action == 'drop':
            forbidden_msg = _(
                "The current user does not have permission to drop"
                " the role.")
        else:
            forbidden_msg = _(
                "The current user does not have permission to "
                "update the role.")
    elif action == 'create':
        check_permission = True
        forbidden_msg = _(
            "The current user does not have permission to create "
            "the role.")
    elif action == 'msql' and 'rid' in kwargs:
        fetch_name = True

    if check_permission:
        user = self.manager.user_info

        # Superusers and users with CREATEROLE may manage any role;
        # others may only act on their own role.
        if not user['is_superuser'] and \
                not user['can_create_role']:
            if action != 'update' or 'rid' in kwargs:
                if kwargs['rid'] != -1:
                    if user['id'] != kwargs['rid']:
                        return forbidden(forbidden_msg)

    if fetch_name:
        # Pre-load the target role's name and key flags for the view.
        status, res = self.conn.execute_dict(
            render_template(self.sql_path + 'permission.sql',
                            rid=kwargs['rid'],
                            conn=self.conn))

        if not status:
            return internal_server_error(
                _("Error retrieving the role information.\n{0}").
                format(res))

        if len(res['rows']) == 0:
            return gone(
                _("Could not find the role on the database "
                  "server."))

        row = res['rows'][0]
        self.role = row['rolname']
        self.rolCanLogin = row['rolcanlogin']
        self.rolCatUpdate = row['rolcatupdate']
        self.rolSuper = row['rolsuper']

    return f(self, **kwargs)
def additional_properties(self, copy_dict, tid):
    """
    We will use this function to add additional properties according to
    type (range/composite/enum).

    Args:
        copy_dict: Properties of the type fetched so far; must contain
            'typtype' (and 'typrelid' for composite types).
        tid: Type OID.

    Returns:
        dict of additional properties for the type, or an error
        response when a catalogue query fails.
    """
    # Hoisted out of the per-row loop (used in two places below).
    import re

    # Fetching type of type
    of_type = copy_dict['typtype']
    res = dict()

    # If type is of Composite then we need to add members list in our
    # output
    if of_type == 'c':
        SQL = render_template(
            "/".join([self.template_path, 'additional_properties.sql']),
            type='c',
            typrelid=copy_dict['typrelid'])
        status, rset = self.conn.execute_2darray(SQL)
        if not status:
            # Report the query's error text, not the empty result dict.
            return internal_server_error(errormsg=rset)

        # To display in properties
        properties_list = []
        # To display in composite collection grid
        composite_lst = []

        for row in rset['rows']:
            # We will fetch Full type name
            typelist = ' '.join([row['attname'], row['fulltype']])
            if (not row['collname'] or
                    (row['collname'] == 'default' and
                     row['collnspname'] == 'pg_catalog')):
                full_collate = ''
                collate = ''
            else:
                full_collate = get_driver(PG_DEFAULT_DRIVER).qtIdent(
                    self.conn, row['collnspname'], row['collname'])
                collate = ' COLLATE ' + full_collate
            typelist += collate
            properties_list.append(typelist)

            is_tlength = False
            is_precision = False
            if 'elemoid' in row:
                is_tlength, is_precision, typeval = \
                    self.get_length_precision(row['elemoid'])

            # Below logic will allow us to split length, precision from
            # type name for grid
            t_len = None
            t_prec = None
            if is_tlength and is_precision:
                # If we have length & precision both
                matchObj = re.search(r'(\d+),(\d+)', row['fulltype'])
                if matchObj:
                    t_len = matchObj.group(1)
                    t_prec = matchObj.group(2)
            elif is_tlength:
                # If we have length only
                matchObj = re.search(r'(\d+)', row['fulltype'])
                if matchObj:
                    t_len = matchObj.group(1)
                    t_prec = None

            type_name = DataTypeReader.parse_type_name(row['typname'])

            row['type'] = self._cltype_formatter(type_name)
            row['hasSqrBracket'] = self.hasSqrBracket
            row = self.convert_length_precision_to_string(row)

            composite_lst.append({
                'attnum': row['attnum'],
                'member_name': row['attname'],
                'type': type_name,
                'collation': full_collate,
                'cltype': row['type'],
                'tlength': t_len,
                'precision': t_prec,
                'is_tlength': is_tlength,
                'is_precision': is_precision,
                'hasSqrBracket': row['hasSqrBracket'],
                'fulltype': row['fulltype']
            })

        # Adding both results
        res['member_list'] = ', '.join(properties_list)
        res['composite'] = composite_lst

    # If type is of ENUM then we need to add labels in our output
    if of_type == 'e':
        SQL = render_template(
            "/".join([self.template_path, 'additional_properties.sql']),
            type='e', tid=tid)
        status, rset = self.conn.execute_2darray(SQL)
        if not status:
            # Report the query's error text, not the empty result dict.
            return internal_server_error(errormsg=rset)

        # To display in properties
        properties_list = []
        # To display in enum grid
        enum_list = []
        for row in rset['rows']:
            properties_list.append(row['enumlabel'])
            enum_list.append({'label': row['enumlabel']})

        # Adding both results in ouput
        res['enum_list'] = ', '.join(properties_list)
        res['enum'] = enum_list

    # If type is of Range then we need to add collation,subtype etc in
    # our output
    if of_type == 'r':
        SQL = render_template(
            "/".join([self.template_path, 'additional_properties.sql']),
            type='r', tid=tid)
        status, res = self.conn.execute_dict(SQL)
        if not status:
            return internal_server_error(errormsg=res)

        range_dict = dict(res['rows'][0])
        res.update(range_dict)

    if 'seclabels' in copy_dict and copy_dict['seclabels'] is not None:
        sec_labels = []
        for sec in copy_dict['seclabels']:
            # Security labels come back as 'provider=label' strings.
            sec = re.search(r'([^=]+)=(.*$)', sec)
            sec_labels.append({
                'provider': sec.group(1),
                'label': sec.group(2)
            })
        res['seclabels'] = sec_labels

    # Returning only additional properties only
    return res
def get_dependents(self, conn, sid, rid):
    """
    This function is used to fetch the dependents for the selected node.

    Args:
        conn: Connection object
        sid: Server Id
        rid: Role Id.

    Returns:
        List of dependents for the selected role; each entry is a dict
        with 'type', 'name' and 'field' (database name) keys.
    """
    # Dictionary for the object types
    types = {
        # None specified special handling for this type
        'r': 'table',
        'i': None,
        'S': 'sequence',
        'v': 'view',
        'x': 'external_table',
        'p': 'function',
        'n': 'schema',
        'y': 'type',
        'd': 'domain',
        'T': 'trigger_function',
        'C': 'conversion',
        'o': None
    }

    query = render_template("/".join([self.sql_path, 'dependents.sql']),
                            fetch_database=True, rid=rid)
    status, db_result = self.conn.execute_dict(query)
    if not status:
        current_app.logger.error(db_result)
        # Without the database list we cannot look up anything else;
        # db_result holds the error string, not rows.
        return list()

    dependents = list()

    # Get the server manager
    manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(sid)

    for db_row in db_result['rows']:
        oid = db_row['datdba']
        if db_row['type'] == 'd':
            if rid == oid:
                dependents.append({
                    'type': 'database',
                    'name': '',
                    'field': db_row['datname']
                })
        else:
            dependents.append({
                'type': 'tablespace',
                'name': db_row['datname'],
                'field': ''
            })

        # If connection to the database is not allowed then continue
        # with the next database
        if not db_row['datallowconn']:
            continue

        # Get the connection from the manager for the specified
        # database. Check the connect status and if it is not connected
        # then create a new connection to run the query and fetch the
        # dependents.
        temp_conn = None
        is_connected = True
        try:
            temp_conn = manager.connection(db_row['datname'])
            is_connected = temp_conn.connected()
            if not is_connected:
                temp_conn.connect()
        except Exception as e:
            current_app.logger.exception(e)

        # temp_conn may still be None when obtaining the connection
        # itself failed above.
        if temp_conn is not None and temp_conn.connected():
            query = render_template(
                "/".join([self.sql_path, 'dependents.sql']),
                fetch_dependents=True, rid=rid,
                lastsysoid=db_row['datlastsysoid'])
            status, result = temp_conn.execute_dict(query)
            if not status:
                # result is an error string here; log and skip this
                # database instead of iterating it as rows.
                current_app.logger.error(result)
            else:
                for row in result['rows']:
                    rel_name = row['nspname']
                    if rel_name is not None:
                        rel_name += '.'

                    if rel_name is None:
                        rel_name = row['relname']
                    else:
                        rel_name += row['relname']

                    type_name = ''
                    type_str = row['relkind']
                    # Fetch the type name from the dictionary
                    # if type is not present in the types dictionary then
                    # we will continue and not going to add it.
                    if type_str[0] in types:
                        # if type is present in the types dictionary, but
                        # it's value is None then it requires special
                        # handling.
                        if types[type_str[0]] is None:
                            if type_str[0] == 'i':
                                type_name = 'index'
                                rel_name = \
                                    row['indname'] + ' ON ' + rel_name
                            elif type_str[0] == 'o':
                                type_name = 'operator'
                                rel_name = row['relname']
                        else:
                            type_name = types[type_str[0]]
                    else:
                        continue

                    dependents.append({
                        'type': type_name,
                        'name': rel_name,
                        'field': db_row['datname']
                    })

        # Release only those connections which we have created above.
        if temp_conn is not None and not is_connected:
            manager.release(db_row['datname'])

    return dependents
def create_backup_job(sid):
    """
    Creates a new background job for a backup task
    (Backup Server/Globals).

    Args:
        sid: Server ID

    Returns:
        JSON response containing the new job id, or an error response.
    """
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        data = json.loads(data['data'][0], encoding='utf-8')
    else:
        data = json.loads(request.data, encoding='utf-8')

    try:
        # Resolve the target file against the user's storage directory.
        backup_file = filename_with_file_manager_path(data['file'])
    except Exception as e:
        return bad_request(errormsg=str(e))

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(
        id=sid, user_id=current_user.id).first()

    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Could not find the specified server."))

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0,
            errormsg=_("Please connect to the server first."))

    # pg_dumpall is registered as the 'backup_server' utility.
    utility = manager.utility('backup_server')

    args = [
        '--file', backup_file,
        '--host', server.host,
        '--port', str(server.port),
        '--username', server.username,
        '--no-password',
        '--database', server.maintenance_db
    ]

    if 'role' in data and data['role']:
        args.append('--role')
        args.append(data['role'])
    if 'verbose' in data and data['verbose']:
        args.append('--verbose')
    # NOTE: 'dqoute' is the (misspelled) key the client sends; keep it.
    if 'dqoute' in data and data['dqoute']:
        args.append('--quote-all-identifiers')
    if data['type'] == 'global':
        args.append('--globals-only')

    try:
        p = BatchProcess(
            desc=BackupMessage(
                BACKUP.SERVER if data['type'] != 'global'
                else BACKUP.GLOBALS,
                sid,
                data['file'].encode('utf-8')
                if hasattr(data['file'], 'encode') else data['file'],
                *args),
            cmd=utility, args=args)
        manager.export_password_env(p.id)
        p.set_env_variables(server)
        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(
            status=410,
            success=0,
            errormsg=str(e))

    # Return response
    return make_json_response(
        data={'job_id': jid, 'success': 1})
def change_password(self, gid, sid):
    """
    This function is used to change the password of the
    Database Server.

    Args:
        gid: Group id
        sid: Server id
    """
    try:
        data = json.loads(request.form['data'], encoding='utf-8')

        # Fetch Server Details
        server = Server.query.filter_by(id=sid).first()
        if server is None:
            return bad_request(gettext("Server not found."))

        # Fetch User Details.
        user = User.query.filter_by(id=current_user.id).first()
        if user is None:
            return unauthorized(gettext("Unauthorized request."))

        manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(sid)
        conn = manager.connection()
        is_passfile = False

        # If there is no password found for the server
        # then check for pgpass file
        if not server.password and not manager.password:
            if server.passfile and \
                    manager.passfile and \
                    server.passfile == manager.passfile:
                is_passfile = True

        # Check for password only if there is no pgpass file used
        if not is_passfile:
            if data and ('password' not in data or
                         data['password'] == ''):
                return make_json_response(
                    status=400,
                    success=0,
                    errormsg=gettext(
                        "Could not find the required parameter(s)."
                    )
                )

        # New/confirm passwords are required regardless of passfile use.
        if data and ('newPassword' not in data or
                     data['newPassword'] == '' or
                     'confirmPassword' not in data or
                     data['confirmPassword'] == ''):
            return make_json_response(
                status=400,
                success=0,
                errormsg=gettext(
                    "Could not find the required parameter(s)."
                )
            )

        if data['newPassword'] != data['confirmPassword']:
            return make_json_response(
                status=200,
                success=0,
                errormsg=gettext(
                    "Passwords do not match."
                )
            )

        # Check against old password only if no pgpass file
        if not is_passfile:
            # Stored password is encrypted with the pgAdmin user's
            # login password.
            decrypted_password = decrypt(manager.password, user.password)

            if isinstance(decrypted_password, bytes):
                decrypted_password = decrypted_password.decode()

            password = data['password']

            # Validate old password before setting new.
            if password != decrypted_password:
                return unauthorized(gettext("Incorrect password."))

        # Hash new password before saving it.
        password = pqencryptpassword(data['newPassword'], manager.user)

        SQL = render_template(
            "/servers/sql/#{0}#/change_password.sql".format(
                manager.version),
            conn=conn, _=gettext,
            user=manager.user, encrypted_password=password)

        status, res = conn.execute_scalar(SQL)

        if not status:
            return internal_server_error(errormsg=res)

        # Store password in sqlite only if no pgpass file
        if not is_passfile:
            password = encrypt(data['newPassword'], user.password)
            # Check if old password was stored in pgadmin4 sqlite
            # database. If yes then update that password.
            if server.password is not None and \
                    config.ALLOW_SAVE_PASSWORD:
                setattr(server, 'password', password)
                db.session.commit()
            # Also update password in connection manager.
            manager.password = password
            manager.update_session()

        return make_json_response(
            status=200,
            success=1,
            info=gettext(
                "Password changed successfully."
            )
        )

    except Exception as e:
        return internal_server_error(errormsg=str(e))
def save(self, changed_data, columns_info,
         client_primary_key='__temp_PK', default_conn=None):
    """
    This function is used to save the data into the database.
    Depending on condition it will either update or insert the
    new row into the database.

    Args:
        changed_data: Contains data to be saved
        columns_info: Per-column metadata (type name, not-null,
            default info)
        client_primary_key: Client-side key used to track rows
        default_conn: Connection object to use (optional)

    Returns:
        Tuple of (status, res, query_res, _rowid).
    """
    driver = get_driver(PG_DEFAULT_DRIVER)
    if default_conn is None:
        manager = driver.connection_manager(self.sid)
        conn = manager.connection(did=self.did, conn_id=self.conn_id)
    else:
        conn = default_conn

    status = False
    res = None
    query_res = dict()
    count = 0
    list_of_rowid = []
    operations = ('added', 'updated', 'deleted')
    list_of_sql = {}
    _rowid = None

    def _mark_rollback_and_rowid():
        # After a ROLLBACK, rewrite the message of every statement
        # that had succeeded, and work out which client row failed.
        for val in query_res:
            if query_res[val]['status']:
                query_res[val]['result'] = 'Transaction ROLLBACK'
        # If list is empty set rowid to 1
        try:
            return list_of_rowid[count] if list_of_rowid else 1
        except Exception:
            return 0

    if not conn.connected():
        return status, res, query_res, _rowid

    # Start the transaction
    conn.execute_void('BEGIN;')

    # Iterate total number of records to be updated/inserted
    for of_type in changed_data:
        # No need to go further if its not add/update/delete operation
        if of_type not in operations:
            continue
        # if no data to be save then continue
        if len(changed_data[of_type]) < 1:
            continue

        column_type = {}
        column_data = {}
        for each_col in columns_info:
            if (columns_info[each_col]['not_null'] and
                    not columns_info[each_col]['has_default_val']):
                column_data[each_col] = None
                column_type[each_col] = \
                    columns_info[each_col]['type_name']
            else:
                column_type[each_col] = \
                    columns_info[each_col]['type_name']

        # For newly added rows
        if of_type == 'added':
            # Python dict does not honour the inserted item order
            # So to insert data in the order, we need to make ordered
            # list of added index We don't need this mechanism in
            # updated/deleted rows as it does not matter in
            # those operations
            added_index = OrderedDict(
                sorted(changed_data['added_index'].items(),
                       key=lambda x: int(x[0])))
            list_of_sql[of_type] = []

            # When new rows are added, only changed columns data is
            # sent from client side. But if column is not_null and has
            # no_default_value, set column to blank, instead
            # of not null which is set by default.
            column_data = {}
            pk_names, primary_keys = self.get_primary_keys()
            has_oids = 'oid' in column_type

            for each_row in added_index:
                # Get the row index to match with the added rows
                # dict key
                tmp_row_index = added_index[each_row]
                data = changed_data[of_type][tmp_row_index]['data']

                # Record the client row id BEFORE stripping the
                # tracking keys — popping first always recorded None.
                list_of_rowid.append(data.get(client_primary_key))

                # Remove our unique tracking key
                data.pop(client_primary_key, None)
                data.pop('is_row_copied', None)

                # Update columns value with columns having
                # not_null=False and has no default value
                column_data.update(data)

                sql = render_template(
                    "/".join([self.sql_path, 'insert.sql']),
                    data_to_be_saved=column_data,
                    primary_keys=None,
                    object_name=self.object_name,
                    nsp_name=self.nsp_name,
                    data_type=column_type,
                    pk_names=pk_names,
                    has_oids=has_oids)

                select_sql = render_template(
                    "/".join([self.sql_path, 'select.sql']),
                    object_name=self.object_name,
                    nsp_name=self.nsp_name,
                    primary_keys=primary_keys,
                    has_oids=has_oids)

                list_of_sql[of_type].append({
                    'sql': sql, 'data': data,
                    'client_row': tmp_row_index,
                    'select_sql': select_sql
                })
                # Reset column data
                column_data = {}

        # For updated rows
        elif of_type == 'updated':
            list_of_sql[of_type] = []
            for each_row in changed_data[of_type]:
                data = changed_data[of_type][each_row]['data']
                pk = changed_data[of_type][each_row]['primary_keys']
                sql = render_template(
                    "/".join([self.sql_path, 'update.sql']),
                    data_to_be_saved=data,
                    primary_keys=pk,
                    object_name=self.object_name,
                    nsp_name=self.nsp_name,
                    data_type=column_type)
                list_of_sql[of_type].append({'sql': sql, 'data': data})
                list_of_rowid.append(data.get(client_primary_key))

        # For deleted rows
        elif of_type == 'deleted':
            list_of_sql[of_type] = []
            is_first = True
            rows_to_delete = []
            keys = None
            no_of_keys = None
            for each_row in changed_data[of_type]:
                rows_to_delete.append(changed_data[of_type][each_row])
                # Fetch the keys for SQL generation
                if is_first:
                    # We need to covert dict_keys to normal list in
                    # Python3. In Python2, it's already a list & We
                    # will also fetch column names using index
                    keys = list(
                        changed_data[of_type][each_row].keys())
                    no_of_keys = len(keys)
                    is_first = False

            # Map index with column name for each row. Iterate a
            # snapshot of the items — mutating the dict while
            # iterating row.items() raises RuntimeError on Python 3.
            for row in rows_to_delete:
                for k, v in list(row.items()):
                    # Set primary key with label & delete index based
                    # mapped key
                    try:
                        row[changed_data['columns'][int(k)]
                            ['name']] = v
                    except ValueError:
                        continue
                    del row[k]

            sql = render_template(
                "/".join([self.sql_path, 'delete.sql']),
                data=rows_to_delete,
                primary_key_labels=keys,
                no_of_keys=no_of_keys,
                object_name=self.object_name,
                nsp_name=self.nsp_name)
            list_of_sql[of_type].append({'sql': sql, 'data': {}})

    for opr, sqls in list_of_sql.items():
        for item in sqls:
            if not item['sql']:
                continue
            row_added = None

            # Fetch oids/primary keys
            if 'select_sql' in item and item['select_sql']:
                status, res = conn.execute_dict(
                    item['sql'], item['data'])
            else:
                status, res = conn.execute_void(
                    item['sql'], item['data'])

            if not status:
                conn.execute_void('ROLLBACK;')
                _rowid = _mark_rollback_and_rowid()
                return status, res, query_res, _rowid

            # Select added row from the table
            if 'select_sql' in item:
                status, sel_res = conn.execute_dict(
                    item['select_sql'], res['rows'][0])

                if not status:
                    conn.execute_void('ROLLBACK;')
                    _rowid = _mark_rollback_and_rowid()
                    return status, sel_res, query_res, _rowid

                if 'rows' in sel_res and len(sel_res['rows']) > 0:
                    row_added = {
                        item['client_row']: sel_res['rows'][0]
                    }

            rows_affected = conn.rows_affected()

            # store the result of each query in dictionary
            query_res[count] = {
                'status': status,
                'result': None if row_added else res,
                # the SQL executed for THIS item, not the last one
                # rendered above
                'sql': item['sql'],
                'rows_affected': rows_affected,
                'row_added': row_added
            }
            count += 1

    # Commit the transaction if there is no error found
    conn.execute_void('COMMIT;')

    return status, res, query_res, _rowid
def save(trans_id):
    """
    This method is used to save the data changes to the server.

    Args:
        trans_id: unique transaction id
    """
    if request.data:
        changed_data = json.loads(request.data, encoding='utf-8')
    else:
        changed_data = request.args or request.form

    # Check the transaction and connection status
    status, error_msg, conn, trans_obj, session_obj = \
        check_transaction_status(trans_id)

    if error_msg == gettext('Transaction ID not found in the session.'):
        return make_json_response(success=0, errormsg=error_msg,
                                  info='DATAGRID_TRANSACTION_REQUIRED',
                                  status=404)

    if status and conn is not None and \
            trans_obj is not None and session_obj is not None:

        # If there is no primary key found then return from the
        # function.
        if ('primary_keys' not in session_obj or
            len(session_obj['primary_keys']) <= 0 or
            len(changed_data) <= 0) and \
                'has_oids' not in session_obj:
            return make_json_response(
                data={
                    'status': False,
                    'result': gettext('No primary key found for this '
                                      'object, so unable to save '
                                      'records.')
                }
            )

        manager = get_driver(
            PG_DEFAULT_DRIVER).connection_manager(trans_obj.sid)
        if hasattr(trans_obj, 'conn_id'):
            conn = manager.connection(did=trans_obj.did,
                                      conn_id=trans_obj.conn_id)
        else:
            # default connection
            conn = manager.connection(did=trans_obj.did)

        # Connect to the Server if not connected.
        if not conn.connected():
            status, msg = conn.connect()
            if not status:
                return make_json_response(
                    data={'status': status,
                          'result': u"{}".format(msg)}
                )

        status, res, query_results, _rowid = trans_obj.save(
            changed_data,
            session_obj['columns_info'],
            session_obj['client_primary_key'],
            conn)
    else:
        status = False
        res = error_msg
        query_results = None
        _rowid = None

    # 'conn' is None when check_transaction_status failed (e.g. the
    # connection to the server was lost) — guard the status call
    # instead of raising AttributeError.
    transaction_status = \
        conn.transaction_status() if conn is not None else None

    return make_json_response(
        data={
            'status': status,
            'result': res,
            'query_results': query_results,
            '_rowid': _rowid,
            'transaction_status': transaction_status
        }
    )
def get_all_columns_with_order(self, default_conn=None):
    """
    Overridden specially for Table objects: the primary-key columns
    are fetched first (with a default sort order), followed by the
    remaining columns of the table.

    Args:
        default_conn: Connection object

    Returns:
        all_sorted_columns: Sorted columns for the Grid
        all_columns: List of columns for the select2 options
    """
    driver = get_driver(PG_DEFAULT_DRIVER)
    if default_conn is not None:
        conn = default_conn
    else:
        conn = driver.connection_manager(self.sid).connection(
            did=self.did, conn_id=self.conn_id)

    sorted_columns = []
    column_names = []
    custom_sorting = self.get_data_sorting()

    # Primary-key columns first, each carrying the default PK order.
    pk_sql = render_template(
        "/".join([self.sql_path, 'primary_keys.sql']),
        obj_id=self.obj_id
    )
    ok, pk_result = conn.execute_dict(pk_sql)
    if not ok:
        raise Exception(pk_result)

    for pk_row in pk_result['rows']:
        column_names.append(pk_row['attname'])
        sorted_columns.append(
            {'name': pk_row['attname'], 'order': self.get_pk_order()}
        )

    # Then every remaining column of the table.
    col_sql = render_template(
        "/".join([self.sql_path, 'get_columns.sql']),
        obj_id=self.obj_id
    )
    ok, col_result = conn.execute_dict(col_sql)
    if not ok:
        raise Exception(col_result)

    for col_row in col_result['rows']:
        # Skip columns already collected from the primary key.
        if col_row['attname'] not in column_names:
            column_names.append(col_row['attname'])

    # A user-defined sort order takes precedence over the PK order.
    if custom_sorting and len(custom_sorting) > 0:
        sorted_columns = custom_sorting

    return sorted_columns, column_names
def initialize_datagrid(cmd_type, obj_type, sgid, sid, did, obj_id):
    """
    This method is responsible for creating an asynchronous connection.
    After creating the connection it will instantiate and initialize
    the object as per the object type. It will also create a unique
    transaction id and store the information into session variable.

    Args:
        cmd_type: Contains value for which menu item is clicked.
        obj_type: Contains type of selected object for which data grid to
            be render
        sgid: Server group Id
        sid: Server Id
        did: Database Id
        obj_id: Id of currently selected object

    Returns:
        JSON response with the new grid transaction id and the user's
        'open in new browser tab' preference.
    """
    if request.data:
        # NOTE(review): json.loads' 'encoding' kwarg was removed in
        # Python 3.9 -- confirm the supported interpreter versions.
        filter_sql = json.loads(request.data, encoding='utf-8')
    else:
        filter_sql = request.args or request.form

    # Create asynchronous connection using random connection id.
    conn_id = str(random.randint(1, 9999999))
    try:
        manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(sid)
        # default_conn is same connection which is created when user connect
        # to database from tree
        default_conn = manager.connection(did=did)
        conn = manager.connection(did=did, conn_id=conn_id,
                                  auto_reconnect=False,
                                  use_binary_placeholder=True,
                                  array_to_string=True)
    except ConnectionLost as e:
        # Let the global handler deal with a lost connection.
        raise
    except Exception as e:
        app.logger.error(e)
        return internal_server_error(errormsg=str(e))

    # Ensure both the tree's default connection and the new grid
    # connection are actually connected.
    status, msg = default_conn.connect()
    if not status:
        app.logger.error(msg)
        return internal_server_error(errormsg=str(msg))

    status, msg = conn.connect()
    if not status:
        app.logger.error(msg)
        return internal_server_error(errormsg=str(msg))

    try:
        # if object type is partition then it is nothing but a table.
        if obj_type == 'partition':
            obj_type = 'table'

        # Get the object as per the object type
        command_obj = ObjectRegistry.get_object(
            obj_type, conn_id=conn_id, sgid=sgid, sid=sid,
            did=did, obj_id=obj_id, cmd_type=cmd_type,
            sql_filter=filter_sql)
    except Exception as e:
        app.logger.error(e)
        return internal_server_error(errormsg=str(e))

    # Create a unique id for the transaction
    trans_id = str(random.randint(1, 9999999))

    if 'gridData' not in session:
        sql_grid_data = dict()
    else:
        sql_grid_data = session['gridData']

    # Use pickle to store the command object which will be used later by the
    # sql grid module.
    sql_grid_data[trans_id] = {
        # -1 specify the highest protocol version available
        'command_obj': pickle.dumps(command_obj, -1)
    }

    # Store the grid dictionary into the session variable
    session['gridData'] = sql_grid_data

    pref = Preferences.module('sqleditor')
    new_browser_tab = pref.preference('new_browser_tab').get()

    return make_json_response(
        data={
            'gridTransId': trans_id,
            'newBrowserTab': new_browser_tab
        }
    )
def connect(self, gid, sid):
    """
    Connect the Server and return the connection object.

    Verification Process before Connection:
        Verify requested server.
        Check the server password is already been stored in the
        database or not.
        If Yes, connect the server and return connection.
        If No, Raise HTTP error and ask for the password.

        In case of 'Save Password' request from user, the encrypted
        password will be stored in the respective server database and
        the connection established, OR just connect the server and do
        not store the password.

    Fixes: Python 3 exceptions have no ``.message`` attribute; the two
    ``internal_server_error(errormsg=e.message)`` calls raised
    AttributeError and masked the real error. They now use
    ``getattr(e, 'message', str(e))``, the idiom already used in this
    method's connect-failure handler.
    """
    current_app.logger.info(
        'Connection Request for server#{0}'.format(sid)
    )

    # Fetch Server Details
    server = Server.query.filter_by(id=sid).first()
    if server is None:
        return bad_request(gettext("Server not found."))

    if current_user and hasattr(current_user, 'id'):
        # Fetch User Details.
        user = User.query.filter_by(id=current_user.id).first()
        if user is None:
            return unauthorized(gettext("Unauthorized request."))
    else:
        return unauthorized(gettext("Unauthorized request."))

    data = request.form if request.form else json.loads(
        request.data, encoding='utf-8'
    ) if request.data else {}

    password = None
    passfile = None
    save_password = False

    # Connect the Server
    manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(sid)
    conn = manager.connection()

    if 'password' not in data:
        conn_passwd = getattr(conn, 'password', None)
        if conn_passwd is None and server.password is None and \
                server.passfile is None and server.service is None:
            # Return the password template in case password is not
            # provided, or password has not been saved earlier.
            return make_json_response(
                success=0,
                status=428,
                result=render_template(
                    'servers/password.html',
                    server_label=server.name,
                    username=server.username,
                    _=gettext
                )
            )
        elif server.passfile and server.passfile != '':
            passfile = server.passfile
        else:
            password = conn_passwd or server.password
    else:
        password = data['password'] if 'password' in data else None
        save_password = data['save_password'] \
            if password and 'save_password' in data else False

        # Encrypt the password before saving with user's login
        # password key.
        try:
            password = encrypt(password, user.password) \
                if password is not None else server.password
        except Exception as e:
            current_app.logger.exception(e)
            # FIX: was e.message (AttributeError on Python 3).
            return internal_server_error(
                errormsg=getattr(e, 'message', str(e)))

    status = True
    try:
        status, errmsg = conn.connect(
            password=password,
            passfile=passfile,
            server_types=ServerType.types()
        )
    except Exception as e:
        current_app.logger.exception(e)
        # Ask for the password again, surfacing the driver error.
        return make_json_response(
            success=0,
            status=401,
            result=render_template(
                'servers/password.html',
                server_label=server.name,
                username=server.username,
                errmsg=getattr(e, 'message', str(e)),
                _=gettext
            )
        )

    if not status:
        # Python 2 only: driver errors arrive as utf-8 bytes.
        if hasattr(str, 'decode'):
            errmsg = errmsg.decode('utf-8')

        current_app.logger.error(
            "Could not connected to server(#{0}) - '{1}'.\nError: {2}"
            .format(server.id, server.name, errmsg)
        )
        return make_json_response(
            success=0,
            status=401,
            result=render_template(
                'servers/password.html',
                server_label=server.name,
                username=server.username,
                errmsg=errmsg,
                _=gettext
            )
        )
    else:
        if save_password and config.ALLOW_SAVE_PASSWORD:
            try:
                # Save the encrypted password using the user's login
                # password key.
                setattr(server, 'password', password)
                db.session.commit()
            except Exception as e:
                # Release Connection
                current_app.logger.exception(e)
                manager.release(database=server.maintenance_db)
                conn = None
                # FIX: was e.message (AttributeError on Python 3).
                return internal_server_error(
                    errormsg=getattr(e, 'message', str(e)))

        current_app.logger.info('Connection Established for server: \
%s - %s' % (server.id, server.name))

        # Update the recovery and wal pause option for the server
        # if connected successfully
        in_recovery, wal_paused = recovery_state(conn, manager.version)

        return make_json_response(
            success=1,
            info=gettext("Server connected."),
            data={
                'icon': server_icon_and_background(True, manager, server),
                'connected': True,
                'server_type': manager.server_type,
                'type': manager.server_type,
                'version': manager.version,
                'db': manager.db,
                'user': manager.user_info,
                'in_recovery': in_recovery,
                'wal_pause': wal_paused
            }
        )
def create_restore_job(sid):
    """
    Creates a new background job for a restore task (pg_restore).

    Args:
        sid: Server ID

    Returns:
        JSON response carrying the new job id, or an error response.
    """
    if request.form:
        data = json.loads(request.form['data'], encoding='utf-8')
    else:
        data = json.loads(request.data, encoding='utf-8')

    # Resolve the user-supplied path against the storage manager root.
    try:
        _file = filename_with_file_manager_path(data['file'])
    except Exception as e:
        return bad_request(errormsg=str(e))

    if _file is None:
        return make_json_response(
            status=410,
            success=0,
            errormsg=_("File could not be found.")
        )

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(id=sid).first()
    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Could not find the specified server.")
        )

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0,
            errormsg=_("Please connect to the server first.")
        )

    utility = manager.utility('restore')
    ret_val = is_utility_exists(utility)
    if ret_val:
        return make_json_response(
            success=0,
            errormsg=ret_val
        )

    args = []

    if 'list' in data:
        args.append('--list')
    else:
        # Closure helpers that translate the request payload into
        # pg_restore command-line arguments (all close over data/args).

        def set_param(key, param):
            # Append a flag when the key is present and truthy.
            if key in data and data[key]:
                args.append(param)
                return True
            return False

        def set_value(key, param, default_value=None):
            # Append "param value" from data, or a default when provided.
            if key in data and data[key] is not None and data[key] != '':
                args.append(param)
                args.append(data[key])
            elif default_value is not None:
                args.append(param)
                args.append(default_value)

        def set_multiple(key, param, with_schema=True):
            # Append "param value" once per entry; schema-qualify when
            # requested.
            if key in data:
                if len(data[key]) > 0:
                    if with_schema:
                        # TODO:// This is temporary
                        # Once object tree is implemented then we will use
                        # list of tuples 'else' part
                        if isinstance(data[key], list):
                            s, t = data[key]
                            args.extend([
                                param,
                                driver.qtIdent(conn, s) + '.' +
                                driver.qtIdent(conn, t)
                            ])
                        else:
                            for s, o in data[key]:
                                args.extend([
                                    param,
                                    driver.qtIdent(conn, s) + '.' +
                                    driver.qtIdent(conn, o)
                                ])
                    else:
                        for o in data[key]:
                            args.extend([param, o])
                    return True
            return False

        # Connection arguments; prefer the SSH tunnel's local bind when
        # one is in use.
        args.extend([
            '--host',
            manager.local_bind_host if manager.use_ssh_tunnel
            else server.host,
            '--port',
            str(manager.local_bind_port) if manager.use_ssh_tunnel
            else str(server.port),
            '--username', server.username, '--no-password'
        ])

        set_value('role', '--role')
        set_value('database', '--dbname')

        if data['format'] == 'directory':
            args.extend(['--format=d'])

        set_param('pre_data', '--section=pre-data')
        set_param('data', '--section=data')
        set_param('post_data', '--section=post-data')

        # --data-only excludes the ownership/privilege/tablespace options.
        if not set_param('only_data', '--data-only'):
            set_param('dns_owner', '--no-owner')
            set_param('dns_privilege', '--no-privileges')
            set_param('dns_tablespace', '--no-tablespaces')

        # --schema-only excludes --disable-triggers.
        if not set_param('only_schema', '--schema-only'):
            set_param('disable_trigger', '--disable-triggers')

        set_param('include_create_database', '--create')
        set_param('clean', '--clean')
        set_param('single_transaction', '--single-transaction')
        set_param('no_data_fail_table', '--no-data-for-failed-tables')
        set_param('use_set_session_auth', '--use-set-session-authorization')
        set_param('exit_on_error', '--exit-on-error')

        # --no-comments is only supported from PostgreSQL 11 onwards.
        if manager.version >= 110000:
            set_param('no_comments', '--no-comments')

        set_value('no_of_jobs', '--jobs')
        set_param('verbose', '--verbose')

        set_multiple('schemas', '--schema', False)
        set_multiple('tables', '--table', False)
        set_multiple('functions', '--function', False)
        set_multiple('triggers', '--trigger', False)
        set_multiple('trigger_funcs', '--function', False)
        set_multiple('indexes', '--index', False)

    args.append(fs_short_path(_file))

    try:
        p = BatchProcess(
            desc=RestoreMessage(
                sid,
                data['file'].encode('utf-8') if hasattr(
                    data['file'], 'encode') else data['file'],
                *args),
            cmd=utility, args=args)
        manager.export_password_env(p.id)
        # Check for connection timeout and if it is greater than 0 then
        # set the environment variable PGCONNECT_TIMEOUT.
        if manager.connect_timeout > 0:
            env = dict()
            env['PGCONNECT_TIMEOUT'] = str(manager.connect_timeout)
            p.set_env_variables(server, env=env)
        else:
            p.set_env_variables(server)

        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(
            status=410,
            success=0,
            errormsg=str(e)
        )

    # Return response
    return make_json_response(
        data={'job_id': jid, 'Success': 1}
    )
def create_import_export_job(sid):
    """
    Creates a new background job for the import/export table data
    functionality (runs a COPY FROM/TO command via the psql utility).

    Args:
        sid: Server ID

    Returns:
        JSON response carrying the new job id, or an error response.
    """
    if request.form:
        data = json.loads(request.form['data'], encoding='utf-8')
    else:
        data = json.loads(request.data, encoding='utf-8')

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(
        id=sid).first()

    if server is None:
        return bad_request(errormsg=_("Could not find the given server"))

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return bad_request(errormsg=_("Please connect to the server first..."))

    # Get the utility path from the connection manager
    utility = manager.utility('sql')
    ret_val = does_utility_exist(utility)
    if ret_val:
        return make_json_response(
            success=0,
            errormsg=ret_val
        )

    # Get the storage path from preference
    storage_dir = get_storage_directory()

    if 'filename' in data:
        try:
            _file = filename_with_file_manager_path(
                data['filename'], data['is_import'])
        except Exception as e:
            return bad_request(errormsg=str(e))

        if not _file:
            return bad_request(errormsg=_('Please specify a valid file'))
        elif IS_WIN:
            # psql on Windows expects forward slashes in paths.
            _file = _file.replace('\\', '/')

        data['filename'] = _file
    else:
        return bad_request(errormsg=_('Please specify a valid file'))

    # Get required and ignored column list
    icols = _get_ignored_column_list(data, driver, conn)
    cols = _get_required_column_list(data, driver, conn)

    # Create the COPY FROM/TO from template
    query = render_template(
        'import_export/sql/cmd.sql',
        conn=conn,
        data=data,
        columns=cols,
        ignore_column_list=icols
    )

    args = ['--command', query]

    try:
        io_params = {
            'sid': sid,
            'schema': data['schema'],
            'table': data['table'],
            'database': data['database'],
            'is_import': data['is_import'],
            'filename': data['filename'],
            'storage': storage_dir,
            'utility': utility
        }

        p = BatchProcess(
            desc=IEMessage(
                *args,
                **io_params
            ),
            cmd=utility, args=args
        )
        manager.export_password_env(p.id)

        # Pass connection details via libpq environment variables;
        # prefer the SSH tunnel's local bind when one is in use.
        env = dict()
        env['PGHOST'] = \
            manager.local_bind_host if manager.use_ssh_tunnel else server.host
        env['PGPORT'] = \
            str(manager.local_bind_port) if manager.use_ssh_tunnel else str(
                server.port)
        env['PGUSER'] = server.username
        env['PGDATABASE'] = data['database']
        p.set_env_variables(server, env=env)

        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return bad_request(errormsg=str(e))

    # Return response
    return make_json_response(
        data={'job_id': jid, 'success': 1}
    )
def msql(sid, did):
    """
    This function will return modified SQL (the GRANT/REVOKE statements
    for the requested privilege changes) without executing it.

    Args:
        sid: Server Id
        did: Database Id

    Fix: the final exception handler used ``e.message``, which does not
    exist on Python 3 exceptions and raised AttributeError, masking the
    real error; it now reports ``str(e)``.
    """
    server_prop = server_info

    data = request.form if request.form else json.loads(request.data.decode())

    # Form db connection
    manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(sid)
    conn = manager.connection(did=did)

    # Load the ACL metadata for this server version; a failure here is
    # logged but deliberately non-fatal (acls stays empty).
    acls = []
    try:
        acls = render_template("/".join(
            [server_prop['template_path'], '/acl.json']))
        acls = json.loads(acls)
    except Exception as e:
        current_app.logger.exception(e)

    try:
        # Parse privileges
        data['priv'] = {}
        if 'acl' in data:
            # Get function acls
            data['priv']['function'] = parse_priv_to_db(
                data['acl'], acls['function']['acl'])
            data['priv']['sequence'] = parse_priv_to_db(
                data['acl'], acls['sequence']['acl'])
            data['priv']['table'] = parse_priv_to_db(
                data['acl'], acls['table']['acl'])

        # Pass database objects and get SQL for privileges
        sql_data = ''
        data_func = {
            'objects': data['objects'],
            'priv': data['priv']['function']
        }
        sql = render_template(
            "/".join([server_prop['template_path'],
                      '/sql/grant_function.sql']),
            data=data_func, conn=conn)
        if sql and sql.strip('\n') != '':
            sql_data += sql

        data_seq = {
            'objects': data['objects'],
            'priv': data['priv']['sequence']
        }
        sql = render_template(
            "/".join([server_prop['template_path'],
                      '/sql/grant_sequence.sql']),
            data=data_seq, conn=conn)
        if sql and sql.strip('\n') != '':
            sql_data += sql

        data_table = {
            'objects': data['objects'],
            'priv': data['priv']['table']
        }
        sql = render_template(
            "/".join([server_prop['template_path'], '/sql/grant_table.sql']),
            data=data_table, conn=conn)
        if sql and sql.strip('\n') != '':
            sql_data += sql

        res = {'data': sql_data}

        return ajax_response(
            response=res,
            status=200
        )
    except Exception as e:
        # FIX: was e.message (AttributeError on Python 3).
        return make_json_response(
            status=410, success=0, errormsg=str(e))
def get_dependents(self, conn, sid, rid):
    """
    This function is used to fetch the dependents for the selected node
    (the databases/tablespaces owned by the role, plus per-database
    object dependents).

    Args:
        conn: Connection object (queries actually run on self.conn and on
            per-database connections from the manager)
        sid: Server Id
        rid: Role Id.

    Returns:
        Dictionary of dependents for the selected node.

    Fixes:
    - On query failure the error message was iterated as a result set
      (TypeError); we now log it and return the (empty) dependents list.
    - ``temp_conn`` could be referenced unbound (NameError) when
      ``manager.connection()`` itself raised; that database is skipped.
    """
    # Dictionary for the object types
    types = {
        # None specified special handling for this type
        'r': 'table',
        'i': None,
        'S': 'sequence',
        'v': 'view',
        'x': 'external_table',
        'p': 'function',
        'n': 'schema',
        'y': 'type',
        'd': 'domain',
        'T': 'trigger_function',
        'C': 'conversion',
        'o': None
    }

    dependents = list()

    query = render_template("/".join([self.sql_path, 'dependents.sql']),
                            fetch_database=True, rid=rid)
    status, db_result = self.conn.execute_dict(query)
    if not status:
        # db_result holds the error text here, not a result set.
        current_app.logger.error(db_result)
        return dependents

    # Get the server manager
    manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(sid)

    for db_row in db_result['rows']:
        oid = db_row['datdba']
        if db_row['type'] == 'd':
            # Databases owned by the role.
            if rid == oid:
                dependents.append({
                    'type': 'database',
                    'name': '',
                    'field': db_row['datname']
                })
        else:
            dependents.append({
                'type': 'tablespace',
                'name': db_row['datname'],
                'field': ''
            })

        # If connection to the database is not allowed then continue
        # with the next database
        if not db_row['datallowconn']:
            continue

        # Get the connection from the manager for the specified database.
        # Check the connect status and if it is not connected then create
        # a new connection to run the query and fetch the dependents.
        is_connected = True
        temp_conn = None
        try:
            temp_conn = manager.connection(db_row['datname'])
            is_connected = temp_conn.connected()
            if not is_connected:
                temp_conn.connect()
        except Exception as e:
            current_app.logger.exception(e)
            if temp_conn is None:
                # Could not even obtain a connection object; nothing to
                # check or release for this database.
                continue

        self._temp_connection_check(rid, temp_conn, db_row, types,
                                    dependents)
        RoleView._release_connection(is_connected, manager, db_row)

    return dependents
def initialize_query_tool(trans_id, sgid, sid, did=None):
    """
    This method is responsible for instantiating and initializing
    the query tool object. It will also create a unique
    transaction id and store the information into session variable.

    Args:
        trans_id: Unique transaction id supplied by the client
        sgid: Server group Id
        sid: Server Id
        did: Database Id (defaults to the maintenance database's OID)

    Returns:
        JSON response carrying the connection id and server version.
    """
    connect = True
    # Read the data if present. Skipping read may cause connection
    # reset error if data is sent from the client
    if request.data:
        _ = request.data

    reqArgs = request.args
    # 'recreate=1' means the client is rebuilding an existing tab; do
    # not connect eagerly in that case.
    if ('recreate' in reqArgs and
            reqArgs['recreate'] == '1'):
        connect = False

    # Create asynchronous connection using random connection id.
    conn_id = str(random.randint(1, 9999999))

    # Use Maintenance database OID
    manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(sid)

    if did is None:
        did = manager.did
    try:
        command_obj = ObjectRegistry.get_object(
            'query_tool', conn_id=conn_id, sgid=sgid, sid=sid, did=did)
    except Exception as e:
        app.logger.error(e)
        return internal_server_error(errormsg=str(e))

    try:
        conn = manager.connection(did=did, conn_id=conn_id,
                                  auto_reconnect=False,
                                  use_binary_placeholder=True,
                                  array_to_string=True)
        if connect:
            status, msg = conn.connect()
            if not status:
                app.logger.error(msg)
                return internal_server_error(errormsg=str(msg))
    except (ConnectionLost, SSHTunnelConnectionLost) as e:
        # Let the global handlers deal with a lost connection/tunnel.
        app.logger.error(e)
        raise
    except Exception as e:
        app.logger.error(e)
        return internal_server_error(errormsg=str(e))

    if 'gridData' not in session:
        sql_grid_data = dict()
    else:
        sql_grid_data = session['gridData']

    # Set the value of auto commit and auto rollback specified in Preferences
    pref = Preferences.module('sqleditor')
    command_obj.set_auto_commit(pref.preference('auto_commit').get())
    command_obj.set_auto_rollback(pref.preference('auto_rollback').get())

    # Use pickle to store the command object which will be used
    # later by the sql grid module.
    sql_grid_data[str(trans_id)] = {
        # -1 specify the highest protocol version available
        'command_obj': pickle.dumps(command_obj, -1)
    }

    # Store the grid dictionary into the session variable
    session['gridData'] = sql_grid_data

    return make_json_response(
        data={
            'connId': str(conn_id),
            'serverVersion': manager.version,
        }
    )
def update(self, gid, sid):
    """Update the server settings.

    Connection-affecting parameters (host, port, username, ...) may only
    be changed while the server is disconnected.

    Fixes:
    - The ``config_param_map`` entry for ``username`` was corrupted
      ('******'), so a username change was written to a bogus model
      attribute and silently lost; it now maps to the ``username``
      attribute (which ``disp_lbl`` and the connected-server guard both
      already reference).
    - The commit failure handler used ``e.message`` (AttributeError on
      Python 3); it now reports ``str(e)``.
    """
    server = Server.query.filter_by(
        user_id=current_user.id, id=sid).first()

    if server is None:
        return make_json_response(
            status=410,
            success=0,
            errormsg=gettext("Could not find the required server.")
        )

    # Not all parameters can be modified, while the server is connected
    config_param_map = {
        'name': 'name',
        'host': 'host',
        'hostaddr': 'hostaddr',
        'port': 'port',
        'db': 'maintenance_db',
        'username': 'username',
        'sslmode': 'ssl_mode',
        'gid': 'servergroup_id',
        'comment': 'comment',
        'role': 'role',
        'db_res': 'db_res',
        'passfile': 'passfile',
        'sslcert': 'sslcert',
        'sslkey': 'sslkey',
        'sslrootcert': 'sslrootcert',
        'sslcrl': 'sslcrl',
        'sslcompression': 'sslcompression',
        'bgcolor': 'bgcolor',
        'fgcolor': 'fgcolor',
        'service': 'service'
    }

    # Display labels used in the "not allowed to modify" error message.
    disp_lbl = {
        'name': gettext('name'),
        'host': gettext('Host name/address'),
        'port': gettext('Port'),
        'db': gettext('Maintenance database'),
        'username': gettext('Username'),
        'sslmode': gettext('SSL Mode'),
        'comment': gettext('Comments'),
        'role': gettext('Role')
    }

    idx = 0
    data = request.form if request.form else json.loads(
        request.data, encoding='utf-8'
    )

    if 'db_res' in data:
        data['db_res'] = ','.join(data['db_res'])

    # Validate the host address as IPv4 or IPv6.
    if 'hostaddr' in data and data['hostaddr'] and data['hostaddr'] != '':
        if not self.pat4.match(data['hostaddr']):
            if not self.pat6.match(data['hostaddr']):
                return make_json_response(
                    success=0,
                    status=400,
                    errormsg=gettext('Host address not valid')
                )

    manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(sid)
    conn = manager.connection()
    connected = conn.connected()

    if connected:
        # Reject any connection-affecting parameter while connected.
        for arg in (
            'host', 'hostaddr', 'port', 'db', 'username', 'sslmode',
            'role', 'service'
        ):
            if arg in data:
                return forbidden(
                    errormsg=gettext(
                        "'{0}' is not allowed to modify, "
                        "when server is connected."
                    ).format(disp_lbl[arg])
                )

    for arg in config_param_map:
        if arg in data:
            value = data[arg]
            # sqlite3 do not have boolean type so we need to convert
            # it manually to integer
            if arg == 'sslcompression':
                value = 1 if value else 0
            setattr(server, config_param_map[arg], value)
            idx += 1

    if idx == 0:
        return make_json_response(
            success=0,
            errormsg=gettext('No parameters were changed.')
        )

    try:
        db.session.commit()
    except Exception as e:
        current_app.logger.exception(e)
        # FIX: was e.message (AttributeError on Python 3).
        return make_json_response(
            success=0,
            errormsg=str(e)
        )

    # When server is connected, we don't require to update the connection
    # manager. Because - we don't allow to change any of the parameters,
    # which will affect the connections.
    if not conn.connected():
        manager.update(server)

    return jsonify(
        node=self.blueprint.generate_browser_node(
            "%d" % (server.id),
            server.servergroup_id,
            server.name,
            server_icon_and_background(connected, manager, server),
            True,
            self.node_type,
            connected=False,
            server_type='pg'  # default server type
        )
    )
def create(self, gid):
    """Add a server node to the settings database.

    Optionally connects immediately ('connect_now'); on connection
    failure the freshly created row is rolled back (deleted).
    """
    required_args = [
        u'name',
        u'port',
        u'sslmode',
        u'username'
    ]

    data = request.form if request.form else json.loads(
        request.data, encoding='utf-8'
    )

    # Some fields can be provided with service file so they are optional
    if 'service' in data and not data['service']:
        required_args.extend([
            u'host',
            u'db',
            u'role'
        ])
    for arg in required_args:
        if arg not in data:
            return make_json_response(
                status=410,
                success=0,
                errormsg=gettext(
                    "Could not find the required parameter (%s)." % arg
                )
            )

    # Validate the host address as IPv4 or IPv6.
    if 'hostaddr' in data and data['hostaddr'] and data['hostaddr'] != '':
        if not self.pat4.match(data['hostaddr']):
            if not self.pat6.match(data['hostaddr']):
                return make_json_response(
                    success=0,
                    status=400,
                    errormsg=gettext('Host address not valid')
                )

    # To check ssl configuration
    is_ssl, data = self.check_ssl_fields(data)

    server = None

    try:
        server = Server(
            user_id=current_user.id,
            servergroup_id=data.get('gid', gid),
            name=data.get('name'),
            host=data.get('host', None),
            hostaddr=data.get('hostaddr', None),
            port=data.get('port'),
            maintenance_db=data.get('db', None),
            username=data.get('username'),
            ssl_mode=data.get('sslmode'),
            comment=data.get('comment', None),
            role=data.get('role', None),
            db_res=','.join(data[u'db_res'])
            if u'db_res' in data else None,
            sslcert=data.get('sslcert', None),
            sslkey=data.get('sslkey', None),
            sslrootcert=data.get('sslrootcert', None),
            sslcrl=data.get('sslcrl', None),
            # sqlite3 has no boolean type; store 0/1.
            sslcompression=1 if is_ssl and data['sslcompression'] else 0,
            bgcolor=data.get('bgcolor', None),
            fgcolor=data.get('fgcolor', None),
            service=data.get('service', None)
        )
        db.session.add(server)
        db.session.commit()

        connected = False
        user = None
        manager = None

        if 'connect_now' in data and data['connect_now']:
            manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(
                server.id)
            manager.update(server)
            conn = manager.connection()

            have_password = False
            password = None
            passfile = None
            if 'password' in data and data["password"] != '':
                # login with password
                have_password = True
                password = data['password']
                # Encrypt with the user's login password as the key.
                password = encrypt(password, current_user.password)
            elif 'passfile' in data and data["passfile"] != '':
                passfile = data['passfile']
                setattr(server, 'passfile', passfile)
                db.session.commit()

            status, errmsg = conn.connect(
                password=password,
                passfile=passfile,
                server_types=ServerType.types()
            )
            # Python 2 only: driver errors arrive as utf-8 bytes.
            if hasattr(str, 'decode') and errmsg is not None:
                errmsg = errmsg.decode('utf-8')
            if not status:
                # Connection failed: roll back the newly created row.
                db.session.delete(server)
                db.session.commit()
                return make_json_response(
                    status=401,
                    success=0,
                    errormsg=gettext(
                        u"Unable to connect to server:\n\n%s" % errmsg)
                )
            else:
                if 'save_password' in data and data['save_password'] and \
                        have_password and config.ALLOW_SAVE_PASSWORD:
                    setattr(server, 'password', password)
                    db.session.commit()

                user = manager.user_info
                connected = True

        return jsonify(
            node=self.blueprint.generate_browser_node(
                "%d" % server.id, server.servergroup_id,
                server.name,
                server_icon_and_background(connected, manager, server),
                True,
                self.node_type,
                user=user,
                connected=connected,
                server_type=manager.server_type
                if manager and manager.server_type else 'pg'
            )
        )

    except Exception as e:
        # Any failure after the insert removes the partial row.
        if server:
            db.session.delete(server)
            db.session.commit()

        current_app.logger.exception(e)
        return make_json_response(
            status=410,
            success=0,
            errormsg=str(e)
        )
def create_backup_objects_job(sid):
    """
    Creates a new job for a backup task
    (Backup Database(s)/Schema(s)/Table(s)) using pg_dump.

    Args:
        sid: Server ID

    Returns:
        JSON response carrying the new job id, or an error response.
    """
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        data = json.loads(data['data'][0], encoding='utf-8')
    else:
        data = json.loads(request.data, encoding='utf-8')

    # Remove ratio from data in case of empty string
    if 'ratio' in data and data['ratio'] == '':
        data.pop("ratio")

    try:
        backup_file = filename_with_file_manager_path(data['file'])
    except Exception as e:
        return bad_request(errormsg=str(e))

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(id=sid,
                                    user_id=current_user.id).first()
    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Could not find the specified server.")
        )

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0,
            errormsg=_("Please connect to the server first.")
        )

    # NOTE(review): unlike the restore/import views, the utility path is
    # not checked for existence before use -- confirm intentional.
    utility = manager.utility('backup')

    args = [
        '--file', backup_file,
        '--host', server.host,
        '--port', str(server.port),
        '--username', server.username,
        '--no-password'
    ]

    # Closure helpers translating the request payload into pg_dump
    # command-line arguments (both close over data/args).

    def set_param(key, param):
        # Append a flag when the key is present and truthy.
        if key in data and data[key]:
            args.append(param)

    def set_value(key, param, value):
        # Append "param <data[key]>" when value is True, otherwise
        # append "param <value>" literally.
        if key in data:
            if value:
                if value is True and data[key]:
                    args.append(param)
                    args.append(data[key])
                else:
                    args.append(param)
                    args.append(value)

    set_param('verbose', '--verbose')
    # 'dqoute' is the (misspelled) request key sent by the client UI.
    set_param('dqoute', '--quote-all-identifiers')
    set_value('role', '--role', True)

    if data['format'] is not None:
        if data['format'] == 'custom':
            args.extend(['--format=c'])
            set_param('blobs', '--blobs')
            set_value('ratio', '--compress', True)
        elif data['format'] == 'tar':
            args.extend(['--format=t'])
            set_param('blobs', '--blobs')
        elif data['format'] == 'plain':
            args.extend(['--format=p'])
            # Plain format: data-only and schema-only are exclusive.
            if 'only_data' in data and data['only_data']:
                args.append('--data-only')
                set_param('disable_trigger', '--disable-triggers')
            else:
                set_param('only_schema', '--schema-only')
                set_param('dns_owner', '--no-owner')
                set_param('include_create_database', '--create')
                set_param('include_drop_database', '--clean')
        elif data['format'] == 'directory':
            args.extend(['--format=d'])

    set_param('pre_data', '--section=pre-data')
    set_param('data', '--section=data')
    set_param('post_data', '--section=post-data')
    set_param('dns_privilege', '--no-privileges')
    set_param('dns_tablespace', '--no-tablespaces')
    set_param('dns_unlogged_tbl_data', '--no-unlogged-table-data')
    set_param('use_insert_commands', '--inserts')
    set_param('use_column_inserts', '--column-inserts')
    set_param('disable_quoting', '--disable-dollar-quoting')
    set_param('with_oids', '--oids')
    set_param('use_set_session_auth', '--use-set-session-authorization')

    set_value('encoding', '--encoding', True)
    set_value('no_of_jobs', '--jobs', True)

    for s in data['schemas']:
        args.extend(['--schema', s])

    for s, t in data['tables']:
        args.extend(['--table', driver.qtIdent(conn, s, t)])

    args.append(data['database'])

    try:
        p = BatchProcess(
            desc=BackupMessage(
                BACKUP.OBJECT, sid,
                data['file'].encode('utf-8') if hasattr(
                    data['file'], 'encode') else data['file'],
                *args,
                database=data['database']
            ),
            cmd=utility, args=args)
        manager.export_password_env(p.id)
        p.set_env_variables(server)
        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(
            status=410,
            success=0,
            errormsg=str(e)
        )

    # Return response
    return make_json_response(
        data={'job_id': jid, 'Success': 1}
    )
def clear_current_user_connections(app, user):
    """Garbage-collect every database connection owned by the user who is
    logging out (signal handler; app/user arguments are unused)."""
    from config import PG_DEFAULT_DRIVER
    from pgadmin.utils.driver import get_driver

    get_driver(PG_DEFAULT_DRIVER).gc_own()
def wrap(*args, **kwargs):
    """Dashboard view decorator body: resolve the server (and optional
    database) connection onto flask.g and set the SQL template path, or
    ask the client to connect first."""
    # Here args[0] will hold self & kwargs will hold gid,sid,did
    # These endpoints render tables; everything else renders graphs.
    tabular = f.__name__ in ('activity', 'prepared', 'locks', 'config')

    def _ask_to_connect(target):
        # HTTP 428 response telling the client to connect first.
        if tabular:
            return precondition_required(
                gettext("Please connect to the selected %s"
                        " to view the table." % target)
            )
        return precondition_required(
            gettext("Please connect to the selected %s"
                    " to view the graph." % target)
        )

    g.manager = get_driver(
        PG_DEFAULT_DRIVER).connection_manager(
        kwargs['sid']
    )

    # Below check handle the case where existing server is deleted
    # by user and python server will raise exception if this check
    # is not introduce.
    if g.manager is None:
        return _ask_to_connect('server')

    g.conn = g.manager.connection()

    # If DB not connected then return error to browser
    if not g.conn.connected():
        return _ask_to_connect('server')

    if 'did' in kwargs:
        db_conn = g.manager.connection(did=kwargs['did'])
        # If the selected DB not connected then return error to browser
        if not db_conn.connected():
            return _ask_to_connect('database')

    # Set template path for sql scripts
    g.server_type = g.manager.server_type
    g.version = g.manager.version

    # Include server_type in template_path when server_type is gpdb
    if g.server_type == 'gpdb':
        g.template_path = 'dashboard/sql/' + '#{0}#{1}#'.format(
            g.server_type, g.version)
    else:
        g.template_path = 'dashboard/sql/' + '#{0}#'.format(g.version)

    return f(*args, **kwargs)
def get_dependents(self, conn, sid, tsid): """ This function is used to fetch the dependents for the selected node. Args: conn: Connection object sid: Server Id tsid: Tablespace ID Returns: Dictionary of dependents for the selected node. """ # Dictionary for the object types types = { # None specified special handling for this type 'r': 'table', 'i': None, 'S': 'sequence', 'v': 'view', 'x': 'external_table', 'p': 'function', 'n': 'schema', 'y': 'type', 'd': 'domain', 'T': 'trigger_function', 'C': 'conversion', 'o': None } # Fetching databases with CONNECT privileges status. query = render_template("/".join( [self.template_path, 'dependents.sql']), fetch_database=True) status, db_result = self.conn.execute_dict(query) if not status: current_app.logger.error(db_result) dependents = list() # Get the server manager manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(sid) for db_row in db_result['rows']: oid = db_row['dattablespace'] # Append all the databases to the dependents list if oid is same if tsid == oid: dependents.append({ 'type': 'database', 'name': '', 'field': db_row['datname'] }) # If connection to the database is not allowed then continue # with the next database if not db_row['datallowconn']: continue # Get the connection from the manager for the specified database. # Check the connect status and if it is not connected then create # a new connection to run the query and fetch the dependents. is_connected = True try: temp_conn = manager.connection(db_row['datname']) is_connected = temp_conn.connected() if not is_connected: temp_conn.connect() except Exception as e: current_app.logger.exception(e) if temp_conn.connected(): query = render_template("/".join( [self.template_path, 'dependents.sql']), fetch_dependents=True, tsid=tsid) status, result = temp_conn.execute_dict(query) if not status: current_app.logger.error(result) self._create_dependents_data(types, result, dependents, db_row, is_connected, manager) return dependents
def create_backup_objects_job(sid):
    """
    Creates a new job for backup task
    (Backup Database(s)/Schema(s)/Table(s)).

    Args:
        sid: Server ID

    Returns:
        A JSON response carrying the created job id, or an error
        response when validation/connection fails.
    """
    if request.form:
        # Convert ImmutableDict to dict
        data = dict(request.form)
        # BUGFIX: json.loads() no longer accepts an 'encoding' keyword
        # (ignored since 3.1, removed in Python 3.9) — it raised a
        # TypeError here on modern interpreters.
        data = json.loads(data['data'][0])
    else:
        # json.loads accepts bytes (request.data) directly since 3.6.
        data = json.loads(request.data)

    backup_obj_type = 'objects'
    if 'type' in data:
        backup_obj_type = data['type']

    try:
        # 'directory' format targets a directory, not a single file,
        # hence the different validation path.
        if 'format' in data and data['format'] == 'directory':
            backup_file = filename_with_file_manager_path(data['file'],
                                                          False)
        else:
            backup_file = filename_with_file_manager_path(data['file'])
    except Exception as e:
        return bad_request(errormsg=str(e))

    # Fetch the server details like hostname, port, roles etc
    server = Server.query.filter_by(id=sid,
                                    user_id=current_user.id).first()

    if server is None:
        return make_json_response(
            success=0,
            errormsg=_("Could not find the specified server."))

    # To fetch MetaData for the server
    from pgadmin.utils.driver import get_driver
    driver = get_driver(PG_DEFAULT_DRIVER)
    manager = driver.connection_manager(server.id)
    conn = manager.connection()
    connected = conn.connected()

    if not connected:
        return make_json_response(
            success=0,
            errormsg=_("Please connect to the server first."))

    # 'objects' backups use pg_dump; server/globals use pg_dumpall.
    utility = manager.utility('backup') if backup_obj_type == 'objects' \
        else manager.utility('backup_server')

    ret_val = is_utility_exists(utility)
    if ret_val:
        return make_json_response(success=0, errormsg=ret_val)

    # When an SSH tunnel is in use, connect through the local bind
    # endpoint instead of the server's real host/port.
    args = [
        '--file',
        backup_file,
        '--host',
        manager.local_bind_host if manager.use_ssh_tunnel
        else server.host,
        '--port',
        str(manager.local_bind_port) if manager.use_ssh_tunnel
        else str(server.port),
        '--username',
        server.username,
        '--no-password'
    ]

    if backup_obj_type != 'objects':
        args.append('--database')
        args.append(server.maintenance_db)

    if backup_obj_type == 'globals':
        args.append('--globals-only')

    def set_param(key, param):
        # Append a flag-style option when the key is present and truthy.
        if key in data and data[key]:
            args.append(param)

    def set_value(key, param, default_value=None):
        # Append an option with its value; fall back to default_value
        # (when given) if the key is absent/empty.
        if key in data and data[key] is not None and data[key] != '':
            args.append(param)
            args.append(data[key])
        elif default_value is not None:
            args.append(param)
            args.append(default_value)

    set_param('verbose', '--verbose')
    set_param('dqoute', '--quote-all-identifiers')
    set_value('role', '--role')

    if backup_obj_type == 'objects' and \
            'format' in data and data['format'] is not None:
        if data['format'] == 'custom':
            args.extend(['--format=c'])
            set_param('blobs', '--blobs')
            set_value('ratio', '--compress')
        elif data['format'] == 'tar':
            args.extend(['--format=t'])
            set_param('blobs', '--blobs')
        elif data['format'] == 'plain':
            args.extend(['--format=p'])
        elif data['format'] == 'directory':
            args.extend(['--format=d'])

    # --disable-triggers only makes sense with data-only plain dumps.
    if 'only_data' in data and data['only_data']:
        set_param('only_data', '--data-only')
        if 'format' in data and data['format'] == 'plain':
            set_param('disable_trigger', '--disable-triggers')
    elif 'only_schema' in data and data['only_schema']:
        set_param('only_schema', '--schema-only')

    set_param('dns_owner', '--no-owner')
    set_param('include_create_database', '--create')
    set_param('include_drop_database', '--clean')
    set_param('pre_data', '--section=pre-data')
    set_param('data', '--section=data')
    set_param('post_data', '--section=post-data')
    set_param('dns_privilege', '--no-privileges')
    set_param('dns_tablespace', '--no-tablespaces')
    set_param('dns_unlogged_tbl_data', '--no-unlogged-table-data')
    set_param('use_insert_commands', '--inserts')
    set_param('use_column_inserts', '--column-inserts')
    set_param('disable_quoting', '--disable-dollar-quoting')
    set_param('with_oids', '--oids')
    set_param('use_set_session_auth', '--use-set-session-authorization')

    # --no-comments and --load-via-partition-root exist from PG 11 on.
    if manager.version >= 110000:
        set_param('no_comments', '--no-comments')
        set_param('load_via_partition_root', '--load-via-partition-root')

    set_value('encoding', '--encoding')
    set_value('no_of_jobs', '--jobs')

    if 'schemas' in data:
        for s in data['schemas']:
            args.extend(['--schema', s])

    if 'tables' in data:
        for s, t in data['tables']:
            args.extend(['--table', driver.qtIdent(conn, s, t)])

    try:
        if backup_obj_type == 'objects':
            args.append(data['database'])
            p = BatchProcess(desc=BackupMessage(
                BACKUP.OBJECT, sid,
                data['file'].encode('utf-8') if hasattr(
                    data['file'], 'encode') else data['file'],
                *args,
                database=data['database']),
                cmd=utility, args=args)
        else:
            p = BatchProcess(desc=BackupMessage(
                BACKUP.SERVER if backup_obj_type != 'globals'
                else BACKUP.GLOBALS, sid,
                data['file'].encode('utf-8') if hasattr(
                    data['file'], 'encode') else data['file'],
                *args),
                cmd=utility, args=args)

        manager.export_password_env(p.id)
        # Check for connection timeout and if it is greater than 0 then
        # set the environment variable PGCONNECT_TIMEOUT.
        if manager.connect_timeout > 0:
            env = dict()
            env['PGCONNECT_TIMEOUT'] = str(manager.connect_timeout)
            p.set_env_variables(server, env=env)
        else:
            p.set_env_variables(server)

        p.start()
        jid = p.id
    except Exception as e:
        current_app.logger.exception(e)
        return make_json_response(
            status=410,
            success=0,
            errormsg=str(e)
        )

    # Return response
    return make_json_response(data={'job_id': jid, 'Success': 1})