def properties(self, gid, sid, did, scid, coid):
    """
    Show the properties of the selected collation node.

    Args:
        gid: Server Group ID
        sid: Server ID
        did: Database ID
        scid: Schema ID
        coid: Collation ID

    Returns:
        JSON of selected collation node
    """
    try:
        SQL = render_template("/".join([self.template_path,
                                        'properties.sql']),
                              scid=scid, coid=coid)
        status, res = self.conn.execute_dict(SQL)

        if not status:
            return internal_server_error(errormsg=res)

        # NOTE(review): res['rows'][0] raises IndexError when the collation
        # does not exist; the generic except below turns that into a 500 —
        # confirm whether a 410/gone response is expected instead.
        return ajax_response(
            response=res['rows'][0],
            status=200
        )
    except Exception as e:
        return internal_server_error(errormsg=str(e))
def move_objects(self, gid, sid, tsid):
    """
    Move all objects from the current tablespace to another one.

    Args:
        gid: Server Group ID
        sid: Server ID
        tsid: Tablespace ID
    """
    payload = json.loads(request.form['data'], encoding='utf-8')

    try:
        sql = render_template(
            "/".join([self.template_path, 'move_objects.sql']),
            data=payload,
            conn=self.conn
        )
        status, res = self.conn.execute_scalar(sql.strip('\n'))

        if status:
            return make_json_response(
                success=1,
                info="Tablespace updated",
                data={'id': tsid, 'sid': sid, 'gid': gid}
            )
        return internal_server_error(errormsg=res)
    except Exception as e:
        current_app.logger.exception(e)
        return internal_server_error(errormsg=str(e))
def update(self, gid, sid, did, eid):
    """
    Update an existing extension object.
    """
    if request.form:
        data = request.form
    else:
        data = json.loads(request.data, encoding='utf-8')

    try:
        sql, name = self.getSQL(gid, sid, data, did, eid)
        # A non-string result means getSQL already built an error response.
        if not isinstance(sql, (str, unicode)):
            return sql

        status, res = self.conn.execute_dict(sql.strip('\n').strip(' '))
        if not status:
            return internal_server_error(errormsg=res)

        return jsonify(
            node=self.blueprint.generate_browser_node(
                eid, did, name,
                icon="icon-%s" % self.node_type
            )
        )
    except Exception as e:
        return internal_server_error(errormsg=str(e))
def update(self, gid, sid, did, cid):
    """
    Update the cast object.

    :param cid: cast id
    :param did: database id
    :param sid: server id
    :param gid: group id
    :return: browser-node JSON on success, error response otherwise
    """
    if request.form:
        data = request.form
    else:
        data = json.loads(request.data, encoding='utf-8')

    try:
        update_sql, name = self.get_sql(gid, sid, did, data, cid)
        # A non-string result means get_sql already built an error response.
        if not isinstance(update_sql, (str, unicode)):
            return update_sql

        status, res = self.conn.execute_scalar(update_sql)
        if status:
            return jsonify(
                node=self.blueprint.generate_browser_node(
                    cid, did, name,
                    "icon-{0}".format(self.node_type)
                )
            )
        return internal_server_error(errormsg=res)
    except Exception as e:
        return internal_server_error(errormsg=str(e))
def delete(self, gid, sid, did, cid): """ This function will drop the cast object :param cid: cast id :param did: database id :param sid: server id :param gid: group id :return: """ # Below will decide if it's simple drop or drop with cascade call if self.cmd == 'delete': # This is a cascade operation cascade = True else: cascade = False try: # Get name for cast from cid sql = render_template("/".join([self.template_path, 'delete.sql']), cid=cid) status, res = self.conn.execute_dict(sql) if not status: return internal_server_error(errormsg=res) if not res['rows']: return make_json_response( status=410, success=0, errormsg=gettext( 'Error: Object not found.' ), info=gettext( 'The specified cast object could not be found.\n' ) ) # drop cast result = res['rows'][0] sql = render_template("/".join([self.template_path, 'delete.sql']), castsource=result['castsource'], casttarget=result['casttarget'], cascade=cascade ) status, res = self.conn.execute_scalar(sql) if not status: return internal_server_error(errormsg=res) return make_json_response( success=1, info=gettext("Cast dropped"), data={ 'id': cid, 'sid': sid, 'gid': gid, 'did': did } ) except Exception as e: return internal_server_error(errormsg=str(e))
def truncate(self, gid, sid, did, scid, tid):
    """
    Truncate the given table object.

    Args:
        gid: Server Group ID
        sid: Server ID
        did: Database ID
        scid: Schema ID
        tid: Table ID
    """
    try:
        props_sql = render_template(
            "/".join([self.table_template_path, 'properties.sql']),
            did=did, scid=scid, tid=tid,
            datlastsysoid=self.datlastsysoid
        )
        status, res = self.conn.execute_dict(props_sql)
        if not status:
            return internal_server_error(errormsg=res)

        # Delegate the actual TRUNCATE to the base class, handing it the
        # freshly fetched table properties.
        return super(TableView, self).truncate(
            gid, sid, did, scid, tid, res
        )
    except Exception as e:
        return internal_server_error(errormsg=str(e))
def get_relations(self, gid, sid, did, scid, tid=None):
    """
    Return the list of tables available for the like/relation combobox
    shown while creating a new table.

    Returns:
        JSON list of {'label', 'value'} entries (first entry is blank).
    """
    res = [{'label': '', 'value': ''}]
    try:
        SQL = render_template(
            "/".join([self.table_template_path, 'get_relations.sql']),
            show_sys_objects=self.blueprint.show_system_objects,
            server_type=self.manager.server_type
        )
        status, rset = self.conn.execute_2darray(SQL)
        if not status:
            # Bug fix: report the database error (rset); previously the
            # placeholder result list (res) was passed as the error message.
            return internal_server_error(errormsg=rset)

        for row in rset['rows']:
            res.append(
                {
                    'label': row['like_relation'],
                    'value': row['like_relation']
                }
            )
        return make_json_response(
            data=res,
            status=200
        )
    except Exception as e:
        return internal_server_error(errormsg=str(e))
def get_attach_tables(self, gid, sid, did, scid, tid=None):
    """
    Return the list of tables available to be attached to the
    partitioned table.

    Returns:
        JSON list of {'label': table_name, 'value': oid} entries.
    """
    try:
        res = []
        SQL = render_template(
            "/".join([
                self.partition_template_path, 'get_attach_tables.sql'
            ]),
            tid=tid
        )
        status, rset = self.conn.execute_2darray(SQL)
        if not status:
            # Bug fix: report the database error (rset); previously the
            # partially built result list (res) was passed instead.
            return internal_server_error(errormsg=rset)

        for row in rset['rows']:
            res.append(
                {'label': row['table_name'], 'value': row['oid']}
            )

        return make_json_response(
            data=res,
            status=200
        )
    except Exception as e:
        return internal_server_error(errormsg=str(e))
def get_all_tables(self, gid, sid, did, scid, tid=None):
    """
    Return the list of tables required for constraints.

    Args:
        gid: Server Group Id
        sid: Server Id
        did: Database Id
        scid: Schema Id
        tid: Table Id

    Returns:
        JSON response containing the rows from
        get_tables_for_constraints.sql.
    """
    try:
        SQL = render_template(
            "/".join([
                self.table_template_path, 'get_tables_for_constraints.sql'
            ]),
            show_sysobj=self.blueprint.show_system_objects
        )
        status, res = self.conn.execute_dict(SQL)
        if not status:
            return internal_server_error(errormsg=res)

        return make_json_response(
            data=res['rows'],
            status=200
        )
    except Exception as e:
        return internal_server_error(errormsg=str(e))
def update(self, gid, sid, did, scid, syid):
    """
    Update the existing synonym object.

    Args:
        gid: Server Group ID
        sid: Server ID
        did: Database ID
        scid: Schema ID
        syid: Synonym ID
    """
    data = request.form if request.form else json.loads(
        request.data, encoding='utf-8'
    )
    SQL = self.get_sql(gid, sid, data, scid, syid)
    # Most probably this is due to error
    if not isinstance(SQL, (str, unicode)):
        return SQL
    try:
        # An effectively empty statement means nothing changed; in that
        # case only the browser node is (re)generated below.
        if SQL and SQL.strip('\n') and SQL.strip(' '):
            status, res = self.conn.execute_scalar(SQL)
            if not status:
                return internal_server_error(errormsg=res)

        # NOTE(review): syid is passed both as the node id and as the
        # node label — other update methods pass the object name here;
        # confirm this is intentional.
        return jsonify(
            node=self.blueprint.generate_browser_node(
                syid,
                scid,
                syid,
                icon="icon-synonym"
            )
        )
    except Exception as e:
        return internal_server_error(errormsg=str(e))
def update(self, gid, sid, did, scid, tid):
    """
    Update an existing table object.

    Args:
        gid: Server Group ID
        sid: Server ID
        did: Database ID
        scid: Schema ID
        tid: Table ID
    """
    data = request.form if request.form else json.loads(
        request.data, encoding='utf-8'
    )

    # Values arrive JSON-encoded; decode each one, keeping the raw value
    # whenever it is not valid JSON.
    for key, value in data.items():
        try:
            data[key] = json.loads(value, encoding='utf-8')
        except (ValueError, TypeError, KeyError):
            data[key] = value

    try:
        props_sql = render_template(
            "/".join([self.table_template_path, 'properties.sql']),
            did=did, scid=scid, tid=tid,
            datlastsysoid=self.datlastsysoid
        )
        status, res = self.conn.execute_dict(props_sql)
        if not status:
            return internal_server_error(errormsg=res)

        # Hand the decoded data plus the current properties to the base
        # class, which performs the actual update.
        return super(TableView, self).update(
            gid, sid, did, scid, tid, data, res)
    except Exception as e:
        return internal_server_error(errormsg=str(e))
def get_collations(self, gid, sid, did, scid, doid=None):
    """
    Return the available collations.

    Args:
        gid: Server Group Id
        sid: Server Id
        did: Database Id
        scid: Schema Id
        doid: Domain Id
    """
    res = [{'label': '', 'value': ''}]
    try:
        SQL = render_template("/".join([self.template_path,
                                        'get_collations.sql']))
        status, rset = self.conn.execute_2darray(SQL)
        if not status:
            # Bug fix: report the database error (rset); previously the
            # placeholder result list (res) was passed as the error message.
            return internal_server_error(errormsg=rset)

        for row in rset['rows']:
            res.append({'label': row['copy_collation'],
                        'value': row['copy_collation']}
                       )

        return make_json_response(
            data=res,
            status=200
        )
    except Exception as e:
        return internal_server_error(errormsg=str(e))
def update(self, gid, sid, did, lid):
    """
    Update the data for the selected language node.

    Args:
        gid: Server Group ID
        sid: Server ID
        did: Database ID
        lid: Language ID
    """
    if request.form:
        data = request.form
    else:
        data = json.loads(request.data, encoding='utf-8')

    try:
        sql, name = self.get_sql(data, lid)
        # A non-string result means get_sql already built an error response.
        if not isinstance(sql, (str, unicode)):
            return sql

        status, res = self.conn.execute_dict(sql.strip('\n').strip(' '))
        if not status:
            return internal_server_error(errormsg=res)

        return jsonify(
            node=self.blueprint.generate_browser_node(
                lid, did, name,
                icon="icon-%s" % self.node_type
            )
        )
    except Exception as e:
        return internal_server_error(errormsg=str(e))
def get_handlers(self, gid, sid, did):
    """
    Return the handlers for the selected foreign data wrapper node.

    Args:
        gid: Server Group ID
        sid: Server ID
        did: Database ID
    """
    handlers = [{'label': '', 'value': ''}]
    try:
        sql = render_template(
            "/".join([self.template_path, 'handlers.sql']),
            conn=self.conn)
        status, r_set = self.conn.execute_2darray(sql)
        if not status:
            return internal_server_error(errormsg=r_set)

        handlers.extend(
            {'label': row['fdwhan'], 'value': row['fdwhan']}
            for row in r_set['rows']
        )
        return make_json_response(
            data=handlers,
            status=200
        )
    except Exception as e:
        return internal_server_error(errormsg=str(e))
def sql(self, gid, sid, did, scid, seid):
    """
    Generate the reverse-engineered SQL for the SQL panel.

    Args:
        gid: Server Group ID
        sid: Server ID
        did: Database ID
        scid: Schema ID
        seid: Sequence ID
    """
    SQL = render_template("/".join([self.template_path,
                                    'properties.sql']),
                          scid=scid, seid=seid)
    status, res = self.conn.execute_dict(SQL)
    if not status:
        return internal_server_error(errormsg=res)

    for row in res['rows']:
        # Fetch the live sequence state and merge it into the
        # properties row.
        SQL = render_template("/".join([self.template_path,
                                        'get_def.sql']),
                              data=row)
        status, rset1 = self.conn.execute_dict(SQL)
        if not status:
            return internal_server_error(errormsg=rset1)

        row['current_value'] = rset1['rows'][0]['last_value']
        row['minimum'] = rset1['rows'][0]['min_value']
        row['maximum'] = rset1['rows'][0]['max_value']
        row['increment'] = rset1['rows'][0]['increment_by']
        row['cache'] = rset1['rows'][0]['cache_value']
        row['cycled'] = rset1['rows'][0]['is_cycled']

    # NOTE(review): res['rows'][0] raises IndexError when the sequence
    # is missing — confirm whether a gone() response is expected.
    result = res['rows'][0]
    result = self._formatter(result, scid, seid)
    SQL = self.getSQL(gid, sid, did, result, scid)
    SQL = SQL.strip('\n').strip(' ')
    return ajax_response(response=SQL)
def get_data(sid, did, template):
    """
    Generic helper to fetch server statistics via an SQL template.

    Args:
        sid: The server ID
        did: The database ID
        template: The SQL template name

    Returns:
        AJAX response with the result rows, or an error response.
    """
    # Allow no server ID to be specified (so we can generate a route in JS)
    # but throw an error if it's actually called.
    if not sid:
        return internal_server_error(errormsg='Server ID not specified.')

    query = render_template(
        "/".join([g.template_path, template]),
        did=did
    )
    status, res = g.conn.execute_dict(query)

    if status:
        return ajax_response(
            response=res['rows'],
            status=200
        )
    return internal_server_error(errormsg=res)
def get_tables(self, gid, sid, did, scid, foid=None):
    """
    Return the Foreign Tables as well as Plain Tables.

    Args:
        gid: Server Group Id
        sid: Server Id
        did: Database Id
        scid: Schema Id
        foid: Foreign Table Id
    """
    try:
        SQL = render_template("/".join(
            [self.template_path, 'get_tables.sql']),
            foid=foid, server_type=self.manager.server_type,
            show_sys_objects=self.blueprint.show_system_objects)
        status, rset = self.conn.execute_dict(SQL)
        if not status:
            # Bug fix: report the database error (rset); previously an
            # unused empty list was passed as the error message.
            return internal_server_error(errormsg=rset)

        return make_json_response(
            data=rset['rows'],
            status=200
        )
    except Exception:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        current_app.logger.error(
            traceback.print_exception(exc_type, exc_value, exc_traceback,
                                      limit=2))
        return internal_server_error(errormsg=str(exc_value))
def enable_disable_trigger(self, gid, sid, did, scid, tid, trid):
    """
    Enable OR disable the current trigger object.

    Args:
        gid: Server Group ID
        sid: Server ID
        did: Database ID
        scid: Schema ID
        tid: Table ID
        trid: Trigger ID
    """
    data = request.form if request.form else json.loads(
        request.data.decode())

    # Convert str 'true' to boolean type
    is_enable_flag = json.loads(data['enable'])

    try:
        SQL = render_template("/".join([self.template_path,
                                        'properties.sql']),
                              tid=tid, trid=trid,
                              datlastsysoid=self.datlastsysoid)

        status, res = self.conn.execute_dict(SQL)
        if not status:
            return internal_server_error(errormsg=res)

        o_data = dict(res['rows'][0])

        # If enable is set to true means we need SQL to enable
        # current trigger which is disabled already so we need to
        # alter the 'is_enable_trigger' flag so that we can render
        # correct SQL for operation
        o_data['is_enable_trigger'] = is_enable_flag

        # Adding parent into data dict, will be using it while creating sql
        o_data['schema'] = self.schema
        o_data['table'] = self.table

        SQL = render_template("/".join([self.template_path,
                                        'enable_disable_trigger.sql']),
                              data=o_data, conn=self.conn)
        status, res = self.conn.execute_scalar(SQL)
        if not status:
            return internal_server_error(errormsg=res)

        return make_json_response(
            success=1,
            info="Trigger updated",
            data={
                'id': trid,
                'tid': tid,
                'scid': scid
            }
        )
    except Exception as e:
        return internal_server_error(errormsg=str(e))
def properties(self, gid, sid, did, fid):
    """
    Show the properties of the selected foreign data wrapper node.

    Args:
        gid: Server Group ID
        sid: Server ID
        did: Database ID
        fid: foreign data wrapper ID
    """
    sql = render_template("/".join([self.template_path,
                                    'properties.sql']),
                          fid=fid, conn=self.conn)
    status, res = self.conn.execute_dict(sql)
    if not status:
        return internal_server_error(errormsg=res)

    # Split the raw option string into structured entries for the client.
    if res['rows'][0]['fdwoptions'] is not None:
        res['rows'][0]['fdwoptions'] = self.tokenize_options(
            res['rows'][0]['fdwoptions'])

    # Fetch ACLs and fold each privilege into the row, grouped by the
    # privilege's deftype.
    sql = render_template("/".join([self.template_path, 'acl.sql']),
                          fid=fid)
    status, fdw_acl_res = self.conn.execute_dict(sql)
    if not status:
        return internal_server_error(errormsg=fdw_acl_res)

    for row in fdw_acl_res['rows']:
        privilege = parse_priv_from_db(row)
        if row['deftype'] in res['rows'][0]:
            res['rows'][0][row['deftype']].append(privilege)
        else:
            res['rows'][0][row['deftype']] = [privilege]

    return ajax_response(
        response=res['rows'][0],
        status=200
    )
def update(self, gid, sid, did, scid):
    """
    Update an existing schema object.

    Args:
        gid: Server Group ID
        sid: Server ID
        did: Database ID
        scid: Schema ID
    """
    if request.form:
        data = request.form
    else:
        data = json.loads(request.data, encoding='utf-8')

    try:
        sql, name = self.get_sql(gid, sid, data, scid)
        status, res = self.conn.execute_scalar(sql.strip('\n').strip(' '))
        if not status:
            return internal_server_error(errormsg=res)

        return jsonify(
            node=self.blueprint.generate_browser_node(
                scid, did, name,
                icon="icon-%s" % self.node_type
            )
        )
    except Exception as e:
        return internal_server_error(errormsg=str(e))
def get_collations(self, gid, sid, did, scid, tid, idx=None):
    """
    Return the list of available collations via AJAX response.
    """
    res = [{'label': '', 'value': ''}]
    try:
        SQL = render_template(
            "/".join([self.template_path, 'get_collations.sql'])
        )
        status, rset = self.conn.execute_2darray(SQL)
        if not status:
            # Bug fix: report the database error (rset); previously the
            # placeholder result list (res) was passed as the error message.
            return internal_server_error(errormsg=rset)

        for row in rset['rows']:
            res.append(
                {'label': row['collation'], 'value': row['collation']}
            )

        return make_json_response(
            data=res,
            status=200
        )
    except Exception as e:
        return internal_server_error(errormsg=str(e))
def msql(self, gid, sid, did, scid, tid=None):
    """
    Generate the modified SQL for a type object.

    Args:
        gid: Server Group ID
        sid: Server ID
        did: Database ID
        scid: Schema ID
        tid: Type ID
    """
    req = request.args
    data = dict()

    # converting nested request data in proper json format
    for key, val in req.items():
        if key in ['composite', 'enum', 'seclabels', 'typacl']:
            data[key] = json.loads(val)
        else:
            data[key] = val

    try:
        SQL = self.get_sql(gid, sid, data, scid, tid)
        if SQL and SQL.strip('\n') and SQL.strip(' '):
            return make_json_response(
                data=SQL,
                status=200
            )
    except Exception as e:
        # Bug fix: the error response was built but never returned,
        # so the view silently returned None on failure.
        return internal_server_error(errormsg=str(e))
def sql(self, gid, sid, did, cid):
    """
    Generate the reverse-engineered SQL for the SQL panel.

    :param gid: group id
    :param sid: server id
    :param did: database id
    :param cid: cast id
    :return: ajax response with the SQL, or an error/gone response
    """
    try:
        ddl_query = render_template(
            "/".join([self.template_path, 'sql.sql']),
            cid=cid,
            conn=self.conn
        )
        status, res = self.conn.execute_scalar(ddl_query)

        if status and res is not None:
            return ajax_response(response=res)

        if not status:
            return internal_server_error(
                _("Could not generate reversed engineered SQL for the cast.\n\n{0}").format(
                    res
                )
            )
        # Query succeeded but produced nothing: the cast is gone.
        return gone(
            _("Could not generate reversed engineered SQL for the cast node.\n")
        )
    except Exception as e:
        return internal_server_error(errormsg=str(e))
def get_sql(self, gid, sid, did, data, cid=None):
    """
    Build create/update SQL for the cast from the model data.

    :param gid: group id
    :param sid: server id
    :param did: database id
    :param cid: cast id (None for a new cast)
    :param data: model data
    :return: SQL string on success; an error response object on failure
    """
    try:
        if cid is not None:
            # Existing cast: fetch current state, then render an UPDATE
            # diffing data against it.
            sql = render_template("/".join([self.template_path,
                                            'properties.sql']),
                                  cid=cid,
                                  datlastsysoid=self.manager.db_info[did]['datlastsysoid'],
                                  showsysobj=self.blueprint.show_system_objects)
            status, res = self.conn.execute_dict(sql)
            if not status:
                return internal_server_error(errormsg=res)

            old_data = res['rows'][0]
            sql = render_template(
                "/".join([self.template_path, 'update.sql']),
                data=data, o_data=old_data
            )
        else:
            # New cast: both source and target types are required before
            # a CREATE statement can be rendered.
            if 'srctyp' in data and 'trgtyp' in data:
                sql = render_template("/".join([self.template_path,
                                                'create.sql']),
                                      data=data, conn=self.conn)
            else:
                sql = "-- incomplete definition"
        return str(sql)
    except Exception as e:
        return internal_server_error(errormsg=str(e))
def update(self, gid, sid, did, scid, tid, rid):
    """
    Update an existing rule object.
    """
    data = request.form if request.form else \
        json.loads(request.data.decode())
    sql = self.getSQL(gid, sid, data, tid, rid)
    try:
        # An effectively empty statement means nothing changed.
        if not (sql and sql.strip('\n') and sql.strip(' ')):
            return make_json_response(
                success=1,
                info="Nothing to update",
                data={
                    'id': tid,
                    'scid': scid,
                    'did': did
                }
            )

        status, res = self.conn.execute_scalar(sql)
        if not status:
            return internal_server_error(errormsg=res)

        return make_json_response(
            success=1,
            info=gettext("Rule updated"),
            data={
                'id': tid,
                'sid': sid,
                'gid': gid,
                'did': did
            }
        )
    except Exception as e:
        return internal_server_error(errormsg=str(e))
def get_subtype_opclass(self, gid, sid, did, scid, tid=None):
    """
    Return the list of available subtype operator classes as an AJAX
    response.
    """
    res = [{'label': '', 'value': ''}]
    data = request.args
    try:
        SQL = render_template("/".join([self.template_path,
                                        'get_subtypes.sql']),
                              subtype_opclass=True, data=data)
        if SQL:
            status, rset = self.conn.execute_2darray(SQL)
            if not status:
                # Bug fix: report the database error (rset); previously
                # the placeholder result list (res) was passed instead.
                return internal_server_error(errormsg=rset)

            for row in rset['rows']:
                res.append(
                    {'label': row['opcname'], 'value': row['opcname']})

        return make_json_response(
            data=res,
            status=200
        )
    except Exception as e:
        return internal_server_error(errormsg=str(e))
def getSQL(self, gid, sid, data, did, eid=None):
    """
    Generate create/update SQL from the given model data.
    """
    required_args = [
        'name'
    ]
    try:
        if eid is None:
            # New object: render CREATE directly from the request data.
            return render_template("/".join(
                [self.template_path, 'create.sql']
            ), data=data)

        # Existing object: fetch the stored state first.
        lookup_sql = render_template("/".join(
            [self.template_path, 'properties.sql']
        ), eid=eid)
        status, res = self.conn.execute_dict(lookup_sql)
        if not status:
            return internal_server_error(errormsg=res)
        old_data = res['rows'][0]

        # Fall back to stored values for required arguments missing
        # from the request.
        for arg in required_args:
            if arg not in data:
                data[arg] = old_data[arg]

        return render_template("/".join(
            [self.template_path, 'update.sql']
        ), data=data, o_data=old_data)
    except Exception as e:
        return internal_server_error(errormsg=str(e))
def delete(self, gid, sid, did, fid):
    """
    Delete the selected foreign data wrapper node.

    Args:
        gid: Server Group ID
        sid: Server ID
        did: Database ID
        fid: foreign data wrapper ID
    """
    if self.cmd == 'delete':
        # This is a cascade operation
        cascade = True
    else:
        cascade = False

    try:
        # Get name of foreign data wrapper from fid (delete.sql doubles
        # as a lookup query when only fid is supplied).
        sql = render_template("/".join([self.template_path,
                                        'delete.sql']),
                              fid=fid, conn=self.conn
                              )
        status, name = self.conn.execute_scalar(sql)
        if not status:
            return internal_server_error(errormsg=name)

        if name is None:
            return make_json_response(
                status=410,
                success=0,
                errormsg=gettext(
                    'Error: Object not found.'
                ),
                info=gettext(
                    'The specified foreign data'
                    ' wrapper could not be found.\n'
                )
            )

        # drop foreign data wrapper node
        sql = render_template("/".join([self.template_path,
                                        'delete.sql']),
                              name=name, cascade=cascade,
                              conn=self.conn)
        status, res = self.conn.execute_scalar(sql)
        if not status:
            return internal_server_error(errormsg=res)

        return make_json_response(
            success=1,
            info=gettext("Foreign Data Wrapper dropped"),
            data={
                'id': fid,
                'did': did,
                'sid': sid,
                'gid': gid,
            }
        )
    except Exception as e:
        return internal_server_error(errormsg=str(e))
def dependents(self, gid, sid, did, fid, fsid):
    """
    Get the dependents and return an ajax response for the foreign
    server node.

    Args:
        gid: Server Group ID
        sid: Server ID
        did: Database ID
        fid: foreign data wrapper ID
        fsid: Foreign server ID
    """
    dependents_result = self.get_dependents(self.conn, fsid)

    # Fetching dependents (user mappings) of foreign servers
    query = render_template("/".join([self.template_path,
                                      'dependents.sql']),
                            fsid=fsid)
    status, result = self.conn.execute_dict(query)
    if not status:
        # Bug fix: the error response was built but never returned, so
        # execution previously fell through and iterated the error value.
        return internal_server_error(errormsg=result)

    for row in result['rows']:
        dependents_result.append(
            {'type': 'user_mapping', 'name': row['name'],
             'field': 'normal' if (row['deptype'] == 'n') else ''})

    return ajax_response(
        response=dependents_result,
        status=200
    )
def delete(self, gid, sid, did, eid):
    """
    Drop (optionally with CASCADE) an extension object.
    """
    cascade = True if self.cmd == 'delete' else False
    try:
        # check if extension with eid exists
        SQL = render_template("/".join(
            [self.template_path, 'delete.sql']), eid=eid)
        status, name = self.conn.execute_scalar(SQL)
        if not status:
            return internal_server_error(errormsg=name)

        # Robustness fix: handle a missing extension explicitly instead
        # of rendering a DROP with name=None (consistent with the other
        # delete handlers in this module).
        if name is None:
            return make_json_response(
                status=410,
                success=0,
                errormsg=gettext(
                    'Error: Object not found.'
                ),
                info=gettext(
                    'The specified extension could not be found.\n'
                )
            )

        # drop extension
        SQL = render_template("/".join(
            [self.template_path, 'delete.sql']
        ), name=name, cascade=cascade)
        status, res = self.conn.execute_scalar(SQL)
        if not status:
            return internal_server_error(errormsg=res)

        # NOTE(review): 'id' echoes did (database id), not eid — other
        # delete handlers return the object's own id; confirm intent.
        return make_json_response(
            success=1,
            info=gettext("Extension dropped"),
            data={
                'id': did,
                'sid': sid,
                'gid': gid,
            }
        )
    except Exception as e:
        return internal_server_error(errormsg=str(e))
def update(self, gid, sid, did):
    """
    Update the database.

    Performs the offline portion (rename / tablespace change) on a
    dedicated connection, then reconnects and applies the remaining
    online changes, finally re-fetching the node data for the browser.
    """
    data = self._get_data_from_request()
    # Update schema restriction in db object.
    DatabaseView._update_db_schema_res(data, did, sid)

    # Generic connection for offline updates
    conn = self.manager.connection(conn_id='db_offline_update')
    status, errmsg = conn.connect()
    if not status:
        current_app.logger.error(
            "Could not create database connection for offline updates\n"
            "Err: {0}".format(errmsg)
        )
        return internal_server_error(errmsg)

    fetching_error, err_msg = self._fetch_db_details(data, did)
    if fetching_error:
        return internal_server_error(errormsg=err_msg)

    # Release any existing connection from connection manager
    # to perform offline operation
    self.manager.release(did=did)

    all_ids = {
        'gid': gid,
        'sid': sid,
        'did': did
    }
    # Rename / tablespace change must run while no one is connected to
    # the target database, hence the offline connection above.
    is_error, errmsg = self._check_rename_db_or_change_table_space(
        data, conn, all_ids)
    if is_error:
        return internal_server_error(errmsg)

    # Make connection for database again
    connection_error, errmsg = self._reconnect_connect_db(data, did)
    if connection_error:
        return internal_server_error(errmsg)

    sql = self.get_online_sql(gid, sid, data, did)
    sql = sql.strip('\n').strip(' ')
    if sql and sql != "":
        status, msg = self.conn.execute_scalar(sql)
        if not status:
            return internal_server_error(errormsg=msg)

    # Release any existing connection from connection manager
    # used for offline updates
    self.manager.release(conn_id="db_offline_update")

    # Fetch the new data again after update for proper node
    # generation
    status, rset = self.conn.execute_dict(
        render_template(
            "/".join([self.template_path, self._NODES_SQL]),
            did=did, conn=self.conn, last_system_oid=0,
            show_system_objects=self.blueprint.show_system_objects,
        )
    )
    if not status:
        return internal_server_error(errormsg=rset)

    if len(rset['rows']) == 0:
        return gone(
            self.not_found_error_msg()
        )

    res = rset['rows'][0]

    can_drop = True
    error, errmsg, is_can_drop = self._commit_db_changes(res, can_drop)
    if error:
        return make_json_response(
            success=0,
            errormsg=errmsg
        )

    can_drop = can_dis_conn = is_can_drop

    return jsonify(
        node=self.blueprint.generate_browser_node(
            did,
            sid,
            res['name'],
            icon="pg-icon-{0}".format(self.node_type)
            if self._db['datallowconn'] and self.conn.connected()
            else "icon-database-not-connected",
            connected=self.conn.connected()
            if self._db['datallowconn'] else False,
            tablespace=res['spcname'],
            allowConn=res['datallowconn'],
            canCreate=res['cancreate'],
            canDisconn=can_dis_conn,
            canDrop=can_drop,
            inode=True if res['datallowconn'] else False
        )
    )
def create(self, gid, sid, did, scid, tid, exid=None):
    """
    Create an Exclusion constraint.

    Args:
        gid: Server Group ID
        sid: Server ID
        did: Database ID
        scid: Schema ID
        tid: Table ID
        exid: Exclusion constraint ID

    Returns:
        Browser-node JSON on success; 400/500 responses on failure.
    """
    required_args = ['columns']

    data = request.form if request.form else json.loads(request.data,
                                                        encoding='utf-8')

    for k, v in data.items():
        try:
            data[k] = json.loads(v, encoding='utf-8')
        except (ValueError, TypeError, KeyError):
            data[k] = v

    for arg in required_args:
        if arg not in data:
            return make_json_response(
                status=400,
                success=0,
                errormsg=_(
                    "Could not find required parameter (%s)." % str(arg)))
        elif isinstance(data[arg], list) and len(data[arg]) < 1:
            return make_json_response(
                status=400,
                success=0,
                errormsg=_(
                    "Could not find required parameter (%s)." % str(arg)))

    data['schema'] = self.schema
    data['table'] = self.table
    try:
        # When no name was supplied, the constraint name is generated by
        # the server, so a transaction is opened to read it back safely.
        if 'name' not in data or data['name'] == "":
            SQL = render_template("/".join(
                [self.template_path, 'begin.sql']))
            # Start transaction.
            status, res = self.conn.execute_scalar(SQL)
            if not status:
                self.end_transaction()
                return internal_server_error(errormsg=res)

        # The below SQL will execute CREATE DDL only
        SQL = render_template("/".join([self.template_path,
                                        'create.sql']),
                              data=data, conn=self.conn)
        status, res = self.conn.execute_scalar(SQL)
        if not status:
            self.end_transaction()
            return internal_server_error(errormsg=res)

        if 'name' not in data or data['name'] == "":
            sql = render_template("/".join(
                [self.template_path, 'get_oid_with_transaction.sql']),
                tid=tid)
            status, res = self.conn.execute_dict(sql)
            if not status:
                self.end_transaction()
                return internal_server_error(errormsg=res)

            self.end_transaction()
            data['name'] = res['rows'][0]['name']
        else:
            sql = render_template("/".join(
                [self.template_path, 'get_oid.sql']),
                name=data['name'])
            status, res = self.conn.execute_dict(sql)
            if not status:
                self.end_transaction()
                return internal_server_error(errormsg=res)

        return jsonify(node=self.blueprint.generate_browser_node(
            res['rows'][0]['oid'],
            tid,
            data['name'],
            icon="icon-exclusion_constraint"))
    except Exception as e:
        self.end_transaction()
        return make_json_response(
            status=400,
            success=0,
            errormsg=str(e))
def create(self, gid, sid):
    """
    Create a new database on the given server.

    Executes CREATE DATABASE first (it cannot be combined with other
    statements), then applies grants, fetches the new database's
    properties, and records it in the internal Database table.
    """
    required_args = [
        'name'
    ]

    data = request.form if request.form else json.loads(
        request.data, encoding='utf-8'
    )

    for arg in required_args:
        if arg not in data:
            return make_json_response(
                status=410,
                success=0,
                errormsg=_(
                    "Could not find the required parameter ({})."
                ).format(arg)
            )
    # The below SQL will execute CREATE DDL only
    SQL = render_template(
        "/".join([self.template_path, self._CREATE_SQL]),
        data=data, conn=self.conn
    )
    status, msg = self.conn.execute_scalar(SQL)
    if not status:
        return internal_server_error(errormsg=msg)

    if 'datacl' in data:
        data['datacl'] = parse_priv_to_db(data['datacl'], 'DATABASE')

    # The below SQL will execute rest DMLs because we cannot execute
    # CREATE with any other
    SQL = render_template(
        "/".join([self.template_path, self._GRANT_SQL]),
        data=data, conn=self.conn
    )
    SQL = SQL.strip('\n').strip(' ')
    if SQL and SQL != "":
        status, msg = self.conn.execute_scalar(SQL)
        if not status:
            return internal_server_error(errormsg=msg)

    # We need oid of newly created database
    SQL = render_template(
        "/".join([self.template_path, self._PROPERTIES_SQL]),
        name=data['name'], conn=self.conn, last_system_oid=0,
        show_system_objects=self.blueprint.show_system_objects,
    )
    SQL = SQL.strip('\n').strip(' ')
    if SQL and SQL != "":
        status, res = self.conn.execute_dict(SQL)
        if not status:
            return internal_server_error(errormsg=res)

    response = res['rows'][0]
    # Add database entry into database table with schema_restrictions.
    # Robustness fix: 'schema_res' is optional in the request payload;
    # previously a missing key raised an unhandled KeyError (HTTP 500).
    database = Database(id=response['did'], server=sid,
                        schema_res=','.join(data.get('schema_res', [])))
    db.session.add(database)
    db.session.commit()

    return jsonify(
        node=self.blueprint.generate_browser_node(
            response['did'],
            sid,
            response['name'],
            icon="icon-database-not-connected",
            connected=False,
            tablespace=response['default_tablespace'],
            allowConn=True,
            canCreate=response['cancreate'],
            canDisconn=True,
            canDrop=True
        )
    )
def get_sql(conn, data, did, tid, idx, datlastsysoid,
            mode=None, template_path=None):
    """
    Generate create/update SQL for an index from model data.

    :param conn: Connection Object
    :param data: Data
    :param did: Database ID
    :param tid: Table ID
    :param idx: Index ID (None for a new index)
    :param datlastsysoid: Last system OID of the database
    :param mode: Optional render mode passed to create.sql
    :param template_path: Optional template path
    :return: (SQL string, name) tuple on success; an incomplete-definition
             marker tuple when required arguments are missing.
    :raises ObjectGone: when idx refers to a non-existent index.
    """
    name = data['name'] if 'name' in data else None
    if idx is not None:
        SQL = render_template("/".join([template_path, 'properties.sql']),
                              did=did, tid=tid, idx=idx,
                              datlastsysoid=datlastsysoid)
        status, res = conn.execute_dict(SQL)
        if not status:
            return internal_server_error(errormsg=res)

        if len(res['rows']) == 0:
            raise ObjectGone(_('Could not find the index in the table.'))

        old_data = dict(res['rows'][0])

        # If name is not present in data then
        # we will fetch it from old data, we also need schema & table name
        if 'name' not in data:
            name = data['name'] = old_data['name']

        SQL = render_template(
            "/".join([template_path, 'update.sql']),
            data=data, o_data=old_data, conn=conn
        )
    else:
        required_args = {
            'name': 'Name',
            'columns': 'Columns'
        }
        for arg in required_args:
            err = False
            # Bug fix: presence must be checked BEFORE indexing
            # data['columns']; the original checked the length first and
            # raised KeyError when 'columns' was absent.
            if arg not in data:
                err = True
            elif arg == 'columns' and len(data['columns']) < 1:
                # Check if we have at least one column
                err = True
            if err:
                return _('-- definition incomplete'), name

        # If the request for new object which do not have did
        SQL = render_template(
            "/".join([template_path, 'create.sql']),
            data=data, conn=conn, mode=mode
        )
        SQL += "\n"
        SQL += render_template(
            "/".join([template_path, 'alter.sql']),
            data=data, conn=conn
        )

    return SQL, name
def delete(self, gid, sid, did, scid, tid, **kwargs):
    """
    Drop one or more indexes of the given table.

    Args:
        gid: Server Group ID
        sid: Server ID
        did: Database ID
        scid: Schema ID
        tid: Table ID
        **kwargs:
            idx: single Index ID (otherwise ids come from the request)
            only_sql: when True, return the DROP SQL instead of running it
    """
    idx = kwargs.get('idx', None)
    only_sql = kwargs.get('only_sql', False)

    if idx is None:
        data = request.form if request.form else json.loads(
            request.data, encoding='utf-8')
    else:
        data = {'ids': [idx]}

    # Below will decide if it's simple drop or drop with cascade call
    cascade = self._check_cascade_operation()

    try:
        # NOTE(review): `data` is rebound to the fetched row inside this
        # loop; iteration still walks the original data['ids'] list
        # captured at loop start, but the aliasing is fragile — confirm.
        for idx in data['ids']:
            # We will first fetch the index name for current request
            # so that we create template for dropping index
            SQL = render_template(
                "/".join([self.template_path, self._PROPERTIES_SQL]),
                did=did, tid=tid, idx=idx,
                datlastsysoid=self._DATABASE_LAST_SYSTEM_OID)
            status, res = self.conn.execute_dict(SQL)
            if not status:
                return internal_server_error(errormsg=res)
            elif not res['rows']:
                return make_json_response(
                    success=0,
                    errormsg=gettext('Error: Object not found.'),
                    info=self.not_found_error_msg())

            data = dict(res['rows'][0])

            SQL = render_template("/".join(
                [self.template_path, self._DELETE_SQL]),
                data=data, conn=self.conn, cascade=cascade)
            if only_sql:
                return SQL
            status, res = self.conn.execute_scalar(SQL)
            if not status:
                return internal_server_error(errormsg=res)

        return make_json_response(
            success=1,
            info=gettext("Index is dropped"))
    except Exception as e:
        return internal_server_error(errormsg=str(e))
def create(self, gid, sid, did, scid, tid):
    """
    This function will create a new index object.

    Args:
        gid: Server Group ID
        sid: Server ID
        did: Database ID
        scid: Schema ID
        tid: Table ID
    """
    data = request.form if request.form else json.loads(request.data,
                                                        encoding='utf-8')
    for k, v in data.items():
        try:
            # comments should be taken as is because if user enters a
            # json comment it is parsed by loads which should not happen
            if k in ('description',):
                data[k] = v
            else:
                data[k] = json.loads(v, encoding='utf-8')
        except (ValueError, TypeError, KeyError):
            data[k] = v

    required_args = {
        'name': 'Name',
        'columns': 'Columns'
    }
    is_error, err_msg = IndexesView._check_for_error(required_args, data)
    if is_error:
        return make_json_response(status=410, success=0,
                                  errormsg=gettext(err_msg))

    # Adding parent into data dict, will be using it while creating sql
    data['schema'] = self.schema
    data['table'] = self.table
    if len(data['table']) == 0:
        return gone(gettext(self.not_found_error_msg('Table')))

    # If user chooses concurrent index then we cannot run it inside a
    # transaction block, so only non-concurrent creation is wrapped in
    # an explicit transaction.
    # Bug fix: the original tested hasattr(data, "isconcurrent"), which
    # is always False for a dict (keys are not attributes), so the
    # BEGIN/END wrapping never actually ran.
    use_transaction = not data.get('isconcurrent')

    try:
        if use_transaction:
            # Start transaction.
            self.conn.execute_scalar("BEGIN;")

        SQL = render_template("/".join(
            [self.template_path, self._CREATE_SQL]),
            data=data, conn=self.conn, mode='create')
        status, res = self.conn.execute_scalar(SQL)
        if not status:
            if use_transaction:
                # End transaction.
                self.conn.execute_scalar("END;")
            return internal_server_error(errormsg=res)

        # If user chooses concurrent index then we cannot run it along
        # with other alter statements so we will separate alter index part
        SQL = render_template("/".join(
            [self.template_path, self._ALTER_SQL]),
            data=data, conn=self.conn)
        SQL = SQL.strip('\n').strip(' ')
        if SQL != '':
            status, res = self.conn.execute_scalar(SQL)
            if not status:
                if use_transaction:
                    # End transaction.
                    self.conn.execute_scalar("END;")
                return internal_server_error(errormsg=res)

        # we need oid to add object in tree at browser
        SQL = render_template("/".join([self.template_path,
                                        self._OID_SQL]),
                              tid=tid, data=data)
        status, idx = self.conn.execute_scalar(SQL)
        if not status:
            if use_transaction:
                # End transaction.
                self.conn.execute_scalar("END;")
            # Bug fix: on failure 'idx' holds the error message;
            # the original reported the table id ('tid') instead.
            return internal_server_error(errormsg=idx)

        if use_transaction:
            # End transaction.
            self.conn.execute_scalar("END;")
        return jsonify(node=self.blueprint.generate_browser_node(
            idx, tid, data['name'], icon="icon-index"))
    except Exception as e:
        if use_transaction:
            # End transaction.
            self.conn.execute_scalar("END;")
        return internal_server_error(errormsg=str(e))
def sql(self, gid, sid, jid):
    """
    This function will generate sql for sql panel

    Args:
        gid: Server Group ID
        sid: Server ID
        jid: pgAgent Job ID
    """
    SQL = render_template(
        "/".join([self.template_path, self._PROPERTIES_SQL]),
        jid=jid, conn=self.conn, last_system_oid=0
    )
    status, res = self.conn.execute_dict(SQL)
    if not status:
        return internal_server_error(errormsg=res)

    if len(res['rows']) == 0:
        return gone(
            _("Could not find the object on the server.")
        )

    row = res['rows'][0]

    # Fetch the job steps; has_connstr flags whether the pgAgent catalog
    # supports per-step connection strings.
    status, res = self.conn.execute_dict(
        render_template(
            "/".join([self.template_path, 'steps.sql']),
            jid=jid, conn=self.conn,
            has_connstr=self.manager.db_info['pgAgent']['has_connstr']
        )
    )
    if not status:
        return internal_server_error(errormsg=res)
    row['jsteps'] = res['rows']

    # Fetch the job schedules.
    status, res = self.conn.execute_dict(
        render_template(
            "/".join([self.template_path, 'schedules.sql']),
            jid=jid, conn=self.conn
        )
    )
    if not status:
        return internal_server_error(errormsg=res)

    row['jschedules'] = res['rows']
    # Re-shape the parallel jexid/jexdate/jextime arrays into the list
    # of exception objects expected by the create template.
    for schedule in row['jschedules']:
        schedule['jscexceptions'] = []
        if schedule['jexid']:
            idx = 0
            for exc in schedule['jexid']:
                # Convert datetime.time object to string
                if isinstance(schedule['jextime'][idx], time):
                    schedule['jextime'][idx] = \
                        schedule['jextime'][idx].strftime("%H:%M:%S")
                schedule['jscexceptions'].append({
                    'jexid': exc,
                    'jexdate': schedule['jexdate'][idx],
                    'jextime': schedule['jextime'][idx]
                })
                idx += 1

        # Drop the raw arrays now that they are folded into
        # jscexceptions.
        del schedule['jexid']
        del schedule['jexdate']
        del schedule['jextime']

    return ajax_response(
        response=render_template(
            "/".join([self.template_path, self._CREATE_SQL]),
            jid=jid, data=row, conn=self.conn, fetch_id=False,
            has_connstr=self.manager.db_info['pgAgent']['has_connstr']
        )
    )
def create(self, gid, sid, did):
    """
    This function will create the new cast object.

    :param gid: group id
    :param sid: server id
    :param did: database id
    :return: json response containing the new browser node on success
    """
    required_args = [
        'srctyp',
        'trgtyp'
    ]
    data = request.form if request.form else json.loads(
        request.data, encoding='utf-8'
    )
    for arg in required_args:
        if arg not in data:
            return make_json_response(
                status=410,
                success=0,
                # Bug fix: translate the template first, then substitute
                # the argument name; formatting inside gettext() defeated
                # the message-catalog lookup.
                errormsg=gettext(
                    "Could not find the required parameter (%s)."
                ) % arg
            )
    try:
        sql = render_template("/".join([self.template_path, 'create.sql']),
                              data=data,
                              conn=self.conn,
                              )
        status, res = self.conn.execute_scalar(sql)
        if not status:
            return internal_server_error(errormsg=res)

        # we need oid to add object in tree at browser, below sql will
        # give the same
        last_system_oid = 0 if self.blueprint.show_system_objects else \
            (self.manager.db_info[did])['datlastsysoid'] \
            if self.manager.db_info is not None and \
            did in self.manager.db_info else 0

        sql = render_template(
            "/".join([self.template_path, 'properties.sql']),
            srctyp=data['srctyp'],
            trgtyp=data['trgtyp'],
            datlastsysoid=last_system_oid,
            showsysobj=self.blueprint.show_system_objects
        )
        status, cid = self.conn.execute_scalar(sql)
        if not status:
            return internal_server_error(errormsg=cid)

        # NOTE(review): 'name' is not in required_args; presumably the
        # client always supplies it — confirm, else this raises KeyError.
        return jsonify(
            node=self.blueprint.generate_browser_node(
                cid,
                did,
                data['name'],
                icon="icon-cast"
            )
        )
    except Exception as e:
        return internal_server_error(errormsg=str(e))
def get_sql(self, gid, sid, did, scid, data, tid=None):
    """
    This function will return SQL for model data

    :param gid: group id
    :param sid: server id
    :param did: database id
    :param scid: schema id
    :param data: model data for the FTS template
    :param tid: fts template id (None when generating CREATE sql)
    """
    # Fetch sql for update
    if tid is not None:
        sql = render_template("/".join(
            [self.template_path, 'properties.sql']), tid=tid, scid=scid)
        status, res = self.conn.execute_dict(sql)
        if not status:
            return internal_server_error(errormsg=res)
        if len(res['rows']) == 0:
            return gone(
                gettext("Could not find the requested FTS template."))

        old_data = res['rows'][0]
        if 'schema' not in data:
            data['schema'] = old_data['schema']

        # If user has changed the schema then fetch new schema directly
        # using its oid otherwise fetch old schema name using
        # fts template oid
        sql = render_template("/".join([self.template_path,
                                        'schema.sql']), data=data)
        status, new_schema = self.conn.execute_scalar(sql)
        if not status:
            return internal_server_error(errormsg=new_schema)

        # Replace schema oid with schema name
        new_data = data.copy()
        if 'schema' in new_data:
            new_data['schema'] = new_schema

        # Fetch old schema name using old schema oid
        sql = render_template("/".join([self.template_path,
                                        'schema.sql']), data=old_data)
        status, old_schema = self.conn.execute_scalar(sql)
        if not status:
            return internal_server_error(errormsg=old_schema)

        # Replace old schema oid with old schema name
        old_data['schema'] = old_schema

        sql = render_template("/".join([self.template_path,
                                        'update.sql']),
                              data=new_data, o_data=old_data)
        # Fetch sql query for modified data
        if 'name' in data:
            return sql.strip('\n'), data['name']

        return sql.strip('\n'), old_data['name']
    else:
        # Fetch schema name from schema oid
        sql = render_template("/".join([self.template_path,
                                        'schema.sql']), data=data)
        status, schema = self.conn.execute_scalar(sql)
        if not status:
            return internal_server_error(errormsg=schema)

        # Replace schema oid with schema name
        new_data = data.copy()
        new_data['schema'] = schema

        # Render CREATE sql only when all the mandatory fields are
        # present; otherwise return a placeholder comment.
        if ('tmpllexize' in new_data and
                'name' in new_data and
                'schema' in new_data):
            sql = render_template("/".join(
                [self.template_path, 'create.sql']),
                data=new_data, conn=self.conn)
        else:
            sql = u"-- definition incomplete"
        return sql.strip('\n'), data['name']
def get_column_details(conn, idx, data, mode='properties',
                       template_path=None):
    """
    Fetch the column list for an index and attach it to *data*.

    :param conn: Connection Object
    :param idx: Index ID
    :param data: Data dict to be enriched in place
    :param mode: 'create' or 'properties'
    :param template_path: Optional template path
    :return: the same *data* dict, with 'columns' (list of dicts for the
        ColumnsModel collection) and 'columns_csv' (display string) set
    """
    query = render_template(
        "/".join([template_path, 'column_details.sql']), idx=idx
    )
    status, result = conn.execute_2darray(query)
    if not status:
        return internal_server_error(errormsg=result)

    # ASC/DESC and NULLS ordering options only apply to btree indexes.
    is_btree = 'amname' in data and data['amname'] == 'btree'

    collection = []
    display = []
    for row in result['rows']:
        # Collection entry for the ColumnsModel. 'attdef' comes quoted
        # from the query; keep the quotes only in 'create' mode where the
        # value is reused in SQL.
        column = {
            'colname': row['attdef'] if mode == 'create'
            else row['attdef'].strip('"'),
            'collspcname': row['collnspname'],
            'op_class': row['opcname'],
        }
        if is_btree:
            # Switch rendering: ASC -> False, DESC -> True.
            column['sort_order'] = row['options'][0] == 'DESC'
            column['nulls'] = row['options'][1].split(" ")[1] == 'FIRST'
        collection.append(column)

        # Human-readable form for the properties window.
        parts = [row['attdef']]
        if row['collnspname']:
            parts.append('COLLATE ' + row['collnspname'])
        if row['opcname']:
            parts.append(row['opcname'])
        if is_btree:
            # Append sort order, then the NULLS clause.
            parts.append(row['options'][0])
            parts.append(row['options'][1])
        display.append(' '.join(parts))

    # Push as collection
    data['columns'] = collection
    # Push as string, comma separated when multiple columns
    data['columns_csv'] = ', '.join(display)

    return data
def sql(self, gid, sid, did):
    """
    This function will generate sql for sql panel

    Args:
        gid: Server Group ID
        sid: Server ID
        did: Database ID
    """
    # NOTE(review): a maintenance connection 'conn' is fetched here but
    # several queries below still go through self.conn — looks
    # intentional (the target database itself may not be connectable);
    # confirm before unifying.
    conn = self.manager.connection()
    SQL = render_template(
        "/".join([self.template_path, self._PROPERTIES_SQL]),
        did=did, conn=conn, last_system_oid=0,
        show_system_objects=False,
    )
    status, res = conn.execute_dict(SQL)
    if not status:
        return internal_server_error(errormsg=res)

    if len(res['rows']) == 0:
        return gone(
            self.not_found_error_msg()
        )

    SQL = render_template(
        "/".join([self.template_path, self._ACL_SQL]),
        did=did, conn=self.conn
    )
    status, dataclres = self.conn.execute_dict(SQL)
    if not status:
        return internal_server_error(errormsg=dataclres)
    res = self.formatdbacl(res, dataclres['rows'])

    SQL = render_template(
        "/".join([self.template_path, 'defacl.sql']),
        did=did, conn=self.conn
    )
    status, defaclres = self.conn.execute_dict(SQL)
    if not status:
        return internal_server_error(errormsg=defaclres)

    res = self.formatdbacl(res, defaclres['rows'])

    result = res['rows'][0]

    SQL = render_template(
        "/".join([self.template_path, 'get_variables.sql']),
        did=did, conn=self.conn
    )
    status, res1 = self.conn.execute_dict(SQL)
    if not status:
        return internal_server_error(errormsg=res1)

    # Get Formatted Security Labels
    if 'seclabels' in result:
        # Security Labels is not available for PostgreSQL <= 9.1
        frmtd_sec_labels = parse_sec_labels_from_db(result['seclabels'])
        result.update(frmtd_sec_labels)

    # Get Formatted Variables
    frmtd_variables = parse_variables_from_db(res1['rows'])
    result.update(frmtd_variables)

    # Header naming the database plus a commented DROP statement.
    sql_header = "-- Database: {0}\n\n-- ".format(result['name'])
    sql_header += render_template(
        "/".join([self.template_path, self._DELETE_SQL]),
        datname=result['name'], conn=conn
    )

    SQL = self.get_new_sql(gid, sid, result, did)
    # Collapse runs of blank lines left behind by the templates.
    SQL = re.sub('\n{2,}', '\n\n', SQL)
    SQL = sql_header + '\n' + SQL
    SQL = SQL.strip('\n')

    return ajax_response(response=SQL)
def sql(self, gid, sid, did, scid, tid, trid):
    """
    This function generates the reverse engineered sql for the trigger
    object.

    Args:
        gid: Server Group ID
        sid: Server ID
        did: Database ID
        scid: Schema ID
        tid: Table ID
        trid: Trigger ID
    """
    SQL = render_template("/".join([self.template_path,
                                    'properties.sql']),
                          tid=tid, trid=trid,
                          datlastsysoid=self.datlastsysoid)

    status, res = self.conn.execute_dict(SQL)
    if not status:
        return internal_server_error(errormsg=res)

    if len(res['rows']) == 0:
        return gone(
            gettext("""Could not find the trigger in the table."""))

    data = dict(res['rows'][0])
    # Adding parent into data dict, will be using it while creating sql
    data['schema'] = self.schema
    data['table'] = self.table
    data = self.get_trigger_function_schema(data)

    if len(data['custom_tgargs']) > 1:
        # We know that trigger has more than 1 argument, let's join them
        data['tgargs'] = self._format_args(data['custom_tgargs'])

    if len(data['tgattr']) >= 1:
        # tgattr holds space-separated column numbers; resolve to names.
        columns = ', '.join(data['tgattr'].split(' '))
        data['columns'] = self._column_details(tid, columns)

    data = self._trigger_definition(data)

    SQL, name = self.get_sql(scid, tid, None, data)

    # Header naming the trigger plus a commented DROP statement.
    sql_header = u"-- Trigger: {0}\n\n-- ".format(data['name'])
    sql_header += render_template("/".join(
        [self.template_path, 'delete.sql']),
        data=data, conn=self.conn)

    SQL = sql_header + '\n\n' + SQL.strip('\n')

    # If trigger is disabled then add sql code for the same
    if not data['is_enable_trigger']:
        SQL += '\n\n'
        SQL += render_template("/".join(
            [self.template_path, 'enable_disable_trigger.sql']),
            data=data, conn=self.conn)

    return ajax_response(response=SQL)
def enable_disable_trigger(self, gid, sid, did, scid, tid, trid):
    """
    Enable or disable the given trigger object.

    Args:
        gid: Server Group ID
        sid: Server ID
        did: Database ID
        scid: Schema ID
        tid: Table ID
        trid: Trigger ID
    """
    payload = request.form if request.form \
        else json.loads(request.data, encoding='utf-8')

    # 'enable' arrives as the string 'true'/'false'; parse to a bool.
    enable_trigger = json.loads(payload['enable'])

    try:
        props_sql = render_template(
            "/".join([self.template_path, 'properties.sql']),
            tid=tid, trid=trid,
            datlastsysoid=self.datlastsysoid)
        status, res = self.conn.execute_dict(props_sql)
        if not status:
            return internal_server_error(errormsg=res)

        if len(res['rows']) == 0:
            return gone(
                gettext("Could not find the trigger in the table."))

        trigger = dict(res['rows'][0])

        # Render the ALTER ... ENABLE/DISABLE statement against the
        # desired state rather than the current one.
        trigger['is_enable_trigger'] = enable_trigger

        # The template also needs the parent schema/table names.
        trigger['schema'] = self.schema
        trigger['table'] = self.table

        toggle_sql = render_template(
            "/".join([self.template_path, 'enable_disable_trigger.sql']),
            data=trigger, conn=self.conn)
        status, res = self.conn.execute_scalar(toggle_sql)

        if not status:
            return internal_server_error(errormsg=res)

        return make_json_response(
            success=1,
            info="Trigger updated",
            data={'id': trid, 'tid': tid, 'scid': scid}
        )

    except Exception as e:
        return internal_server_error(errormsg=str(e))
def properties(self, gid, sid, did):
    """
    Return the properties of the selected database as a JSON response.

    Args:
        gid: Server Group ID
        sid: Server ID
        did: Database ID
    """
    SQL = render_template(
        "/".join([self.template_path, self._PROPERTIES_SQL]),
        did=did, conn=self.conn, last_system_oid=0,
        show_system_objects=self.blueprint.show_system_objects,
    )
    status, res = self.conn.execute_dict(SQL)
    if not status:
        return internal_server_error(errormsg=res)

    if len(res['rows']) == 0:
        return gone(
            self.not_found_error_msg()
        )

    SQL = render_template(
        "/".join([self.template_path, self._ACL_SQL]),
        did=did, conn=self.conn
    )
    status, dataclres = self.conn.execute_dict(SQL)
    if not status:
        # Bug fix: report the failing ACL query result, not the earlier
        # (successful) properties result 'res'.
        return internal_server_error(errormsg=dataclres)
    res = self.formatdbacl(res, dataclres['rows'])

    SQL = render_template(
        "/".join([self.template_path, 'defacl.sql']),
        did=did, conn=self.conn
    )
    status, defaclres = self.conn.execute_dict(SQL)
    if not status:
        # Bug fix: likewise report the default-ACL query failure.
        return internal_server_error(errormsg=defaclres)

    res = self.formatdbacl(res, defaclres['rows'])

    result = res['rows'][0]
    result['is_sys_obj'] = (
        result['oid'] <= self.datlastsysoid or self.datistemplate)

    # Fetching variable for database
    SQL = render_template(
        "/".join([self.template_path, 'get_variables.sql']),
        did=did, conn=self.conn
    )
    status, res1 = self.conn.execute_dict(SQL)

    database = Database.query.filter_by(id=did, server=sid).first()

    if database:
        result['schema_res'] = database.schema_res.split(
            ',') if database.schema_res else []

    if not status:
        return internal_server_error(errormsg=res1)

    # Get Formatted Security Labels
    if 'seclabels' in result:
        # Security Labels is not available for PostgreSQL <= 9.1
        frmtd_sec_labels = parse_sec_labels_from_db(result['seclabels'])
        result.update(frmtd_sec_labels)

    # Get Formatted Variables
    frmtd_variables = parse_variables_from_db(res1['rows'])
    result.update(frmtd_variables)

    return ajax_response(
        response=result,
        status=200
    )
def update(self, gid, sid, did, scid, tid, cid):
    """
    Updates the Check Constraint object.

    Args:
        gid: Server Group Id
        sid: Server Id
        did: Database Id
        scid: Schema Id
        tid: Table Id
        cid: Check Constraint Id
    """
    data = request.form if request.form else json.loads(
        request.data.decode())

    try:
        # The sql templates need the parent schema/table names.
        data['schema'] = self.schema
        data['table'] = self.table

        SQL = self.get_sql(gid, sid, data, scid, tid, cid)
        SQL = SQL.strip('\n').strip(' ')
        if SQL != "":
            status, res = self.conn.execute_scalar(SQL)
            if not status:
                return internal_server_error(errormsg=res)

            sql = render_template("/".join(
                [self.template_path, 'get_name.sql']), cid=cid)
            status, res = self.conn.execute_dict(sql)
            if not status:
                return internal_server_error(errormsg=res)

            if "convalidated" in res['rows'][0] and res['rows'][0][
                    "convalidated"]:
                # Bug fix: the icon name contained a stray ')'
                # ('icon-check_constraints_)bad'), breaking the
                # browser-tree icon for invalid constraints.
                icon = 'icon-check_constraints_bad'
                valid = False
            else:
                icon = 'icon-check_constraints'
                valid = True

            return make_json_response(
                success=1,
                info="Check Constraint updated",
                data={
                    'id': cid,
                    'tid': tid,
                    'scid': scid,
                    'sid': sid,
                    'gid': gid,
                    'did': did,
                    'icon': icon,
                    # Bug fix: key was misspelled 'val id', so clients
                    # never received the validity flag.
                    'valid': valid
                }
            )
        else:
            return make_json_response(
                success=1,
                info="Nothing to update",
                data={
                    'id': cid,
                    'tid': tid,
                    'scid': scid,
                    'sid': sid,
                    'gid': gid,
                    'did': did
                }
            )

    except Exception as e:
        return internal_server_error(errormsg=str(e))
def all_exception_handler(e):
    """Log an unhandled exception (with traceback) and return a 500
    JSON error response carrying its message."""
    message = str(e)
    current_app.logger.error(e, exc_info=True)
    return internal_server_error(errormsg=message)
def create(self, gid, sid, did, scid, tid, cid=None):
    """
    This function will create a check constraint.

    Args:
        gid: Server Group ID
        sid: Server ID
        did: Database ID
        scid: Schema ID
        tid: Table ID
        cid: Check constraint ID

    Returns:
        json response containing the new browser node on success
    """
    required_args = ['consrc']

    data = request.form if request.form else json.loads(
        request.data.decode())

    for k, v in data.items():
        try:
            data[k] = json.loads(v)
        except (ValueError, TypeError):
            data[k] = v

    for arg in required_args:
        if arg not in data or data[arg] == '':
            return make_json_response(
                status=400,
                success=0,
                # Bug fix: translate first, then substitute the argument
                # name; formatting inside _() defeated the catalog
                # lookup.
                errormsg=_("Couldn't find the required parameter (%s).")
                % arg)

    # The sql templates need the parent schema/table names.
    data['schema'] = self.schema
    data['table'] = self.table

    try:
        if 'name' not in data or data['name'] == "":
            # Unnamed constraint: wrap in a transaction so the
            # server-generated name can be fetched before committing.
            SQL = "BEGIN;"
            # Start transaction.
            status, res = self.conn.execute_scalar(SQL)
            if not status:
                self.end_transaction()
                return internal_server_error(errormsg=res)

        # The below SQL will execute CREATE DDL only
        SQL = render_template("/".join([self.template_path,
                                        'create.sql']), data=data)

        status, msg = self.conn.execute_scalar(SQL)
        if not status:
            self.end_transaction()
            return internal_server_error(errormsg=msg)

        if 'name' not in data or data['name'] == "":
            sql = render_template("/".join(
                [self.template_path, 'get_oid_with_transaction.sql'],
            ), tid=tid)

            status, res = self.conn.execute_dict(sql)
            if not status:
                self.end_transaction()
                return internal_server_error(errormsg=res)

            self.end_transaction()

            data['name'] = res['rows'][0]['name']

        else:
            sql = render_template("/".join(
                [self.template_path, 'get_oid.sql']),
                tid=tid,
                name=data['name'])
            status, res = self.conn.execute_dict(sql)
            if not status:
                self.end_transaction()
                return internal_server_error(errormsg=res)

        if "convalidated" in res['rows'][0] and res['rows'][0][
                "convalidated"]:
            icon = "icon-check_constraints_bad"
            valid = False
        else:
            icon = "icon-check_constraints"
            valid = True

        return jsonify(node=self.blueprint.generate_browser_node(
            res['rows'][0]['oid'],
            tid,
            data['name'],
            icon=icon,
            valid=valid))

    except Exception as e:
        self.end_transaction()
        # Bug fix: pass the message, not the exception object, so the
        # JSON response serializes cleanly.
        return make_json_response(status=400, success=0,
                                  errormsg=str(e))
def update(self, gid, sid, did, scid, tid, trid):
    """
    Update the existing trigger object.

    Args:
        gid: Server Group ID
        sid: Server ID
        did: Database ID
        scid: Schema ID
        tid: Table ID
        trid: Trigger ID
    """
    payload = request.form if request.form \
        else json.loads(request.data, encoding='utf-8')

    try:
        # The sql templates need the parent schema/table names.
        payload['schema'] = self.schema
        payload['table'] = self.table

        sql, name = self.get_sql(scid, tid, trid, payload)
        # get_sql returns an error response object instead of a string
        # when something went wrong; forward it as-is.
        if not isinstance(sql, (str, unicode)):
            return sql
        sql = sql.strip('\n').strip(' ')

        status, res = self.conn.execute_scalar(sql)
        if not status:
            return internal_server_error(errormsg=res)

        # We need oid to add object in browser tree and if user
        # update the trigger then new OID is getting generated
        # so we need to return new OID of trigger.
        oid_sql = render_template(
            "/".join([self.template_path, 'get_oid.sql']),
            tid=tid, data=payload)
        status, new_trid = self.conn.execute_scalar(oid_sql)
        if not status:
            return internal_server_error(errormsg=new_trid)

        # Fetch updated properties
        props_sql = render_template(
            "/".join([self.template_path, 'properties.sql']),
            tid=tid, trid=new_trid,
            datlastsysoid=self.datlastsysoid)
        status, res = self.conn.execute_dict(props_sql)
        if not status:
            return internal_server_error(errormsg=res)

        if len(res['rows']) == 0:
            return gone(
                gettext("Could not find the trigger in the table."))

        # Making copy of output for future use
        updated = dict(res['rows'][0])

        # Disabled triggers get the '-bad' icon variant.
        icon = "icon-%s" % self.node_type \
            if updated['is_enable_trigger'] \
            else "icon-%s-bad" % self.node_type

        return jsonify(node=self.blueprint.generate_browser_node(
            new_trid, tid, name, icon=icon))
    except Exception as e:
        return internal_server_error(errormsg=str(e))
def cancel_transaction(trans_id):
    """
    This method is used to cancel the running transaction

    Args:
        trans_id: unique transaction id
    """
    if 'gridData' not in session:
        return make_json_response(
            success=0,
            errormsg=gettext('Transaction ID not found in the session.'),
            info='DATAGRID_TRANSACTION_REQUIRED', status=404)

    grid_data = session['gridData']

    # Return from the function if transaction id not found
    if str(trans_id) not in grid_data:
        return make_json_response(
            success=0,
            errormsg=gettext('Transaction ID not found in the session.'),
            info='DATAGRID_TRANSACTION_REQUIRED', status=404)

    # Fetch the object for the specified transaction id.
    # Use pickle.loads function to get the command object
    # NOTE(review): the pickled blob was written to the session by this
    # server, so it is treated as trusted; never feed external input
    # into pickle.loads.
    session_obj = grid_data[str(trans_id)]
    trans_obj = pickle.loads(session_obj['command_obj'])

    if trans_obj is not None and session_obj is not None:
        # Fetch the main connection object for the database.
        try:
            manager = get_driver(
                PG_DEFAULT_DRIVER).connection_manager(trans_obj.sid)
            conn = manager.connection(did=trans_obj.did)
        except Exception as e:
            return internal_server_error(errormsg=str(e))

        delete_connection = False

        # Connect to the Server if not connected.
        if not conn.connected():
            status, msg = conn.connect()
            if not status:
                return internal_server_error(errormsg=str(msg))
            delete_connection = True

        if conn.connected():
            # on successful connection cancel the running transaction
            status, result = conn.cancel_transaction(
                trans_obj.conn_id, trans_obj.did)

            # Delete connection if we have created it to
            # cancel the transaction
            if delete_connection:
                manager.release(did=trans_obj.did)
        else:
            status = False
            result = gettext(
                'Not connected to server or connection with the server '
                'has been closed.'
            )
    else:
        status = False
        result = gettext(
            'Either transaction object or session object not found.')

    return make_json_response(
        data={
            'status': status,
            'result': result
        }
    )
def start_view_data(trans_id):
    """
    This method is used to execute query using asynchronous connection.

    Args:
        trans_id: unique transaction id
    """
    limit = -1

    # Check the transaction and connection status
    status, error_msg, conn, trans_obj, session_obj = \
        check_transaction_status(trans_id)

    if error_msg == gettext('Transaction ID not found in the session.'):
        return make_json_response(success=0, errormsg=error_msg,
                                  info='DATAGRID_TRANSACTION_REQUIRED',
                                  status=404)

    # get the default connection as current connection which is attached to
    # trans id holds the cursor which has query result so we cannot use that
    # connection to execute another query otherwise we'll lose query result.

    try:
        manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(
            trans_obj.sid)
        default_conn = manager.connection(did=trans_obj.did)
    except (ConnectionLost, SSHTunnelConnectionLost) as e:
        # Let connection-loss errors propagate to the global handler.
        raise
    except Exception as e:
        current_app.logger.error(e)
        return internal_server_error(errormsg=str(e))

    # Connect to the Server if not connected.
    if not default_conn.connected():
        status, msg = default_conn.connect()
        if not status:
            return make_json_response(
                data={'status': status, 'result': u"{}".format(msg)}
            )

    if status and conn is not None and \
            trans_obj is not None and session_obj is not None:
        # set fetched row count to 0 as we are executing query again.
        trans_obj.update_fetched_row_cnt(0)

        # Fetch the sql and primary_keys from the object
        sql = trans_obj.get_sql(default_conn)
        pk_names, primary_keys = trans_obj.get_primary_keys(default_conn)

        session_obj['command_obj'] = pickle.dumps(trans_obj, -1)

        has_oids = False
        if trans_obj.object_type == 'table':
            # Fetch OIDs status
            has_oids = trans_obj.has_oids(default_conn)

        # Fetch the applied filter.
        filter_applied = trans_obj.is_filter_applied()

        # Fetch the limit for the SQL query
        limit = trans_obj.get_limit()

        can_edit = trans_obj.can_edit()
        can_filter = trans_obj.can_filter()

        # Store the primary keys to the session object
        session_obj['primary_keys'] = primary_keys

        # Store the OIDs status into session object
        session_obj['has_oids'] = has_oids

        update_session_grid_transaction(trans_id, session_obj)

        # Execute sql asynchronously (on the original async connection,
        # not default_conn, so its cursor carries the result).
        try:
            status, result = conn.execute_async(sql)
        except (ConnectionLost, SSHTunnelConnectionLost) as e:
            raise
    else:
        status = False
        result = error_msg
        filter_applied = False
        can_edit = False
        can_filter = False
        sql = None

    return make_json_response(
        data={
            'status': status,
            'result': result,
            'filter_applied': filter_applied,
            'limit': limit,
            'can_edit': can_edit,
            'can_filter': can_filter,
            'sql': sql,
            'info_notifier_timeout': blueprint.info_notifier_timeout.get()
        }
    )
def poll(trans_id):
    """
    This method polls the result of the asynchronous query and returns
    the result.

    Args:
        trans_id: unique transaction id
    """
    result = None
    rows_affected = 0
    rows_fetched_from = 0
    rows_fetched_to = 0
    has_more_rows = False
    columns = dict()
    columns_info = None
    primary_keys = None
    types = {}
    client_primary_key = None
    rset = None
    has_oids = False
    oids = None
    additional_messages = None
    notifies = None

    # Check the transaction and connection status
    status, error_msg, conn, trans_obj, session_obj = \
        check_transaction_status(trans_id)

    if error_msg == gettext('Transaction ID not found in the session.'):
        return make_json_response(success=0, errormsg=error_msg,
                                  info='DATAGRID_TRANSACTION_REQUIRED',
                                  status=404)

    if status and conn is not None and session_obj is not None:
        status, result = conn.poll(
            formatted_exception_msg=True, no_result=True)
        if not status:
            # Query failed: prepend any server messages to the error text.
            messages = conn.messages()
            if messages and len(messages) > 0:
                additional_messages = ''.join(messages)
                result = '{0}\n{1}\n\n{2}'.format(
                    additional_messages,
                    gettext('******* Error *******'),
                    result
                )
            return internal_server_error(result)
        elif status == ASYNC_OK:
            status = 'Success'
            rows_affected = conn.rows_affected()

            # if transaction object is instance of QueryToolCommand
            # and transaction aborted for some reason then issue a
            # rollback to cleanup
            if isinstance(trans_obj, QueryToolCommand):
                trans_status = conn.transaction_status()
                if trans_status == TX_STATUS_INERROR and \
                        trans_obj.auto_rollback:
                    conn.execute_void("ROLLBACK;")

            st, result = conn.async_fetchmany_2darray(
                ON_DEMAND_RECORD_COUNT)

            # There may be additional messages even if result is present
            # eg: Function can provide result as well as RAISE messages
            messages = conn.messages()
            if messages:
                additional_messages = ''.join(messages)
            notifies = conn.get_notifies()

            if st:
                if 'primary_keys' in session_obj:
                    primary_keys = session_obj['primary_keys']

                # Fetch column information
                columns_info = conn.get_column_info()
                client_primary_key = generate_client_primary_key_name(
                    columns_info
                )
                session_obj['client_primary_key'] = client_primary_key

                # If trans_obj is a QueryToolCommand then check for
                # updatable resultsets and primary keys
                if isinstance(trans_obj, QueryToolCommand) and \
                        trans_obj.check_updatable_results_pkeys_oids():
                    pk_names, primary_keys = trans_obj.get_primary_keys()
                    session_obj['has_oids'] = trans_obj.has_oids()
                    # Update command_obj in session obj
                    session_obj['command_obj'] = pickle.dumps(
                        trans_obj, -1)
                    # If primary_keys exist, add them to the session_obj
                    # to allow for saving any changes to the data
                    if primary_keys is not None:
                        session_obj['primary_keys'] = primary_keys

                if 'has_oids' in session_obj:
                    has_oids = session_obj['has_oids']
                    if has_oids:
                        oids = {'oid': 'oid'}

                if columns_info is not None:
                    # Only QueryToolCommand or TableCommand can be
                    # editable
                    if hasattr(trans_obj, 'obj_id') and \
                            trans_obj.can_edit():
                        columns = trans_obj.get_columns_types(conn)
                    else:
                        for col in columns_info:
                            col_type = dict()
                            col_type['type_code'] = col['type_code']
                            col_type['type_name'] = None
                            col_type['internal_size'] = \
                                col['internal_size']
                            columns[col['name']] = col_type

                if columns:
                    # Resolve each column's type oid to its type name.
                    st, types = fetch_pg_types(columns, trans_obj)

                    if not st:
                        return internal_server_error(types)

                    for col_name, col_info in columns.items():
                        for col_type in types:
                            if col_type['oid'] == col_info['type_code']:
                                typname = col_type['typname']
                                col_info['type_name'] = typname

                        # Using characters %, (, ) in the argument names
                        # is not supported in psycopg2
                        col_info['pgadmin_alias'] = \
                            re.sub("[%()]+", "|", col_name)
                    session_obj['columns_info'] = columns

                # status of async_fetchmany_2darray is True and result is
                # none means nothing to fetch
                if result and rows_affected > -1:
                    res_len = len(result)
                    # A full page implies more rows may follow.
                    if res_len == ON_DEMAND_RECORD_COUNT:
                        has_more_rows = True

                    if res_len > 0:
                        rows_fetched_from = \
                            trans_obj.get_fetched_row_cnt()
                        trans_obj.update_fetched_row_cnt(
                            rows_fetched_from + res_len)
                        # Report a 1-based starting row to the client.
                        rows_fetched_from += 1
                        rows_fetched_to = trans_obj.get_fetched_row_cnt()
                        session_obj['command_obj'] = pickle.dumps(
                            trans_obj, -1)

                # As we changed the transaction object we need to
                # restore it and update the session variable.
                update_session_grid_transaction(trans_id, session_obj)

            # Procedure/Function output may comes in the form of Notices
            # from the database server, so we need to append those
            # outputs with the original result.
            if result is None:
                result = conn.status_message()
                if result is not None and \
                        additional_messages is not None:
                    result = additional_messages + result
                else:
                    result = result if result is not None \
                        else additional_messages

        elif status == ASYNC_EXECUTION_ABORTED:
            status = 'Cancel'
        else:
            status = 'Busy'
            messages = conn.messages()
            if messages and len(messages) > 0:
                result = ''.join(messages)
    else:
        status = 'NotConnected'
        result = error_msg

    transaction_status = conn.transaction_status()

    return make_json_response(
        data={
            'status': status,
            'result': result,
            'rows_affected': rows_affected,
            'rows_fetched_from': rows_fetched_from,
            'rows_fetched_to': rows_fetched_to,
            'additional_messages': additional_messages,
            'notifies': notifies,
            'has_more_rows': has_more_rows,
            'colinfo': columns_info,
            'primary_keys': primary_keys,
            'types': types,
            'client_primary_key': client_primary_key,
            'has_oids': has_oids,
            'oids': oids,
            'transaction_status': transaction_status,
        },
        encoding=conn.python_encoding
    )
def wrapped(self, **kwargs):
    """
    Common precondition wrapper for role views.

    Establishes the driver connection for the requested server, bails out
    early if the connection has been lost, caches per-server metadata on
    ``self`` (datlastsysoid, SQL template path, alterable role attributes),
    checks that the current user is allowed to perform ``action`` on the
    target role, and finally fetches the target role's name/flags when the
    action needs them before invoking the wrapped view function ``f``.

    NOTE(review): ``action`` and ``f`` come from the enclosing decorator
    scope, which is outside this chunk.
    """
    self.manager = get_driver(
        PG_DEFAULT_DRIVER).connection_manager(kwargs['sid'])
    self.conn = self.manager.connection()
    driver = get_driver(PG_DEFAULT_DRIVER)
    self.qtIdent = driver.qtIdent

    # Without a live connection no precondition can be checked - abort.
    if not self.conn.connected():
        return precondition_required(
            _("Connection to the server has been lost."))

    # Last system OID for the current database (0 when unknown); used to
    # distinguish system objects from user objects.
    self.datlastsysoid = \
        self.manager.db_info[self.manager.did]['datlastsysoid'] \
        if self.manager.db_info is not None and \
        self.manager.did in self.manager.db_info else 0

    self.sql_path = 'roles/sql/#{0}#'.format(self.manager.version)

    # Role attributes that ALTER ROLE may change; 'rolreplication' only
    # exists on PostgreSQL 9.2 and above.
    self.alterKeys = [
        u'rolcanlogin', u'rolsuper', u'rolcreatedb',
        u'rolcreaterole', u'rolinherit', u'rolreplication',
        u'rolconnlimit', u'rolvaliduntil', u'rolpassword'
    ] if self.manager.version >= 90200 else [
        u'rolcanlogin', u'rolsuper', u'rolcreatedb',
        u'rolcreaterole', u'rolinherit', u'rolconnlimit',
        u'rolvaliduntil', u'rolpassword'
    ]

    check_permission = False
    fetch_name = False
    forbidden_msg = None

    # Decide, per action, whether a permission check and/or a lookup of
    # the target role's current attributes is required.
    if action in ['drop', 'update']:
        if 'rid' in kwargs:
            fetch_name = True
        check_permission = True
        if action == 'drop':
            forbidden_msg = _(
                "The current user does not have permission to drop"
                " the role.")
        else:
            forbidden_msg = _(
                "The current user does not have permission to "
                "update the role.")
    elif action == 'create':
        check_permission = True
        forbidden_msg = _(
            "The current user does not have permission to create "
            "the role.")
    elif action == 'msql' and 'rid' in kwargs:
        fetch_name = True

    if check_permission:
        user = self.manager.user_info

        if not user['is_superuser'] and \
                not user['can_create_role']:
            # A non-privileged user may still act on their own role
            # (rid == their own id); anything else is forbidden.
            if action != 'update' or 'rid' in kwargs:
                if kwargs['rid'] != -1:
                    if user['id'] != kwargs['rid']:
                        return forbidden(forbidden_msg)

    if fetch_name:
        # Load the target role's name and key flags for later use by the
        # wrapped view.
        status, res = self.conn.execute_dict(
            render_template(self.sql_path + 'permission.sql',
                            rid=kwargs['rid'],
                            conn=self.conn))

        if not status:
            return internal_server_error(
                _("Error retrieving the role information.\n{0}").
                format(res))

        if len(res['rows']) == 0:
            return gone(
                _("Could not find the role on the database "
                  "server."))

        row = res['rows'][0]
        self.role = row['rolname']
        self.rolCanLogin = row['rolcanlogin']
        self.rolCatUpdate = row['rolcatupdate']
        self.rolSuper = row['rolsuper']

    return f(self, **kwargs)
def sql(self, gid, sid, did, fid, fsid, json_resp=True):
    """
    Generate the reverse-engineered SQL for the selected foreign server
    node (shown in the SQL pane).

    Args:
        gid: Server Group ID
        sid: Server ID
        did: Database ID
        fid: Foreign data wrapper ID (may be None; resolved from the
             foreign server row in that case)
        fsid: Foreign server ID
        json_resp: When True (default) wrap the SQL in an ajax response,
            otherwise return the raw SQL string.
    """
    sql = render_template("/".join(
        [self.template_path, self._PROPERTIES_SQL]),
        fsid=fsid, conn=self.conn)
    status, res = self.conn.execute_dict(sql)
    if not status:
        return internal_server_error(errormsg=res)
    if len(res['rows']) == 0:
        return gone(
            gettext("Could not find the foreign server information."))

    # Resolve the parent FDW id from the row when the caller did not
    # supply one.
    if fid is None and 'fdwid' in res['rows'][0]:
        fid = res['rows'][0]['fdwid']

    is_valid_options = False
    if res['rows'][0]['fsrvoptions'] is not None:
        res['rows'][0]['fsrvoptions'] = tokenize_options(
            res['rows'][0]['fsrvoptions'], 'fsrvoption', 'fsrvvalue')

        if len(res['rows'][0]['fsrvoptions']) > 0:
            is_valid_options = True

    sql = render_template("/".join([self.template_path, self._ACL_SQL]),
                          fsid=fsid)
    status, fs_rv_acl_res = self.conn.execute_dict(sql)
    if not status:
        return internal_server_error(errormsg=fs_rv_acl_res)
    for row in fs_rv_acl_res['rows']:
        privilege = parse_priv_from_db(row)
        if row['deftype'] in res['rows'][0]:
            res['rows'][0][row['deftype']].append(privilege)
        else:
            res['rows'][0][row['deftype']] = [privilege]

    # To format privileges
    if 'fsrvacl' in res['rows'][0]:
        res['rows'][0]['fsrvacl'] = parse_priv_to_db(
            res['rows'][0]['fsrvacl'], ['U'])

    # The CREATE SERVER template needs the parent foreign data wrapper's
    # properties as well.
    sql = render_template("/".join(
        [self.template_path, self._PROPERTIES_SQL]),
        fdwid=fid, conn=self.conn)
    status, res1 = self.conn.execute_dict(sql)
    if not status:
        return internal_server_error(errormsg=res1)
    if len(res1['rows']) == 0:
        # Guard against a dangling fdwid; previously this raised an
        # IndexError. Message kept consistent with create().
        return gone(
            gettext("The specified foreign server could not be found."))
    fdw_data = res1['rows'][0]

    sql = render_template("/".join([self.template_path, self._CREATE_SQL]),
                          data=res['rows'][0], fdwdata=fdw_data,
                          is_valid_options=is_valid_options,
                          conn=self.conn)
    sql += "\n"

    sql_header = u"""-- Foreign Server: {0}

-- DROP SERVER {0}

""".format(res['rows'][0]['name'])

    sql = sql_header + sql

    if not json_resp:
        return sql.strip('\n')

    return ajax_response(response=sql.strip('\n'))
def create(self, gid, sid, did, fid): """ This function will create the foreign server node. Args: gid: Server Group ID sid: Server ID did: Database ID fid: foreign data wrapper ID """ required_args = ['name'] data = request.form if request.form else json.loads(request.data, encoding='utf-8') for arg in required_args: if arg not in data: return make_json_response( status=410, success=0, errormsg=gettext( "Could not find the required parameter ({}).").format( arg)) try: if 'fsrvacl' in data: data['fsrvacl'] = parse_priv_to_db(data['fsrvacl'], ['U']) sql = render_template("/".join( [self.template_path, self._PROPERTIES_SQL]), fdwid=fid, conn=self.conn) status, res1 = self.conn.execute_dict(sql) if not status: return internal_server_error(errormsg=res1) if len(res1['rows']) == 0: return gone( gettext( "The specified foreign server could not be found.")) fdw_data = res1['rows'][0] is_valid_options = False if 'fsrvoptions' in data: is_valid_options, data['fsrvoptions'] = validate_options( data['fsrvoptions'], 'fsrvoption', 'fsrvvalue') sql = render_template("/".join( [self.template_path, self._CREATE_SQL]), data=data, fdwdata=fdw_data, is_valid_options=is_valid_options, conn=self.conn) status, res = self.conn.execute_scalar(sql) if not status: return internal_server_error(errormsg=res) sql = render_template("/".join( [self.template_path, self._PROPERTIES_SQL]), data=data, fdwdata=fdw_data, conn=self.conn) status, r_set = self.conn.execute_dict(sql) if not status: return internal_server_error(errormsg=r_set) return jsonify(node=self.blueprint.generate_browser_node( r_set['rows'][0]['oid'], fid, r_set['rows'][0]['name'], icon="icon-foreign_server")) except Exception as e: return internal_server_error(errormsg=str(e))
def get_sql(self, gid, sid, data, did, fid, fsid=None):
    """
    This function will generate sql from model data.

    Args:
        gid: Server Group ID
        sid: Server ID
        did: Database ID
        data: Contains the data of the selected foreign server node
        fid: foreign data wrapper ID
        fsid: foreign server ID (None means a CREATE statement is
            generated instead of an UPDATE)

    Returns:
        A (sql, name) tuple on success, or an error Response when a
        lookup query fails.
    """
    required_args = ['name']

    if fsid is not None:
        # UPDATE path: load the server's current definition so the
        # template can diff old vs. new values.
        sql = render_template("/".join(
            [self.template_path, self._PROPERTIES_SQL]),
            fsid=fsid, conn=self.conn)
        status, res = self.conn.execute_dict(sql)
        if not status:
            return internal_server_error(errormsg=res)
        if len(res['rows']) == 0:
            return gone(
                gettext("Could not find the foreign server information."))

        if res['rows'][0]['fsrvoptions'] is not None:
            res['rows'][0]['fsrvoptions'] = tokenize_options(
                res['rows'][0]['fsrvoptions'], 'fsrvoption', 'fsrvvalue')

        # ACL changes arrive as added/changed/deleted deltas; normalise
        # each bucket for the SQL template.
        for key in ['fsrvacl']:
            if key in data and data[key] is not None:
                if 'added' in data[key]:
                    data[key]['added'] = parse_priv_to_db(
                        data[key]['added'], ['U'])
                if 'changed' in data[key]:
                    data[key]['changed'] = parse_priv_to_db(
                        data[key]['changed'], ['U'])
                if 'deleted' in data[key]:
                    data[key]['deleted'] = parse_priv_to_db(
                        data[key]['deleted'], ['U'])

        old_data = res['rows'][0]

        # Fall back to the stored value for any required attribute the
        # client did not send.
        for arg in required_args:
            if arg not in data:
                data[arg] = old_data[arg]

        is_valid_added_options = is_valid_changed_options = False
        if 'fsrvoptions' in data and 'added' in data['fsrvoptions']:
            is_valid_added_options, data['fsrvoptions']['added'] =\
                validate_options(
                    data['fsrvoptions']['added'],
                    'fsrvoption',
                    'fsrvvalue')
        if 'fsrvoptions' in data and 'changed' in data['fsrvoptions']:
            is_valid_changed_options, data['fsrvoptions']['changed'] =\
                validate_options(
                    data['fsrvoptions']['changed'],
                    'fsrvoption',
                    'fsrvvalue')

        sql = render_template(
            "/".join([self.template_path, self._UPDATE_SQL]),
            data=data,
            o_data=old_data,
            is_valid_added_options=is_valid_added_options,
            is_valid_changed_options=is_valid_changed_options,
            conn=self.conn)
        return sql.strip('\n'), \
            data['name'] if 'name' in data else old_data['name']
    else:
        # CREATE path: the template needs the parent FDW's properties.
        sql = render_template("/".join(
            [self.template_path, self._PROPERTIES_SQL]),
            fdwid=fid, conn=self.conn)
        status, res = self.conn.execute_dict(sql)
        if not status:
            return internal_server_error(errormsg=res)
        fdw_data = res['rows'][0]

        for key in ['fsrvacl']:
            if key in data and data[key] is not None:
                data[key] = parse_priv_to_db(data[key], ['U'])

        is_valid_options = False
        if 'fsrvoptions' in data:
            is_valid_options, data['fsrvoptions'] = validate_options(
                data['fsrvoptions'], 'fsrvoption', 'fsrvvalue')

        sql = render_template("/".join(
            [self.template_path, self._CREATE_SQL]),
            data=data, fdwdata=fdw_data,
            is_valid_options=is_valid_options,
            conn=self.conn)
        sql += "\n"
    return sql, data['name']
def start_query_download_tool(trans_id): sync_conn = None (status, error_msg, sync_conn, trans_obj, session_obj) = check_transaction_status(trans_id) if status and sync_conn is not None and \ trans_obj is not None and session_obj is not None: data = request.values if request.values else None try: if data and 'query' in data: sql = data['query'] # This returns generator of records. status, gen = sync_conn.execute_on_server_as_csv( sql, records=2000 ) if not status: return make_json_response( data={ 'status': status, 'result': gen } ) r = Response( gen( quote=blueprint.csv_quoting.get(), quote_char=blueprint.csv_quote_char.get(), field_separator=blueprint.csv_field_separator.get(), replace_nulls_with=blueprint.replace_nulls_with.get() ), mimetype='text/csv' if blueprint.csv_field_separator.get() == ',' else 'text/plain' ) if 'filename' in data and data['filename'] != "": filename = data['filename'] else: import time filename = '{0}.{1}'. \ format(int(time.time()), 'csv' if blueprint. csv_field_separator.get() == ',' else 'txt') # We will try to encode report file name with latin-1 # If it fails then we will fallback to default ascii file name # werkzeug only supports latin-1 encoding supported values try: tmp_file_name = filename tmp_file_name.encode('latin-1', 'strict') except UnicodeEncodeError: filename = "download.csv" r.headers[ "Content-Disposition" ] = "attachment;filename={0}".format(filename) return r except (ConnectionLost, SSHTunnelConnectionLost): raise except Exception as e: current_app.logger.error(e) err_msg = "Error: {0}".format( e.strerror if hasattr(e, 'strerror') else str(e)) return internal_server_error(errormsg=err_msg) else: return internal_server_error( errormsg=gettext("Transaction status check failed.") )
def delete(self, gid, sid, did, scid, tid, **kwargs): """ This function will drop the policy object :param plid: policy id :param did: database id :param sid: server id :param gid: group id :param tid: table id :param scid: Schema ID :param kwargs :return: """ plid = kwargs.get('plid', None) only_sql = kwargs.get('only_sql', False) # Below will deplide if it's simple drop or drop with cascade call if self.cmd == 'delete': # This is a cascade operation cascade = True else: cascade = False if plid is None: data = request.form if request.form else json.loads( request.data, encoding='utf-8' ) else: data = {'ids': [plid]} for plid in data['ids']: try: # Get name for policy from plid sql = render_template("/".join([self.template_path, 'get_policy_name.sql']), plid=plid) status, res = self.conn.execute_dict(sql) if not status: return internal_server_error(errormsg=res) if not res['rows']: return make_json_response( status=410, success=0, errormsg=gettext( 'Error: Object not found.' ), info=gettext( 'The specified policy object could not be found.\n' ) ) # drop policy result = res['rows'][0] result['schema'] = self.schema result['table'] = self.table sql = render_template("/".join([self.template_path, self._DELETE_SQL]), policy_name=result['name'], cascade=cascade, result=result ) if only_sql: return sql status, res = self.conn.execute_scalar(sql) if not status: return internal_server_error(errormsg=res) except Exception as e: return internal_server_error(errormsg=str(e)) return make_json_response( success=1, info=gettext("policy dropped") )
def properties(self, gid, sid, did, scid, tid, exid=None): """ This function is used to list all the Exclusion constraint nodes within that collection. Args: gid: Server Group ID sid: Server ID did: Database ID scid: Schema ID tid: Table ID exid: Exclusion constraint ID Returns: """ sql = render_template("/".join([self.template_path, 'properties.sql']), did=did, tid=tid, cid=exid) status, res = self.conn.execute_dict(sql) if not status: return internal_server_error(errormsg=res) if len(res['rows']) == 0: return gone( _("""Could not find the exclusion constraint in the table.""")) result = res['rows'][0] sql = render_template("/".join( [self.template_path, 'get_constraint_cols.sql']), cid=exid, colcnt=result['col_count']) status, res = self.conn.execute_dict(sql) if not status: return internal_server_error(errormsg=res) columns = [] for row in res['rows']: if row['options'] & 1: order = False nulls_order = True if (row['options'] & 2) else False else: order = True nulls_order = True if (row['options'] & 2) else False columns.append({ "column": row['coldef'].strip('"'), "oper_class": row['opcname'], "order": order, "nulls_order": nulls_order, "operator": row['oprname'], "col_type": row['datatype'] }) result['columns'] = columns # Add Include details of the index supported for PG-11+ if self.manager.version >= 110000: sql = render_template("/".join( [self.template_path, 'get_constraint_include.sql']), cid=exid) status, res = self.conn.execute_dict(sql) if not status: return internal_server_error(errormsg=res) result['include'] = [col['colname'] for col in res['rows']] return ajax_response(response=result, status=200)
def statistics(self, gid, sid, did, scid, tid, idx=None): """ Statistics Args: gid: Server Group Id sid: Server Id did: Database Id scid: Schema Id tid: Table Id idx: Index Id Returns the statistics for a particular object if idx is specified else return all indexes """ if idx is not None: # Individual index # Check if pgstattuple extension is already created? # if created then only add extended stats status, is_pgstattuple = self.conn.execute_scalar(""" SELECT (pg_catalog.count(extname) > 0) AS is_pgstattuple FROM pg_catalog.pg_extension WHERE extname='pgstattuple' """) if not status: return internal_server_error(errormsg=is_pgstattuple) if is_pgstattuple: # Fetch index details only if extended stats available SQL = render_template( "/".join([self.template_path, self._PROPERTIES_SQL]), did=did, tid=tid, idx=idx, datlastsysoid=self._DATABASE_LAST_SYSTEM_OID) status, res = self.conn.execute_dict(SQL) if not status: return internal_server_error(errormsg=res) if len(res['rows']) == 0: return gone(self.not_found_error_msg()) data = dict(res['rows'][0]) index = data['name'] else: index = None status, res = self.conn.execute_dict( render_template("/".join([self.template_path, 'stats.sql']), conn=self.conn, schema=self.schema, index=index, idx=idx, is_pgstattuple=is_pgstattuple)) else: status, res = self.conn.execute_dict( render_template("/".join( [self.template_path, 'coll_stats.sql']), conn=self.conn, schema=self.schema, table=self.table)) if not status: return internal_server_error(errormsg=res) return make_json_response(data=res, status=200)
def execute(self, sql, trans_id, http_session, connect=False):
    """
    Run *sql* within the query-tool transaction identified by *trans_id*.

    Restores the pickled command object from the session, (re)acquires its
    database connection, optionally connects first, executes the statement
    asynchronously via the private query helper, and returns a JSON
    response describing the execution state.

    Args:
        sql: The SQL text to execute.
        trans_id: Query tool transaction id.
        http_session: The Flask session holding the transaction state.
        connect: When True, connect the server connection if it is not
            already connected.
    """
    session_obj = StartRunningQuery.retrieve_session_information(
        http_session,
        trans_id
    )
    # retrieve_session_information returns a Response on failure;
    # propagate it unchanged.
    if type(session_obj) is Response:
        return session_obj

    # Remove any existing primary keys or has_oids in session_obj
    session_obj.pop('primary_keys', None)
    session_obj.pop('oids', None)

    # NOTE(review): command_obj was pickled by this application itself
    # (trusted session state), not user-supplied data.
    transaction_object = pickle.loads(session_obj['command_obj'])

    can_edit = False
    can_filter = False
    notifies = None
    trans_status = None
    if transaction_object is not None and session_obj is not None:
        # set fetched row count to 0 as we are executing query again.
        transaction_object.update_fetched_row_cnt(0)
        self.__retrieve_connection_id(transaction_object)

        try:
            manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(
                transaction_object.sid)
            conn = manager.connection(did=transaction_object.did,
                                      conn_id=self.connection_id,
                                      auto_reconnect=False,
                                      use_binary_placeholder=True,
                                      array_to_string=True)
        except (ConnectionLost, SSHTunnelConnectionLost, CryptKeyMissing):
            # Connection-level failures are handled by the caller's
            # error machinery; re-raise untouched.
            raise
        except Exception as e:
            self.logger.error(e)
            return internal_server_error(errormsg=str(e))

        # Connect to the Server if not connected.
        if connect and not conn.connected():
            status, msg = conn.connect()
            if not status:
                self.logger.error(msg)
                return internal_server_error(errormsg=str(msg))

        # Wrap the statement in EXPLAIN options when the session asks
        # for an execution plan.
        effective_sql_statement = apply_explain_plan_wrapper_if_needed(
            manager, sql)

        result, status = self.__execute_query(
            conn,
            session_obj,
            effective_sql_statement,
            trans_id,
            transaction_object
        )

        can_edit = transaction_object.can_edit()
        can_filter = transaction_object.can_filter()

        # Get the notifies
        notifies = conn.get_notifies()
        trans_status = conn.transaction_status()
    else:
        status = False
        result = gettext(
            'Either transaction object or session object not found.')

    return make_json_response(
        data={
            'status': status,
            'result': result,
            'can_edit': can_edit,
            'can_filter': can_filter,
            'info_notifier_timeout':
                self.blueprint_object.info_notifier_timeout.get(),
            'notifies': notifies,
            'transaction_status': trans_status,
        }
    )