def get(self, request, schema, table):
    """
    Returns a dictionary that describes the DDL-make-up of this table.
    Fields are:

    * name : Name of the table,
    * schema: Name of the schema,
    * columns : as specified in :meth:`api.actions.describe_columns`
    * indexed : as specified in :meth:`api.actions.describe_indexes`
      (NOTE(review): the response key is spelled ``indexed``, not
      ``indexes`` — clients may depend on this spelling, so confirm
      before renaming the key)
    * constraints: as specified in :meth:`api.actions.describe_constraints`

    :param request: Django request object (only used for routing)
    :param schema: Schema name as taken from the URL
    :param table: Table name as taken from the URL
    :return: JsonResponse containing the table description
    """
    schema, table = actions.get_table_name(schema, table, restrict_schemas=False)
    return JsonResponse(
        {
            "schema": schema,
            "name": table,
            "columns": actions.describe_columns(schema, table),
            "indexed": actions.describe_indexes(schema, table),
            "constraints": actions.describe_constraints(schema, table),
        }
    )
def delete(self, request, schema, table):
    """
    Drop a table together with its three meta tables (edit, insert, delete).

    The meta tables live in the meta schema derived from *schema* and are
    dropped first; the data table itself is dropped last.

    :param request: Django request object (only used for routing)
    :param schema: Schema name as taken from the URL
    :param table: Table name as taken from the URL
    :return: Empty JsonResponse with HTTP 200 on success
    """
    schema, table = actions.get_table_name(schema, table)
    meta_schema = actions.get_meta_schema_name(schema)
    engine = actions._get_engine()
    # The three auxiliary meta tables share the same DROP statement, so
    # iterate instead of repeating the code three times.  Identifiers are
    # double-quoted for safety against unusual names, consistent with the
    # quoted variant of this handler elsewhere in this file (for the
    # all-lowercase names produced by the helpers this is equivalent to
    # the previous unquoted form).
    meta_tables = (
        actions.get_edit_table_name(schema, table),
        actions.get_insert_table_name(schema, table),
        actions.get_delete_table_name(schema, table),
    )
    for meta_table in meta_tables:
        engine.execute(
            'DROP TABLE "{schema}"."{table}" CASCADE;'.format(
                schema=meta_schema, table=meta_table
            )
        )
    engine.execute(
        'DROP TABLE "{schema}"."{table}" CASCADE;'.format(schema=schema, table=table)
    )
    return JsonResponse({}, status=status.HTTP_200_OK)
def delete(self, request, schema, table):
    """
    Drop a table, its three meta tables (edit, insert, delete), and the
    corresponding :class:`DBTable` bookkeeping row.

    :param request: Django request object (only used for routing)
    :param schema: Schema name as taken from the URL
    :param table: Table name as taken from the URL
    :return: Empty JsonResponse with HTTP 200 on success
    """
    schema, table = actions.get_table_name(schema, table)
    meta_schema = actions.get_meta_schema_name(schema)
    engine = actions._get_engine()
    # Drop the three auxiliary meta tables first, then the table itself.
    meta_tables = (
        actions.get_edit_table_name(schema, table),
        actions.get_insert_table_name(schema, table),
        actions.get_delete_table_name(schema, table),
    )
    for meta_table in meta_tables:
        engine.execute(
            'DROP TABLE "{schema}"."{table}" CASCADE;'.format(
                schema=meta_schema, table=meta_table
            )
        )
    engine.execute(
        'DROP TABLE "{schema}"."{table}" CASCADE;'.format(schema=schema, table=table)
    )
    # BUGFIX: the previous code used get_or_create(name=..., schema__name=...)
    # and then deleted the result.  When no row existed, get_or_create would
    # try to *create* one with the `schema__name` double-underscore lookup as
    # a constructor kwarg, which raises; when the row existed, it was a
    # pointless extra query.  filter().delete() removes the row if present
    # and is a no-op otherwise.
    DBTable.objects.filter(name=table, schema__name=schema).delete()
    return JsonResponse({}, status=status.HTTP_200_OK)
def put(self, request, schema, table, row_id=None):
    """
    Insert or update the row identified by *row_id*.

    If a row with this id already exists, it is updated; otherwise the row
    is inserted.  The id in the URL must match the id in the payload (if
    the payload carries one).

    :param request: Django request; ``request.data['query']`` holds the
        column data for the row
    :param schema: Schema name as taken from the URL
    :param table: Table name as taken from the URL
    :param row_id: Mandatory row id from the URL
    :return: JsonResponse with the action result; HTTP 201 on insert
    :raises actions.APIError: if URL id and payload id differ (HTTP 409)
    """
    schema, table = actions.get_table_name(schema, table)
    if not row_id:
        return JsonResponse(actions._response_error('This methods requires an id'),
                            status=status.HTTP_400_BAD_REQUEST)
    column_data = request.data['query']
    # row_id is guaranteed truthy here (guard above), so no extra check needed.
    if column_data.get('id', int(row_id)) != int(row_id):
        raise actions.APIError(
            'Id in URL and query do not match. Ids may not change.',
            status=status.HTTP_409_CONFLICT)
    engine = actions._get_engine()
    conn = engine.connect()
    try:
        # Check whether the id is already in use.  BUGFIX: the id was
        # previously interpolated directly into the SQL string (injection
        # risk); it is now passed as a bound DB-API parameter, and the
        # identifiers are double-quoted.
        exists = conn.execute(
            'select count(*) '
            'from "{schema}"."{table}" '
            'where id = %s;'.format(schema=schema, table=table),
            (int(row_id),),
        ).first()[0] > 0
    finally:
        # BUGFIX: the connection was leaked if the query raised.
        conn.close()
    if exists:
        response = self.__update_rows(request, schema, table, column_data, row_id)
        actions.apply_changes(schema, table)
        return JsonResponse(response)
    else:
        result = self.__insert_row(request, schema, table, column_data, row_id)
        actions.apply_changes(schema, table)
        return JsonResponse(result, status=status.HTTP_201_CREATED)
def get(self, request, schema, table, id, column=None):
    """
    Return the single value stored in *column* of the row *id*.

    Responds with HTTP 404 and an empty body when no value is found,
    HTTP 400 when any identifier is not a valid postgres qualifier.
    """
    schema, table = actions.get_table_name(schema, table, restrict_schemas=False)
    # Reject the request unless every identifier is a valid postgres
    # qualifier; the generator preserves the original check order.
    if not all(parser.is_pg_qual(part) for part in (table, schema, id, column)):
        return ModHttpResponse({"error": "Bad Request", "http_status": 400})
    value = actions.getValue(schema, table, column, id)
    if value is None:
        return HttpResponse("", status=404)
    return HttpResponse(value, status=200)
def get(self, request, schema, table, column=None):
    """
    Describe the columns of a table.

    Without *column*, the full column description of the table is
    returned; with *column*, only that column's description.

    :raises actions.APIError: if *column* is not part of the table
    """
    schema, table = actions.get_table_name(schema, table, restrict_schemas=False)
    description = actions.describe_columns(schema, table)
    if not column:
        return JsonResponse(description)
    try:
        return JsonResponse(description[column])
    except KeyError:
        raise actions.APIError('The column specified is not part of '
                               'this table.')
def post(self, request, schema, table, row_id=None, action=None):
    """
    Update rows or insert a new one.

    * with *row_id*: update that single row,
    * with ``action == 'new'``: insert a new row (HTTP 201),
    * otherwise: update all rows matched by the payload.
    """
    schema, table = actions.get_table_name(schema, table)
    payload = request.data['query']
    status_code = status.HTTP_200_OK
    # Flattened the original nested if/else into a single chain.
    if row_id:
        response = self.__update_rows(request, schema, table, payload, row_id)
    elif action == 'new':
        response = self.__insert_row(request, schema, table, payload, row_id)
        status_code = status.HTTP_201_CREATED
    else:
        response = self.__update_rows(request, schema, table, payload, None)
    actions.apply_changes(schema, table)
    return JsonResponse(response, status=status_code)
def put(self, request, schema, table, row_id=None, action=None):
    """
    Insert or update the row identified by *row_id*.

    If a row with this id already exists it is updated, otherwise it is
    inserted (HTTP 201).  The ``action`` URL segment is rejected here —
    'new' is a POST-only concept.

    :param request: Django request; ``request.data['query']`` holds the
        column data for the row
    :param schema: Schema name as taken from the URL
    :param table: Table name as taken from the URL
    :param row_id: Mandatory row id from the URL
    :param action: Must be falsy for PUT requests
    :raises APIError: if *action* is supplied
    :raises actions.APIError: if URL id and payload id differ (HTTP 409)
    """
    if action:
        raise APIError(
            "This request type (PUT) is not supported. The "
            "'new' statement is only possible in POST requests."
        )
    schema, table = actions.get_table_name(schema, table)
    if not row_id:
        return JsonResponse(
            actions._response_error("This methods requires an id"),
            status=status.HTTP_400_BAD_REQUEST,
        )
    column_data = request.data["query"]
    # row_id is guaranteed truthy here (guard above), so no extra check needed.
    if column_data.get("id", int(row_id)) != int(row_id):
        raise actions.APIError(
            "Id in URL and query do not match. Ids may not change.",
            status=status.HTTP_409_CONFLICT,
        )
    engine = actions._get_engine()
    # Check whether the id is already in use.  BUGFIX: the id was previously
    # interpolated directly into the SQL string (injection risk); it is now
    # passed as a bound DB-API parameter, and the identifiers are
    # double-quoted.
    exists = (
        engine.execute(
            "select count(*) "
            'from "{schema}"."{table}" '
            "where id = %s;".format(schema=schema, table=table),
            (int(row_id),),
        ).first()[0]
        > 0
    )
    if exists:
        response = self.__update_rows(request, schema, table, column_data, row_id)
        actions.apply_changes(schema, table)
        return JsonResponse(response)
    else:
        result = self.__insert_row(request, schema, table, column_data, row_id)
        actions.apply_changes(schema, table)
        return JsonResponse(result, status=status.HTTP_201_CREATED)
def delete(self, request, table, schema, row_id=None):
    """
    Delete rows from a table (a single row when *row_id* is given) and
    apply the pending changes.

    NOTE(review): the parameter order here is ``(table, schema)`` — the
    reverse of the sibling handlers.  Django passes URL kwargs by name,
    so this is harmless, but confirm the URLconf before reordering.
    """
    schema, table = actions.get_table_name(schema, table)
    response = self.__delete_rows(request, schema, table, row_id)
    actions.apply_changes(schema, table)
    return JsonResponse(response)
def get(self, request, schema, table, row_id=None):
    """
    Return rows of a table, or a single row when *row_id* is given.

    Query parameters: ``column`` (repeatable), ``where`` (repeatable),
    ``orderby`` (repeatable), ``limit``, ``offset``, and ``form`` (when
    ``csv``, the result is streamed as a CSV attachment).  ``where``,
    ``orderby``, ``limit`` and ``offset`` may not be combined with a
    *row_id*.

    :return: streamed CSV, a JsonResponse for a single row, or a
        streamed JSON row sequence
    :raises actions.APIError: on invalid parameter combinations or
        non-qualifier column names
    :raises Http404: when *row_id* matches no row
    """
    schema, table = actions.get_table_name(schema, table, restrict_schemas=False)
    columns = request.GET.getlist("column")
    where = request.GET.getlist("where")
    if row_id and where:
        raise actions.APIError(
            "Where clauses and row id are not allowed in the same query"
        )
    orderby = request.GET.getlist("orderby")
    if row_id and orderby:
        raise actions.APIError(
            "Order by clauses and row id are not allowed in the same query"
        )
    limit = request.GET.get("limit")
    if row_id and limit:
        raise actions.APIError(
            "Limit by clauses and row id are not allowed in the same query"
        )
    offset = request.GET.get("offset")
    if row_id and offset:
        # BUGFIX: this message previously read "Order by clauses ..." — a
        # copy-paste from the orderby check above.
        raise actions.APIError(
            "Offset clauses and row id are not allowed in the same query"
        )
    # Renamed from `format` to avoid shadowing the builtin.
    response_format = request.GET.get("form")
    if offset is not None and not offset.isdigit():
        raise actions.APIError("Offset must be integer")
    if limit is not None and not limit.isdigit():
        raise actions.APIError("Limit must be integer")
    if not all(parser.is_pg_qual(c) for c in columns):
        raise actions.APIError("Columns are no postgres qualifiers")
    if not all(parser.is_pg_qual(c) for c in orderby):
        raise actions.APIError(
            "Columns in groupby-clause are no postgres qualifiers"
        )
    # OPERATORS could be EQUALS, GREATER, LOWER, NOTEQUAL, NOTGREATER, NOTLOWER
    # CONNECTORS could be AND, OR
    # If you connect two values with an +, it will convert the + to a space. Whatever.
    where_clauses = self.__read_where_clause(where)
    if row_id:
        # Restrict the query to the requested row by conjoining an
        # `id = row_id` clause with any user-supplied where clauses.
        clause = {
            "operands": [{"type": "column", "column": "id"}, row_id],
            "operator": "EQUALS",
            "type": "operator",
        }
        if where_clauses:
            where_clauses = conjunction(clause, where_clauses)
        else:
            where_clauses = clause
    # TODO: Validate where_clauses. Should not be vulnerable
    data = {
        "schema": schema,
        "table": table,
        "columns": columns,
        "where": where_clauses,
        "orderby": orderby,
        "limit": limit,
        "offset": offset,
    }
    return_obj = self.__get_rows(request, data)
    session = (
        sessions.load_session_from_context(return_obj.pop("context"))
        if "context" in return_obj
        else None
    )
    # Extract column names from the cursor description; without a
    # description there is no result set, so normalise to empty.
    if "description" in return_obj:
        cols = [col[0] for col in return_obj["description"]]
    else:
        cols = []
        return_obj["data"] = []
        return_obj["rowcount"] = 0
    if response_format == "csv":
        pseudo_buffer = Echo()
        writer = csv.writer(pseudo_buffer, quoting=csv.QUOTE_ALL)
        response = OEPStream(
            (
                writer.writerow(x)
                for x in itertools.chain([cols], return_obj["data"])
            ),
            content_type="text/csv",
            session=session,
        )
        response[
            "Content-Disposition"
        ] = 'attachment; filename="{schema}__{table}.csv"'.format(
            schema=schema, table=table
        )
        return response
    else:
        if row_id:
            dict_list = [dict(zip(cols, row)) for row in return_obj["data"]]
            if dict_list:
                dict_list = dict_list[0]
            else:
                raise Http404
            # TODO: Figure out what JsonResponse does different.
            return JsonResponse(dict_list, safe=False)
        return stream(
            (dict(zip(cols, row)) for row in return_obj["data"]), session=session
        )
def put(self, request, schema, table, column):
    """
    Add a new column to a table.

    The column definition is taken from ``request.data['query']``.
    Responds with HTTP 201 and an empty body on success.
    """
    schema, table = actions.get_table_name(schema, table)
    column_definition = request.data["query"]
    actions.column_add(schema, table, column, column_definition)
    return JsonResponse({}, status=201)
def post(self, request, schema, table, column):
    """
    Alter an existing column of a table.

    The alteration specification is taken from ``request.data['query']``
    and the result of the alteration is returned as JSON.
    """
    schema, table = actions.get_table_name(schema, table)
    alteration = request.data["query"]
    response = actions.column_alter(alteration, {}, schema, table, column)
    return JsonResponse(response)
def get(self, request, schema, table, row_id=None):
    """
    Return rows of a table as JSON, or a single row when *row_id* is given.

    Query parameters: ``column`` (repeatable), ``where``, ``orderby``
    (repeatable), ``limit``, ``offset``.  ``where``, ``orderby``,
    ``limit`` and ``offset`` may not be combined with a *row_id*.

    :raises actions.APIError: on invalid parameter combinations or
        non-qualifier column names
    :raises Http404: when *row_id* matches no row
    """
    schema, table = actions.get_table_name(schema, table, restrict_schemas=False)
    columns = request.GET.getlist('column')
    # NOTE(review): the sibling implementations use getlist('where') here,
    # so this variant hands a single string (or None) to
    # __read_where_clause — confirm which shape that helper expects.
    where = request.GET.get('where')
    if row_id and where:
        raise actions.APIError('Where clauses and row id are not allowed in the same query')
    orderby = request.GET.getlist('orderby')
    if row_id and orderby:
        raise actions.APIError('Order by clauses and row id are not allowed in the same query')
    limit = request.GET.get('limit')
    if row_id and limit:
        raise actions.APIError('Limit by clauses and row id are not allowed in the same query')
    offset = request.GET.get('offset')
    if row_id and offset:
        # BUGFIX: this message previously read "Order by clauses ..." — a
        # copy-paste from the orderby check above.
        raise actions.APIError('Offset clauses and row id are not allowed in the same query')
    if offset is not None and not offset.isdigit():
        raise actions.APIError("Offset must be integer")
    if limit is not None and not limit.isdigit():
        raise actions.APIError("Limit must be integer")
    if not all(parser.is_pg_qual(c) for c in columns):
        raise actions.APIError("Columns are no postgres qualifiers")
    if not all(parser.is_pg_qual(c) for c in orderby):
        raise actions.APIError("Columns in groupby-clause are no postgres qualifiers")
    # OPERATORS could be EQUALS, GREATER, LOWER, NOTEQUAL, NOTGREATER, NOTLOWER
    # CONNECTORS could be AND, OR
    # If you connect two values with an +, it will convert the + to a space. Whatever.
    where_clauses = self.__read_where_clause(where)
    if row_id:
        # Restrict the query to the requested row.
        where_clauses.append({'left': {'type': 'column', 'column': 'id'},
                              'operator': 'EQUALS',
                              'right': row_id,
                              'type': 'operator_binary'})
    # TODO: Validate where_clauses. Should not be vulnerable
    data = {'schema': schema,
            'table': table,
            'columns': columns,
            'where': where_clauses,
            'orderby': orderby,
            'limit': limit,
            'offset': offset
            }
    return_obj = self.__get_rows(request, data)
    # Extract column names from the cursor description
    cols = [col[0] for col in return_obj['description']]
    dict_list = [dict(zip(cols, row)) for row in return_obj['data']]
    if row_id:
        if dict_list:
            dict_list = dict_list[0]
        else:
            raise Http404
    # TODO: Figure out what JsonResponse does different.
    return JsonResponse(dict_list, safe=False)
def get(self, request, schema, table, row_id=None):
    """
    Return rows of a table, or a single row when *row_id* is given.

    Query parameters: ``column`` (repeatable), ``where`` (repeatable),
    ``orderby`` (repeatable), ``limit``, ``offset``, and ``form`` (when
    ``csv``, the result is streamed as a CSV attachment).  ``where``,
    ``orderby``, ``limit`` and ``offset`` may not be combined with a
    *row_id*.

    :return: a StreamingHttpResponse (CSV), a JsonResponse for a single
        row, or a streamed JSON row sequence
    :raises actions.APIError: on invalid parameter combinations or
        non-qualifier column names
    :raises Http404: when *row_id* matches no row
    """
    schema, table = actions.get_table_name(schema, table, restrict_schemas=False)
    columns = request.GET.getlist('column')
    where = request.GET.getlist('where')
    if row_id and where:
        raise actions.APIError(
            'Where clauses and row id are not allowed in the same query')
    orderby = request.GET.getlist('orderby')
    if row_id and orderby:
        raise actions.APIError(
            'Order by clauses and row id are not allowed in the same query'
        )
    limit = request.GET.get('limit')
    if row_id and limit:
        raise actions.APIError(
            'Limit by clauses and row id are not allowed in the same query'
        )
    offset = request.GET.get('offset')
    if row_id and offset:
        # BUGFIX: this message previously read "Order by clauses ..." — a
        # copy-paste from the orderby check above.
        raise actions.APIError(
            'Offset clauses and row id are not allowed in the same query'
        )
    # Renamed from `format` to avoid shadowing the builtin.
    response_format = request.GET.get('form')
    if offset is not None and not offset.isdigit():
        raise actions.APIError("Offset must be integer")
    if limit is not None and not limit.isdigit():
        raise actions.APIError("Limit must be integer")
    if not all(parser.is_pg_qual(c) for c in columns):
        raise actions.APIError("Columns are no postgres qualifiers")
    if not all(parser.is_pg_qual(c) for c in orderby):
        raise actions.APIError(
            "Columns in groupby-clause are no postgres qualifiers")
    # OPERATORS could be EQUALS, GREATER, LOWER, NOTEQUAL, NOTGREATER, NOTLOWER
    # CONNECTORS could be AND, OR
    # If you connect two values with an +, it will convert the + to a space. Whatever.
    where_clauses = self.__read_where_clause(where)
    if row_id:
        # Restrict the query to the requested row by conjoining an
        # `id = row_id` clause with any user-supplied where clauses.
        clause = {
            'operands': [{
                'type': 'column',
                'column': 'id'
            }, row_id],
            'operator': 'EQUALS',
            'type': 'operator'
        }
        if where_clauses:
            where_clauses = conjunction(clause, where_clauses)
        else:
            where_clauses = clause
    # TODO: Validate where_clauses. Should not be vulnerable
    data = {
        'schema': schema,
        'table': table,
        'columns': columns,
        'where': where_clauses,
        'orderby': orderby,
        'limit': limit,
        'offset': offset
    }
    return_obj = self.__get_rows(request, data)
    # Extract column names from the cursor description
    cols = [col[0] for col in return_obj['description']]
    if response_format == 'csv':
        pseudo_buffer = Echo()
        writer = csv.writer(pseudo_buffer, quoting=csv.QUOTE_ALL)
        response = StreamingHttpResponse(
            (writer.writerow(x)
             for x in itertools.chain([cols], return_obj['data'])),
            content_type="text/csv")
        response[
            'Content-Disposition'] = 'attachment; filename="{schema}__{table}.csv"'.format(
                schema=schema, table=table)
        return response
    else:
        if row_id:
            dict_list = [
                dict(zip(cols, row)) for row in return_obj['data']
            ]
            if dict_list:
                dict_list = dict_list[0]
            else:
                raise Http404
            # TODO: Figure out what JsonResponse does different.
            return JsonResponse(dict_list, safe=False)
        return stream((dict(zip(cols, row)) for row in return_obj['data']))