Example No. 1
    def get(self, request, schema, table, id, column=None):
        schema, table = actions.get_table_name(schema, table, restrict_schemas=False)
        # Reject anything that is not a valid PostgreSQL qualifier.
        if not all(parser.is_pg_qual(part) for part in (schema, table, id, column)):
            return ModHttpResponse({"error": "Bad Request", "http_status": 400})

        return_value = actions.getValue(schema, table, column, id)

        status = 404 if return_value is None else 200
        return HttpResponse(return_value if return_value is not None else "", status=status)
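All of these examples delegate identifier validation to parser.is_pg_qual before any value reaches SQL. Its implementation is not part of the snippet; the sketch below only illustrates what such a check might do, assuming a conservative rule that accepts names built from letters, digits and underscores. The real project may accept a wider grammar.

import re

# Hypothetical stand-in for parser.is_pg_qual(); treat it as a sketch, not the
# project's actual rule (which may allow quoted or dotted identifiers).
_PG_QUAL_RE = re.compile(r"^[A-Za-z0-9_]+$")


def is_pg_qual(value):
    """Return True if value looks safe to interpolate as a PostgreSQL qualifier."""
    return bool(_PG_QUAL_RE.match(str(value)))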
Example No. 2
def table_drop(request, context=None):
    db = request["db"]
    engine = _get_engine()
    connection = engine.connect()

    # Load the schema name and check it for sanity.
    schema = request.pop("schema", "public")
    if not is_pg_qual(schema):
        return {'success': False, 'reason': 'Invalid schema name: %s' % schema}
    # TODO: Check whether the schema actually exists.

    # Load the table name and check it for sanity.
    table = request.pop("table", None)
    if not is_pg_qual(table):
        return {'success': False, 'reason': 'Invalid table name: %s' % table}

    exists_raw = request.pop("exists", False)
    try:
        exists = bool(exists_raw)
    except Exception:
        return {'success': False,
                'reason': 'Invalid exists clause: %s' % exists_raw}

    option = request.pop("option", None)
    if option and option.lower() not in ["cascade", "restrict"]:
        return {'success': False, 'reason': 'Invalid option clause name: %s' % option}

    sql_string = "drop table {exists} {schema}.{table} {option} ".format(
        schema=schema,
        table=table,
        option=option if option else "",
        exists="IF EXISTS" if exists else "")

    session = sessionmaker(bind=engine)()
    try:
        session.execute(sql_string.replace('%', '%%'))
    except Exception as e:
        traceback.print_exc()
        session.rollback()
        raise e
    else:
        session.commit()

    return {}
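Going only by the keys the function pops from its request dict, a call could look like the sketch below. The concrete values are placeholders, and the surrounding setup (_get_engine, is_pg_qual, an actual database) is assumed to exist.

# Hypothetical request dict, mirroring the keys table_drop() reads above.
request = {
    "db": "example_db",       # read into a local variable, not validated here
    "schema": "model_draft",  # placeholder schema name
    "table": "my_table",      # placeholder table name
    "exists": True,           # adds IF EXISTS to the generated statement
    "option": "CASCADE",      # or "RESTRICT"; anything else is rejected
}

result = table_drop(request)
# Success returns {}; a validation failure returns
# {'success': False, 'reason': ...} before any SQL is executed.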
Example No. 3
    def get(self, request, schema, table, row_id=None):
        schema, table = actions.get_table_name(schema, table, restrict_schemas=False)
        columns = request.GET.getlist("column")

        where = request.GET.getlist("where")
        if row_id and where:
            raise actions.APIError(
                "Where clauses and row id are not allowed in the same query"
            )

        orderby = request.GET.getlist("orderby")
        if row_id and orderby:
            raise actions.APIError(
                "Order by clauses and row id are not allowed in the same query"
            )

        limit = request.GET.get("limit")
        if row_id and limit:
            raise actions.APIError(
                "Limit by clauses and row id are not allowed in the same query"
            )

        offset = request.GET.get("offset")
        if row_id and offset:
            raise actions.APIError(
                "Order by clauses and row id are not allowed in the same query"
            )

        format = request.GET.get("form")

        if offset is not None and not offset.isdigit():
            raise actions.APIError("Offset must be integer")
        if limit is not None and not limit.isdigit():
            raise actions.APIError("Limit must be integer")
        if not all(parser.is_pg_qual(c) for c in columns):
            raise actions.APIError("Columns are no postgres qualifiers")
        if not all(parser.is_pg_qual(c) for c in orderby):
            raise actions.APIError(
                "Columns in the orderby clause are not valid PostgreSQL qualifiers"
            )

        # OPERATORS can be EQUALS, GREATER, LOWER, NOTEQUAL, NOTGREATER, NOTLOWER
        # CONNECTORS can be AND, OR
        # Note: a "+" between two values is decoded as a space (standard URL decoding).

        where_clauses = self.__read_where_clause(where)

        if row_id:
            clause = {
                "operands": [{"type": "column", "column": "id"}, row_id],
                "operator": "EQUALS",
                "type": "operator",
            }
            if where_clauses:
                where_clauses = conjunction(clause, where_clauses)
            else:
                where_clauses = clause

        # TODO: Validate where_clauses. Should not be vulnerable
        data = {
            "schema": schema,
            "table": table,
            "columns": columns,
            "where": where_clauses,
            "orderby": orderby,
            "limit": limit,
            "offset": offset,
        }

        return_obj = self.__get_rows(request, data)
        session = (
            sessions.load_session_from_context(return_obj.pop("context"))
            if "context" in return_obj else None
        )
        # Extract column names from description
        if "description" in return_obj:
            cols = [col[0] for col in return_obj["description"]]
        else:
            cols = []
            return_obj["data"] = []
            return_obj["rowcount"] = 0
        if format == "csv":
            pseudo_buffer = Echo()
            writer = csv.writer(pseudo_buffer, quoting=csv.QUOTE_ALL)

            response = OEPStream(
                (
                    writer.writerow(x)
                    for x in itertools.chain([cols], return_obj["data"])
                ),
                content_type="text/csv",
                session=session,
            )
            response[
                "Content-Disposition"
            ] = 'attachment; filename="{schema}__{table}.csv"'.format(
                schema=schema, table=table
            )
            return response

        else:
            if row_id:
                dict_list = [dict(zip(cols, row)) for row in return_obj["data"]]
                if dict_list:
                    dict_list = dict_list[0]
                else:
                    raise Http404
                # TODO: Figure out what JsonResponse does differently.
                return JsonResponse(dict_list, safe=False)

            return stream(
                (dict(zip(cols, row)) for row in return_obj["data"]),
                session=session,
            )
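The CSV branch above formats rows through an Echo object that is not defined in the snippet. It matches the pseudo-buffer pattern from the Django documentation for streaming large CSV files; a minimal version of that pattern looks like this (the class name comes from the snippet, the body is an assumption).

class Echo:
    """Pseudo file-like object whose write() simply hands the value back,
    so csv.writer can format rows without buffering them anywhere."""

    def write(self, value):
        return value

With this, writer.writerow(x) returns the formatted CSV line for x, which is exactly what the generator passed to OEPStream yields.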
Example No. 4
    def get(self, request, schema, table, row_id=None):
        schema, table = actions.get_table_name(schema, table, restrict_schemas=False)
        columns = request.GET.getlist('column')

        where = request.GET.get('where')
        if row_id and where:
            raise actions.APIError('Where clauses and row id are not allowed in the same query')

        orderby = request.GET.getlist('orderby')
        if row_id and orderby:
            raise actions.APIError('Order by clauses and row id are not allowed in the same query')

        limit = request.GET.get('limit')
        if row_id and limit:
            raise actions.APIError('Limit clauses and row id are not allowed in the same query')

        offset = request.GET.get('offset')
        if row_id and offset:
            raise actions.APIError('Offset clauses and row id are not allowed in the same query')

        if offset is not None and not offset.isdigit():
            raise actions.APIError("Offset must be integer")
        if limit is not None and not limit.isdigit():
            raise actions.APIError("Limit must be integer")
        if not all(parser.is_pg_qual(c) for c in columns):
            raise actions.APIError("Columns are no postgres qualifiers")
        if not all(parser.is_pg_qual(c) for c in orderby):
            raise actions.APIError("Columns in groupby-clause are no postgres qualifiers")

        # OPERATORS can be EQUALS, GREATER, LOWER, NOTEQUAL, NOTGREATER, NOTLOWER
        # CONNECTORS can be AND, OR
        # Note: a "+" between two values is decoded as a space (standard URL decoding).

        where_clauses = self.__read_where_clause(where)

        if row_id:
            where_clauses.append({
                'left': {'type': 'column', 'column': 'id'},
                'operator': 'EQUALS',
                'right': row_id,
                'type': 'operator_binary',
            })

        # TODO: Validate where_clauses. Should not be vulnerable
        data = {'schema': schema,
                'table': table,
                'columns': columns,
                'where': where_clauses,
                'orderby': orderby,
                'limit': limit,
                'offset': offset
                }

        return_obj = self.__get_rows(request, data)

        # Extract column names from description
        cols = [col[0] for col in return_obj['description']]
        dict_list = [dict(zip(cols, row)) for row in return_obj['data']]

        if row_id:
            if dict_list:
                dict_list = dict_list[0]
            else:
                raise Http404

        # TODO: Figure out what JsonResponse does differently.
        return JsonResponse(dict_list, safe=False)
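The zip of the cursor description with each data row is what produces the JSON objects. A small, self-contained illustration with made-up column names and rows shows what JsonResponse ends up serializing:

# Made-up cursor output, shaped like return_obj['description'] and return_obj['data'].
description = [('id',), ('name',)]   # first element of each entry is the column name
data = [(1, 'wind'), (2, 'solar')]

cols = [col[0] for col in description]
dict_list = [dict(zip(cols, row)) for row in data]
# -> [{'id': 1, 'name': 'wind'}, {'id': 2, 'name': 'solar'}]
# With row_id set, only the first dict is returned, or Http404 if the list is empty.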
Example No. 5
    def get(self, request, schema, table, row_id=None):
        schema, table = actions.get_table_name(schema,
                                               table,
                                               restrict_schemas=False)
        columns = request.GET.getlist('column')

        where = request.GET.getlist('where')
        if row_id and where:
            raise actions.APIError(
                'Where clauses and row id are not allowed in the same query')

        orderby = request.GET.getlist('orderby')
        if row_id and orderby:
            raise actions.APIError(
                'Order by clauses and row id are not allowed in the same query'
            )

        limit = request.GET.get('limit')
        if row_id and limit:
            raise actions.APIError(
                'Limit clauses and row id are not allowed in the same query'
            )

        offset = request.GET.get('offset')
        if row_id and offset:
            raise actions.APIError(
                'Offset clauses and row id are not allowed in the same query'
            )

        format = request.GET.get('form')

        if offset is not None and not offset.isdigit():
            raise actions.APIError("Offset must be integer")
        if limit is not None and not limit.isdigit():
            raise actions.APIError("Limit must be integer")
        if not all(parser.is_pg_qual(c) for c in columns):
            raise actions.APIError("Columns are no postgres qualifiers")
        if not all(parser.is_pg_qual(c) for c in orderby):
            raise actions.APIError(
                "Columns in the orderby clause are not valid PostgreSQL qualifiers")

        # OPERATORS can be EQUALS, GREATER, LOWER, NOTEQUAL, NOTGREATER, NOTLOWER
        # CONNECTORS can be AND, OR
        # Note: a "+" between two values is decoded as a space (standard URL decoding).

        where_clauses = self.__read_where_clause(where)

        if row_id:
            clause = {
                'operands': [{
                    'type': 'column',
                    'column': 'id'
                }, row_id],
                'operator': 'EQUALS',
                'type': 'operator'
            }
            if where_clauses:
                where_clauses = conjunction(clause, where_clauses)
            else:
                where_clauses = clause

        # TODO: Validate where_clauses. Should not be vulnerable
        data = {
            'schema': schema,
            'table': table,
            'columns': columns,
            'where': where_clauses,
            'orderby': orderby,
            'limit': limit,
            'offset': offset
        }

        return_obj = self.__get_rows(request, data)
        # Extract column names from description
        cols = [col[0] for col in return_obj['description']]

        if format == 'csv':
            pseudo_buffer = Echo()
            writer = csv.writer(pseudo_buffer, quoting=csv.QUOTE_ALL)
            response = StreamingHttpResponse(
                (writer.writerow(x)
                 for x in itertools.chain([cols], return_obj['data'])),
                content_type="text/csv")
            response['Content-Disposition'] = (
                'attachment; filename="{schema}__{table}.csv"'.format(
                    schema=schema, table=table))
            return response

        else:
            if row_id:
                dict_list = [
                    dict(zip(cols, row)) for row in return_obj['data']
                ]
                if dict_list:
                    dict_list = dict_list[0]
                else:
                    raise Http404
                # TODO: Figure out what JsonResponse does differently.
                return JsonResponse(dict_list, safe=False)

            return stream((dict(zip(cols, row)) for row in return_obj['data']))
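The stream(...) helper used in the non-CSV branch is not shown in any of the examples. Assuming it plays the same role as the CSV branch but for JSON, a minimal sketch could serialize each row dict as one line of a StreamingHttpResponse. Only the name and the arguments are taken from the call sites; everything else below is an assumption.

import json

from django.http import StreamingHttpResponse


def stream(row_dicts, session=None):
    """Hypothetical JSON streamer matching the call sites above.

    Each element of row_dicts is serialized as one line; the optional session
    keyword mirrors the call in Example No. 3 and is simply ignored here.
    """
    lines = (json.dumps(row, default=str) + "\n" for row in row_dicts)
    return StreamingHttpResponse(lines, content_type="application/json")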