Example #1
def load_table_from_metadata(table_name, schema_name=None):
    ext_name = table_name
    if schema_name:
        ext_name = schema_name + "." + ext_name
    # Return the cached table if it has already been reflected.
    if ext_name in __PARSER_META.tables:
        return __PARSER_META.tables[ext_name]
    # Otherwise reflect the table, but only if it exists; returns None for a
    # missing table. Close the connection used for the existence check.
    connection = _get_engine().connect()
    try:
        if _get_engine().dialect.has_table(connection, table_name, schema=schema_name):
            return Table(table_name, __PARSER_META, autoload=True, schema=schema_name)
    finally:
        connection.close()
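
A minimal usage sketch (the schema and table names here are hypothetical; __PARSER_META and _get_engine are assumed to be set up as in the surrounding module):

# Hypothetical call: reflects "model_draft.test_table" on first use; later
# calls return the cached Table object from __PARSER_META.
table = load_table_from_metadata("test_table", schema_name="model_draft")
if table is None:
    print("table does not exist in the database")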
Example #2
def parse_from_item(d):
    """
        Defintion of a from_item according to 
        http://www.postgresql.org/docs/9.3/static/sql-select.html
        
        return: A from_item string with checked psql qualifiers.
        
        Not implemented:
            with_query_name [ [ AS ] alias [ ( column_alias [, ...] ) ] ]
            [ LATERAL ] function_name ( [ argument [, ...] ] ) [ AS ] alias [ ( column_alias [, ...] | column_definition [, ...] ) ]
            [ LATERAL ] function_name ( [ argument [, ...] ] ) AS ( column_definition [, ...] )
    """
    # TODO: If 'type' is not set assume just a table name is present
    if isinstance(d, str):
        d = {'type': 'table', 'table': d}
    dtype = get_or_403(d, 'type')
    if dtype == 'table':
        schema_name = read_pgid(d['schema']) if 'schema' in d else None
        only = d.get('only', False)
        table_name = read_pgid(get_or_403(d, 'table'))
        item = Table(table_name,
                     MetaData(bind=_get_engine()),
                     schema=schema_name)
        engine = _get_engine()
        conn = engine.connect()
        exists = engine.dialect.has_table(conn, item.name, item.schema)
        conn.close()
        if not exists:
            raise APIError('Table not found: ' + str(item))
    elif dtype == 'select':
        item = parse_select(d['query'])
    elif dtype == 'join':
        left = parse_from_item(get_or_403(d, 'left'))
        right = parse_from_item(get_or_403(d, 'right'))
        is_outer = d.get('is_outer', False)
        full = d.get('is_full', False)
        on_clause = None
        if 'on' in d:
            on_clause = parse_condition(d['on'])
        item = left.join(right,
                         onclause=on_clause,
                         isouter=is_outer,
                         full=full)
    else:
        raise APIError('Unknown from-item: ' + dtype)

    if 'alias' in d:
        item = item.alias(read_pgid(d['alias']))
    return item
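
For illustration, a sketch of the kind of dictionary this parser accepts. Table, column, and alias names are hypothetical, and the 'on' entry is left as a placeholder because its format is defined by parse_condition, which is not shown here:

# Hypothetical from-item: schema1.t1 joined to t2, aliased as j.
from_item = parse_from_item({
    'type': 'join',
    'left': {'type': 'table', 'schema': 'schema1', 'table': 't1'},
    'right': 't2',          # plain strings are promoted to table from-items
    'on': condition_dict,   # placeholder: whatever parse_condition accepts
    'alias': 'j',
})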
Example #3
def parse_from_item(d):
    """
        Defintion of a from_item according to 
        http://www.postgresql.org/docs/9.3/static/sql-select.html
        
        return: A from_item string with checked psql qualifiers.
        
        Not implemented:
            with_query_name [ [ AS ] alias [ ( column_alias [, ...] ) ] ]
            [ LATERAL ] function_name ( [ argument [, ...] ] ) [ AS ] alias [ ( column_alias [, ...] | column_definition [, ...] ) ]
            [ LATERAL ] function_name ( [ argument [, ...] ] ) AS ( column_definition [, ...] )
    """
    # TODO: If 'type' is not set assume just a table name is present
    if isinstance(d, str):
        d = {"type": "table", "table": d}
    if isinstance(d, list):
        return [parse_from_item(f) for f in d]
    dtype = get_or_403(d, "type")
    if dtype == "table":
        schema_name = read_pgid(d["schema"]) if "schema" in d else None
        only = d.get("only", False)
        ext_name = table_name = read_pgid(get_or_403(d, "table"))
        tkwargs = dict(autoload=True)
        if schema_name:
            ext_name = schema_name + "." + ext_name
            # use the validated schema name, not the raw input
            tkwargs["schema"] = schema_name
        if ext_name in __PARSER_META.tables:
            item = __PARSER_META.tables[ext_name]
        else:
            try:
                # use the validated table name, not the raw input
                item = Table(table_name, __PARSER_META, **tkwargs)
            except sa.exc.NoSuchTableError:
                raise APIError("Table {table} not found".format(table=ext_name))

        engine = _get_engine()
        conn = engine.connect()
        exists = engine.dialect.has_table(conn, item.name, item.schema)
        conn.close()
        if not exists:
            raise APIError("Table not found: " + str(item), status=400)
    elif dtype == "select":
        item = parse_select(d)
    elif dtype == "join":
        left = parse_from_item(get_or_403(d, "left"))
        right = parse_from_item(get_or_403(d, "right"))
        is_outer = d.get("is_outer", False)
        full = d.get("is_full", False)
        on_clause = None
        if "on" in d:
            on_clause = parse_condition(d["on"])
        item = left.join(right, onclause=on_clause, isouter=is_outer, full=full)
    else:
        raise APIError("Unknown from-item: " + dtype)

    if "alias" in d:
        item = item.alias(read_pgid(d["alias"]))
    return item
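
Unlike Example #2, this variant also accepts a list of from-items, returning one parsed item per entry; a brief sketch with hypothetical names:

# Hypothetical: FROM schema1.t1, t2 — yields a list of two from-items
items = parse_from_item([
    {"type": "table", "schema": "schema1", "table": "t1"},
    "t2",
])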
Example #4
def parse_insert(d, context, message=None, mapper=None):
    table = Table(
        read_pgid(get_or_403(d, "table")),
        MetaData(bind=_get_engine()),
        autoload=True,
        schema=read_pgid(get_or_403(d, "schema")),
    )
    field_strings = []
    for field in d.get("fields", []):
        if not ((isinstance(field, dict) and "type" in field
                 and field["type"] == "column") or isinstance(field, str)):
            raise APIError(
                "Only pure column expressions are allowed in insert")
        field_strings.append(parse_expression(field))

    query = table.insert()

    if not "method" in d:
        d["method"] = "values"
    if d["method"] == "values":
        if field_strings:
            raw_values = get_or_403(d, "values")
            if not isinstance(raw_values, list):
                raise APIError("{} is not a list".format(raw_values))
            values = (zip(
                field_strings,
                parse_expression(x,
                                 allow_untyped_dicts=True,
                                 escape_quotes=False),
            ) for x in raw_values)
        else:
            values = get_or_403(d, "values")

        def clear_meta(vals):
            val_dict = dict(vals)
            # make sure meta fields are not compromised
            if context["user"].is_anonymous:
                username = "******"
            else:
                username = context["user"].name
            val_dict.update(set_meta_info("insert", username, message))
            return val_dict

        values = list(map(clear_meta, values))

        query = query.values(values)
    elif d["method"] == "select":
        values = parse_select(d["values"])
        query = query.from_select(field_strings, values)
    else:
        raise APIError("Unknown insert method: " + str(d["method"]))

    if "returning" in d:
        return_clauses = [parse_expression(x, mapper) for x in d["returning"]]
        query = query.returning(*return_clauses)

    return query, values
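
A sketch of a request dictionary this parser accepts; the schema, table, and column names are hypothetical, and request_user stands in for the authenticated user object expected in the context:

# Hypothetical payload: INSERT INTO schema1.t1 (name) VALUES ('a'), ('b')
d = {
    "schema": "schema1",
    "table": "t1",
    "fields": ["name"],        # pure column expressions only
    "method": "values",        # the default when omitted
    "values": [["a"], ["b"]],  # one inner list per row, zipped with fields
}
query, values = parse_insert(d, context={"user": request_user})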
Example #5
def parse_insert(d, context, message=None, mapper=None):
    table = Table(read_pgid(get_or_403(d, 'table')),
                  MetaData(bind=_get_engine()),
                  autoload=True,
                  schema=read_pgid(get_or_403(d, 'schema')))
    field_strings = []
    for field in d.get('fields', []):
        if not ((isinstance(field, dict) and 'type' in field
                 and field['type'] == 'column') or isinstance(field, str)):
            raise APIError(
                'Only pure column expressions are allowed in insert')
        field_strings.append(parse_expression(field))

    query = table.insert()

    if 'method' not in d:
        d['method'] = 'values'
    if d['method'] == 'values':
        if field_strings:
            raw_values = get_or_403(d, 'values')
            if not isinstance(raw_values, list):
                raise APIError('{} is not a list'.format(raw_values))
            values = (zip(field_strings,
                          parse_expression(x, allow_untyped_dicts=True))
                      for x in raw_values)
        else:
            values = get_or_403(d, 'values')

        def clear_meta(vals):
            val_dict = dict(vals)
            # make sure meta fields are not compromised
            if context['user'].is_anonymous:
                username = '******'
            else:
                username = context['user'].name
            val_dict.update(set_meta_info('insert', username, message))
            return val_dict

        values = list(map(clear_meta, values))

        query = query.values(values)
    elif d['method'] == 'select':
        values = parse_select(d['values'])
        query = query.from_select(field_strings, values)
    else:
        raise APIError('Unknown insert method: ' + str(d['method']))

    if 'returning' in d:
        return_clauses = [parse_expression(x, mapper) for x in d['returning']]
        query = query.returning(*return_clauses)

    return query, values
Example #6
 def handle(self, *args, **options):
     engine = _get_engine()
     inspector = sqla.inspect(engine)
     # Tables that actually exist in the database, restricted to the whitelist
     real_tables = {(schema, table_name)
                    for schema in schema_whitelist
                    for table_name in inspector.get_table_names(schema=schema)}
     # Tables registered in the Django ORM
     table_objects = {(t.schema.name, t.name)
                      for t in Table.objects.all()
                      if t.schema.name in schema_whitelist}
     # Delete ORM entries that have no backing database table
     for schema, table in table_objects.difference(real_tables):
         print(schema, table)
         Table.objects.get(name=table, schema__name=schema).delete()
     print("---")
     # Register database tables that are missing from the ORM
     for schema, table in real_tables.difference(table_objects):
         print(schema, table)
         s, _ = Schema.objects.get_or_create(name=schema)
         t = Table(name=table, schema=s)
         t.save()
Example #7
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = _get_engine()

    with connectable.connect() as connection:
        context.configure(connection=connection,
                          target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()
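
In a stock Alembic env.py, this function is selected at import time according to the run mode; the dispatch below is the standard Alembic template, not code specific to this project:

if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()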
Example #8
def parse_insert(d, context, message=None):
    table = Table(read_pgid(get_or_403(d, 'table')),
                  MetaData(bind=_get_engine()),
                  autoload=True,
                  schema=read_pgid(get_or_403(d, 'schema')))

    meta_cols = ['_message', '_user']

    field_strings = []
    for field in d.get('fields', []):
        assert ('type' in field and field['type'] == 'column'
                ), 'Only pure column expressions are allowed in insert'

        field_strings.append(parse_expression(field))

    query = table.insert()

    if 'method' not in d:
        d['method'] = 'values'
    if d['method'] == 'values':
        if field_strings:
            raw_values = get_or_403(d, 'values')
            assert isinstance(raw_values, list)
            values = map(lambda x: zip(field_strings, x), raw_values)
        else:
            values = get_or_403(d, 'values')

        def clear_meta(vals):
            # materialize the (field, value) pairs into a mutable dict
            val_dict = dict(vals)
            # make sure meta fields are not compromised
            if context['user'].is_anonymous:
                username = '******'
            else:
                username = context['user'].name
            val_dict.update(set_meta_info('insert', username, message))
            return val_dict

        values = list(map(clear_meta, values))

        query = query.values(values)

    if 'returning' in d:
        query = query.returning(*map(Column, d['returning']))

    return query, values
Example #9
def parse_from_item(d):
    """
        Defintion of a from_item according to 
        http://www.postgresql.org/docs/9.3/static/sql-select.html
        
        return: A from_item string with checked psql qualifiers.
        
        Not implemented:
            with_query_name [ [ AS ] alias [ ( column_alias [, ...] ) ] ]
            [ LATERAL ] function_name ( [ argument [, ...] ] ) [ AS ] alias [ ( column_alias [, ...] | column_definition [, ...] ) ]
            [ LATERAL ] function_name ( [ argument [, ...] ] ) AS ( column_definition [, ...] )
    """
    # TODO: If 'type' is not set assume just a table name is present
    if isinstance(d, str):
        d = {'type': 'table', 'table': d}
    if d['type'] == 'table':
        schema_name = read_pgid(d['schema']) if 'schema' in d else None
        only = d.get('only', False)
        table_name = read_pgid(d['table'])
        table = Table(table_name,
                      MetaData(bind=_get_engine()),
                      schema=schema_name)
        if 'alias' in d:
            table = table.alias(read_pgid(d['alias']))
        return table
    elif d['type'] == 'select':
        return parse_select(d)
    elif d['type'] == 'join':
        left = parse_from_item(d['left'])
        right = parse_from_item(d['right'])
        is_outer = d.get('is_outer', False)
        full = d.get('is_full', False)
        on_clause = None
        if 'on' in d:
            on_clause = parse_condition(d['on'])
        return left.join(right,
                         onclause=on_clause,
                         isouter=is_outer,
                         full=full)
    else:
        raise APIError('Unknown from-item: ' + d['type'])
Example #10
Create Date: 2017-09-18 17:48:59.971501

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.orm import sessionmaker

from api.connection import _get_engine

# revision identifiers, used by Alembic.
revision = '71463e8fd9c3'
down_revision = '048215319c74'
branch_labels = None
depends_on = None

engine = _get_engine()


def upgrade():

    Session = sessionmaker(bind=engine)
    sess = Session()
    try:
        # dialect.has_table expects a Connection rather than a Session
        if not engine.dialect.has_table(sess.connection(), table_name='_edit_base'):
            op.create_table('_edit_base',
                            sa.Column('_id',
                                      sa.BigInteger,
                                      primary_key=True,
                                      autoincrement=True),
                            sa.Column('_message',
                                      sa.String(500),
Example #11
 def handle(self, *args, **options):
     engine = _get_engine()
     inspector = sqla.inspect(engine)
     for schema in schema_whitelist:
         for table_name in inspector.get_table_names(schema=schema):
             update_meta_search(table_name, schema)
Example #12
        right = parse_from_item(get_or_403(d, "right"))
        is_outer = d.get("is_outer", False)
        full = d.get("is_full", False)
        on_clause = None
        if "on" in d:
            on_clause = parse_condition(d["on"])
        item = left.join(right, onclause=on_clause, isouter=is_outer, full=full)
    else:
        raise APIError("Unknown from-item: " + dtype)

    if "alias" in d:
        item = item.alias(read_pgid(d["alias"]))
    return item


__PARSER_META = MetaData(bind=_get_engine())


def load_table_from_metadata(table_name, schema_name=None):
    ext_name = table_name
    if schema_name:
        ext_name = schema_name + "." + ext_name
    # Return the cached table if it has already been reflected.
    if ext_name in __PARSER_META.tables:
        return __PARSER_META.tables[ext_name]
    # Otherwise reflect the table, but only if it exists; returns None for a
    # missing table. Close the connection used for the existence check.
    connection = _get_engine().connect()
    try:
        if _get_engine().dialect.has_table(connection, table_name, schema=schema_name):
            return Table(table_name, __PARSER_META, autoload=True, schema=schema_name)
    finally:
        connection.close()

Example #13
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata

if target_metadata.bind is None:
    target_metadata.bind = _get_engine()
# target_metadata.reflect()

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well.  By skipping the Engine creation
    we don't even need a DBAPI to be available.