Example #1
class Relationship(object):
    def __init__(self, source_cls, target_cls):
        super(Relationship, self).__init__()
        self.source_cls = source_cls
        self.target_cls = target_cls
        self.kwargs = OrderedDict()

    @property
    def suggested_names(self):
        yield self.preferred_name if not iskeyword(self.preferred_name) else self.preferred_name + '_'

        iteration = 0
        while True:
            iteration += 1
            yield self.preferred_name + str(iteration)

    def render(self):
        text = 'relationship('
        args = [repr(self.target_cls)]

        if 'secondaryjoin' in self.kwargs:
            text += '\n        '
            delimiter, end = ',\n        ', '\n    )'
        else:
            delimiter, end = ', ', ')'

        args.extend([key + '=' + value for key, value in list(self.kwargs.items())])
        return text + delimiter.join(args) + end
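
A minimal usage sketch, not part of the original source: assuming the class above is defined (with OrderedDict imported from collections and iskeyword from keyword), render() joins the repr of the target class with the collected keyword arguments, whose values are already rendered code fragments.

rel = Relationship('Parent', 'Child')      # hypothetical source/target class names
rel.kwargs['secondary'] = 'child_parent'   # value is a code fragment, not a quoted string
rel.kwargs['backref'] = repr('parents')
print(rel.render())
# relationship('Child', secondary=child_parent, backref='parents')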
Example #2
class Relationship(object):
    def __init__(self, source_cls, target_cls):
        super(Relationship, self).__init__()
        self.source_cls = source_cls
        self.target_cls = target_cls
        self.kwargs = OrderedDict()

    def render(self):
        text = 'relationship('
        args = [repr(self.target_cls)]

        if 'secondaryjoin' in self.kwargs:
            text += '\n        '
            delimiter, end = ',\n        ', '\n    )'
        else:
            delimiter, end = ', ', ')'

        args.extend([key + '=' + value for key, value in self.kwargs.items()])
        return text + delimiter.join(args) + end
Example #3
class Relationship(object):
    def __init__(self, source_cls, target_cls):
        super(Relationship, self).__init__()
        self.source_cls = source_cls
        self.target_cls = target_cls
        self.kwargs = OrderedDict()
        self.backref_name = _underscore(self.source_cls)

    def render(self):
        text = _flask_prepend + 'relationship('
        args = [repr(self.target_cls)]

        if 'secondaryjoin' in self.kwargs:
            text += '\n        '
            delimiter, end = ',\n        ', '\n    )'
        else:
            delimiter, end = ', ', ')'

        args.extend([key + '=' + value for key, value in self.kwargs.items()])
        return text + delimiter.join(args) + end

    def make_backref(self, relationships, classes):
        backref = self.backref_name
        original_backref = backref
        # Check if backref already exists for relationship source_cls to target_cls and add suffix
        suffix = 0
        while (self.target_cls, backref) in [(x.target_cls, x.backref_name) for x in relationships]:
            backref = original_backref + '_{0}'.format(suffix)
            suffix += 1

        self.kwargs['backref'] = repr(backref)
        # Check if any of the target_cls inherit from other target_cls
        # If so, modify backref name of descendant
        # "backref({0}, lazy='dynamic')".format(repr(backref))
        for rel in [x for x in relationships if 'backref' in x.kwargs]:
            if self.target_cls in classes and rel.target_cls in classes:
                if _is_model_descendant(classes[self.target_cls], classes[rel.target_cls]):
                    self.backref_name = self.target_cls.lower() + '_' + backref
                    self.kwargs['backref'] = repr(self.backref_name)
                if _is_model_descendant(classes[rel.target_cls], classes[self.target_cls]):
                    backref = rel.backref_name
                    rel.backref_name = rel.target_cls.lower() + '_' + backref
                    rel.kwargs['backref'] = repr(rel.backref_name)
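
A self-contained sketch, with assumed names, of the suffix loop inside make_backref(): when the preferred backref is already taken for the same target class, the first collision gets '_0' appended, the next '_1', and so on.

existing = [('Customer', 'order'), ('Customer', 'order_0')]   # (target_cls, backref_name) pairs already in use
backref = original_backref = 'order'
suffix = 0
while ('Customer', backref) in existing:
    backref = original_backref + '_{0}'.format(suffix)
    suffix += 1
print(backref)  # order_1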
Example #4
class ModelClass(Model):
    parent_name = 'Base'

    def __init__(self, table, association_tables, inflect_engine, detect_joined):
        super(ModelClass, self).__init__(table)
        self.name = self._tablename_to_classname(table.name, inflect_engine)
        self.children = []
        self.attributes = OrderedDict()

        # Assign attribute names for columns
        for column in table.columns:
            self._add_attribute(column.name, column)

        # Add many-to-one relationships
        pk_column_names = set(col.name for col in table.primary_key.columns)
        for constraint in sorted(table.constraints, key=_get_constraint_sort_key):
            if isinstance(constraint, ForeignKeyConstraint):
                target_cls = self._tablename_to_classname(constraint.elements[0].column.table.name, inflect_engine)
                if (detect_joined and self.parent_name == 'Base' and
                        set(_get_column_names(constraint)) == pk_column_names):
                    self.parent_name = target_cls
                else:
                    relationship_ = ManyToOneRelationship(self.name, target_cls, constraint, inflect_engine)
                    self._add_attribute(relationship_.preferred_name, relationship_)

        # Add many-to-many relationships
        for association_table in association_tables:
            fk_constraints = [c for c in association_table.constraints if isinstance(c, ForeignKeyConstraint)]
            fk_constraints.sort(key=_get_constraint_sort_key)
            target_cls = self._tablename_to_classname(fk_constraints[1].elements[0].column.table.name, inflect_engine)
            relationship_ = ManyToManyRelationship(self.name, target_cls, association_table, inflect_engine)
            self._add_attribute(relationship_.preferred_name, relationship_)

    @staticmethod
    def _tablename_to_classname(tablename, inflect_engine):
        camel_case_name = ''.join(part[:1].upper() + part[1:] for part in tablename.split('_'))
        return inflect_engine.singular_noun(camel_case_name) or camel_case_name

    def _add_attribute(self, attrname, value):
        attrname = tempname = _convert_to_valid_identifier(attrname)
        counter = 1
        while tempname in self.attributes:
            tempname = attrname + str(counter)
            counter += 1

        self.attributes[tempname] = value
        return tempname

    def add_imports(self, collector):
        super(ModelClass, self).add_imports(collector)

        if any(isinstance(value, Relationship) for value in self.attributes.values()):
            collector.add_literal_import('sqlalchemy.orm', 'relationship')

        for child in self.children:
            child.add_imports(collector)

    def render(self):
        text = 'class {0}({1}):\n'.format(self.name, self.parent_name)
        text += '    __tablename__ = {0!r}\n'.format(self.table.name)

        # Render constraints and indexes as __table_args__
        table_args = []
        for constraint in sorted(self.table.constraints, key=_get_constraint_sort_key):
            if isinstance(constraint, PrimaryKeyConstraint):
                continue
            if isinstance(constraint, (ForeignKeyConstraint, UniqueConstraint)) and len(constraint.columns) == 1:
                continue
            table_args.append(_render_constraint(constraint))
        for index in self.table.indexes:
            if len(index.columns) > 1:
                table_args.append(_render_index(index))

        table_kwargs = {}
        if self.schema:
            table_kwargs['schema'] = self.schema

        kwargs_items = ', '.join('{0!r}: {1!r}'.format(key, table_kwargs[key]) for key in table_kwargs)
        kwargs_items = '{{{0}}}'.format(kwargs_items) if kwargs_items else None
        if table_kwargs and not table_args:
            text += '    __table_args__ = {0}\n'.format(kwargs_items)
        elif table_args:
            if kwargs_items:
                table_args.append(kwargs_items)
            if len(table_args) == 1:
                table_args[0] += ','
            text += '    __table_args__ = (\n        {0}\n    )\n'.format(',\n        '.join(table_args))

        # Render columns
        text += '\n'
        for attr, column in self.attributes.items():
            if isinstance(column, Column):
                show_name = attr != column.name
                text += '    {0} = {1}\n'.format(attr, _render_column(column, show_name))

        # Render relationships
        if any(isinstance(value, Relationship) for value in self.attributes.values()):
            text += '\n'
        for attr, relationship in self.attributes.items():
            if isinstance(relationship, Relationship):
                text += '    {0} = {1}\n'.format(attr, relationship.render())

        # Render subclasses
        for child_class in self.children:
            text += '\n\n' + child_class.render()

        return text
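
A standalone sketch of the _tablename_to_classname() helper above, assuming inflect_engine comes from the `inflect` package (which provides singular_noun()); singular_noun() returns False when the word is already singular, hence the `or camel_case_name` fallback.

import inflect

def tablename_to_classname(tablename, inflect_engine):
    # underscore_separated table name -> CamelCase, singularized class name
    camel_case_name = ''.join(part[:1].upper() + part[1:] for part in tablename.split('_'))
    return inflect_engine.singular_noun(camel_case_name) or camel_case_name

print(tablename_to_classname('customer_orders', inflect.engine()))  # e.g. 'CustomerOrder'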
Example #5
    def test_1_row_2_package(self):
        row_dict = OrderedDict([
            (u'Dataset Ref#', u'BIS-000002'),
            (u'Dataset Status', u'Proposed'),
            (u'Agency', u'Higher Education Statistics Agency'),
            (u'Primary Contact', u'*****@*****.**'),
            (u'Secondary Contact', None),
            (u'Title', u'Higher Education Statistics: All HE students by level of study, mode of study, subject of study, domicile and gender 2007/08'),
            (u'Abstract', u'This dataset provides the 2007/08 higher education statistics for all students by level of study, mode of study, subject of study, domicile and gender'),
            (u'Date Released', 2008),
            (u'Date Updated', 2008),
            (u'Update Frequency', u'Never'),
            (u'Tags', u'hesa higher-education-statistics 2007-2008'),
            (u'Department', u'Department for Business, Innovation & Skills'),
            (u'Wiki', u'-'),
            (u'Identifier', u'higher-education-statistics-all-he-students-by-level-of-study-mode-of-study-subject-of-study-meeeeeeeeeeeeeeeeeeeeeeeeeeeega-long-name-2007-2008'),
            (u'Licence', u'Higher Education Statistcs Agency Copyright with data.gov.uk rights'),
            (u'Version', u'-'),
            (u'Geographic Coverage', u'United Kingdom (England, Scotland, Wales, Northern Ireland)'),
            (u'Geographic Granularity', u'national'),
            (u'Temporal Granularity', u'years'),
            (u'Precision', u'integer to the nearest 5'),
            (u'Taxonomy url', u'-'),
            (u'Temporal Coverage From', datetime.date(2007, 8, 1)),
            (u'Temporal Coverage To', datetime.date(2008, 7, 31)),
            (u'National Statistic', u'no'),
            (u'Categories', u'-'),
            (u'Contact information', u'Higher Education Statistics Agency (HESA)\n+44 (0) 1242 211133\[email protected]'),
            (u'Data File', 1),
            (u'Reference Material', 2),
            (u'Information', 0),
            (u'Full Description', 0),
            (u'Unknown', 0),
            (u'Total', 3)
            ])
        expected_pkg_dict = OrderedDict([
            ('name', u'hesa-all-he-students-by-level-of-study-mode-of-study-subject-of-study-meeeeeeeeeeeeee-2007-2008'),
            ('title', u'Higher Education Statistics: All HE students by level of study, mode of study, subject of study, domicile and gender 2007/08'),
            ('version', u'-'),
            ('url', None),
            ('author', u'Higher Education Statistics Agency (HESA)'),
            ('author_email', u'*****@*****.**'),
            ('maintainer', u''),
            ('maintainer_email', u''),
            ('notes', u'This dataset provides the 2007/08 higher education statistics for all students by level of study, mode of study, subject of study, domicile and gender'),
            ('license_id', u'hesa-withrights'),
            ('tags', [u'2007-2008', u'education', u'hesa', \
                      u'higher-education', u'higher-education-statistics']),
            ('groups', ['ukgov']),
            ('resources', [OrderedDict([
                ('url', 'http://www.hesa.ac.uk/dox/dataTables/studentsAndQualifiers/download/subject0708.xls?v=1.0'),
                ('format', 'XLS'),
                ('description', 'Data File - XLS Format'),
                ]),
                           OrderedDict([
                               ('url', 'http://www.hesa.ac.uk/index.php/component/option,com_datatables/task,show_file/defs,1/Itemid,121/catdex,3/disp,/dld,subject0708.xls/yrStr,2007+to+2008/dfile,studefs0708.htm/area,subject/mx,0/'),
                               ('format', 'HTML'),
                               ('description', 'Reference Material - Data File Definition'),
                               ]),
                           OrderedDict([
                               ('url', 'http://www.hesa.ac.uk/index.php/component/option,com_datatables/task,show_file/defs,2/Itemid,121/catdex,3/disp,/dld,subject0708.xls/yrStr,2007+to+2008/dfile,notes0708.htm/area,subject/mx,0/'),
                               ('format', 'HTML'),
                               ('description', 'Reference Material - Notes Regarding Data File Content'),
                               ]),
                           ]),
            ('extras', OrderedDict([
                ('external_reference', 'BIS-000002'),
                ('date_released', '2008'),
                ('date_updated', '2008'),
                ('temporal_granularity', 'years'),
                ('temporal_coverage_to', '2008-07-31'),
                ('temporal_coverage_from', '2007-08-01'),
                ('geographic_coverage', '111100: United Kingdom (England, Scotland, Wales, Northern Ireland)'),
                ('geographical_granularity', 'national'),
                ('agency', u'Higher Education Statistics Agency'),
                ('precision', 'integer to the nearest 5'),
                ('taxonomy_url', '-'),
                ('import_source', 'BIS-%s' % os.path.basename(self._filepath)),
                ('department', u'Department for Business, Innovation and Skills'),
                ('update_frequency', 'Never'),
                ('national_statistic', 'no'),
                ('categories', '-'),
                ])
             ),
            ])
        pkg_dict = self.importer.row_2_package(row_dict)

        log = self.importer.get_log()
        assert not log, log

        PackageDictUtil.check_dict(pkg_dict, expected_pkg_dict)
        expected_keys = set([key for key, value in expected_pkg_dict.items()])
        keys = set(pkg_dict.keys())
        key_difference = expected_keys - keys
        assert not key_difference, key_difference
Example #6
class ModelClass(Model):
    parent_name = 'Base'

    def __init__(self, table, association_tables, inflect_engine,
                 detect_joined):
        super(ModelClass, self).__init__(table)
        self.name = self._tablename_to_classname(table.name, inflect_engine)
        self.children = []
        self.attributes = OrderedDict()

        # Assign attribute names for columns
        for column in table.columns:
            self._add_attribute(column.name, column)

        # Add many-to-one relationships
        pk_column_names = set(col.name for col in table.primary_key.columns)
        for constraint in sorted(table.constraints,
                                 key=_get_constraint_sort_key):
            if isinstance(constraint, ForeignKeyConstraint):
                target_cls = self._tablename_to_classname(
                    constraint.elements[0].column.table.name, inflect_engine)
                if (detect_joined and self.parent_name == 'Base' and set(
                        _get_column_names(constraint)) == pk_column_names):
                    self.parent_name = target_cls
                else:
                    relationship_ = ManyToOneRelationship(
                        self.name, target_cls, constraint, inflect_engine)
                    self._add_attribute(relationship_.preferred_name,
                                        relationship_)

        # Add many-to-many relationships
        for association_table in association_tables:
            fk_constraints = [
                c for c in association_table.constraints
                if isinstance(c, ForeignKeyConstraint)
            ]
            fk_constraints.sort(key=_get_constraint_sort_key)
            target_cls = self._tablename_to_classname(
                fk_constraints[1].elements[0].column.table.name,
                inflect_engine)
            relationship_ = ManyToManyRelationship(self.name, target_cls,
                                                   association_table,
                                                   inflect_engine)
            self._add_attribute(relationship_.preferred_name, relationship_)

    @staticmethod
    def _tablename_to_classname(tablename, inflect_engine):
        camel_case_name = ''.join(part[:1].upper() + part[1:]
                                  for part in tablename.split('_'))
        return inflect_engine.singular_noun(camel_case_name) or camel_case_name

    def _add_attribute(self, attrname, value):
        attrname = tempname = _convert_to_valid_identifier(attrname)
        counter = 1
        while tempname in self.attributes:
            tempname = attrname + str(counter)
            counter += 1

        self.attributes[tempname] = value
        return tempname

    def add_imports(self, collector):
        super(ModelClass, self).add_imports(collector)

        if any(
                isinstance(value, Relationship)
                for value in self.attributes.values()):
            collector.add_literal_import('sqlalchemy.orm', 'relationship')

        for child in self.children:
            child.add_imports(collector)

    def render(self):
        text = 'class {0}({1}):\n'.format(self.name, self.parent_name)
        text += '    __tablename__ = {0!r}\n'.format(self.table.name)

        # Render constraints and indexes as __table_args__
        table_args = []
        for constraint in sorted(self.table.constraints,
                                 key=_get_constraint_sort_key):
            if isinstance(constraint, PrimaryKeyConstraint):
                continue
            if isinstance(constraint,
                          (ForeignKeyConstraint, UniqueConstraint)) and len(
                              constraint.columns) == 1:
                continue
            table_args.append(_render_constraint(constraint))
        for index in self.table.indexes:
            if len(index.columns) > 1:
                table_args.append(_render_index(index))

        table_kwargs = {}
        if self.schema:
            table_kwargs['schema'] = self.schema

        kwargs_items = ', '.join('{0!r}: {1!r}'.format(key, table_kwargs[key])
                                 for key in table_kwargs)
        kwargs_items = '{{{0}}}'.format(kwargs_items) if kwargs_items else None
        if table_kwargs and not table_args:
            text += '    __table_args__ = {0}\n'.format(kwargs_items)
        elif table_args:
            if kwargs_items:
                table_args.append(kwargs_items)
            if len(table_args) == 1:
                table_args[0] += ','
            text += '    __table_args__ = (\n        {0}\n    )\n'.format(
                ',\n        '.join(table_args))

        # Render columns
        text += '\n'
        for attr, column in self.attributes.items():
            if isinstance(column, Column):
                show_name = attr != column.name
                text += '    {0} = {1}\n'.format(
                    attr, _render_column(column, show_name))

        # Render relationships
        if any(
                isinstance(value, Relationship)
                for value in self.attributes.values()):
            text += '\n'
        for attr, relationship in self.attributes.items():
            if isinstance(relationship, Relationship):
                text += '    {0} = {1}\n'.format(attr, relationship.render())

        # Render subclasses
        for child_class in self.children:
            text += '\n\n' + child_class.render()

        return text
Example #7
class ApplyBatchImpl(object):
    def __init__(self, table, table_args, table_kwargs):
        self.table = table  # this is a Table object
        self.table_args = table_args
        self.table_kwargs = table_kwargs
        self.new_table = None
        self.column_transfers = OrderedDict((c.name, {
            'expr': c
        }) for c in self.table.c)
        self._grab_table_elements()

    def _grab_table_elements(self):
        schema = self.table.schema
        self.columns = OrderedDict()
        for c in self.table.c:
            c_copy = c.copy(schema=schema)
            c_copy.unique = c_copy.index = False
            self.columns[c.name] = c_copy
        self.named_constraints = {}
        self.unnamed_constraints = []
        self.indexes = {}
        self.new_indexes = {}
        for const in self.table.constraints:
            if _is_type_bound(const):
                continue
            if const.name:
                self.named_constraints[const.name] = const
            else:
                self.unnamed_constraints.append(const)

        for idx in self.table.indexes:
            self.indexes[idx.name] = idx

        for k in self.table.kwargs:
            self.table_kwargs.setdefault(k, self.table.kwargs[k])

    def _transfer_elements_to_new_table(self):
        assert self.new_table is None, "Can only create new table once"

        m = MetaData()
        schema = self.table.schema

        self.new_table = new_table = Table('_alembic_batch_temp',
                                           m,
                                           *(list(self.columns.values()) +
                                             list(self.table_args)),
                                           schema=schema,
                                           **self.table_kwargs)

        for const in list(self.named_constraints.values()) + \
                self.unnamed_constraints:

            const_columns = set(
                [c.key for c in _columns_for_constraint(const)])

            if not const_columns.issubset(self.column_transfers):
                continue

            if isinstance(const, ForeignKeyConstraint):
                if _fk_is_self_referential(const):
                    # for self-referential constraint, refer to the
                    # *original* table name, and not _alembic_batch_temp.
                    # This is consistent with how we're handling
                    # FK constraints from other tables; we assume that SQLite,
                    # which does not enforce these foreign keys here, keeps the
                    # names unchanged, so when we rename back, they match again.
                    const_copy = const.copy(schema=schema,
                                            target_table=self.table)
                else:
                    # "target_table" for ForeignKeyConstraint.copy() is
                    # only used if the FK is detected as being
                    # self-referential, which we are handling above.
                    const_copy = const.copy(schema=schema)
            else:
                const_copy = const.copy(schema=schema, target_table=new_table)
            if isinstance(const, ForeignKeyConstraint):
                self._setup_referent(m, const)
            new_table.append_constraint(const_copy)

    def _gather_indexes_from_both_tables(self):
        idx = []
        idx.extend(self.indexes.values())
        for index in self.new_indexes.values():
            idx.append(
                Index(index.name,
                      unique=index.unique,
                      *[self.new_table.c[col] for col in index.columns.keys()],
                      **index.kwargs))
        return idx

    def _setup_referent(self, metadata, constraint):
        spec = constraint.elements[0]._get_colspec()
        parts = spec.split(".")
        tname = parts[-2]
        if len(parts) == 3:
            referent_schema = parts[0]
        else:
            referent_schema = None

        if tname != '_alembic_batch_temp':
            key = sql_schema._get_table_key(tname, referent_schema)
            if key in metadata.tables:
                t = metadata.tables[key]
                for elem in constraint.elements:
                    colname = elem._get_colspec().split(".")[-1]
                    if not t.c.contains_column(colname):
                        t.append_column(Column(colname, sqltypes.NULLTYPE))
            else:
                Table(tname,
                      metadata,
                      *[
                          Column(n, sqltypes.NULLTYPE) for n in [
                              elem._get_colspec().split(".")[-1]
                              for elem in constraint.elements
                          ]
                      ],
                      schema=referent_schema)

    def _create(self, op_impl):
        self._transfer_elements_to_new_table()

        op_impl.prep_table_for_batch(self.table)
        op_impl.create_table(self.new_table)

        try:
            op_impl._exec(
                self.new_table.insert(inline=True).from_select(
                    list(k for k, transfer in self.column_transfers.items()
                         if 'expr' in transfer),
                    select([
                        transfer['expr']
                        for transfer in self.column_transfers.values()
                        if 'expr' in transfer
                    ])))
            op_impl.drop_table(self.table)
        except:
            op_impl.drop_table(self.new_table)
            raise
        else:
            op_impl.rename_table("_alembic_batch_temp",
                                 self.table.name,
                                 schema=self.table.schema)
            self.new_table.name = self.table.name
            try:
                for idx in self._gather_indexes_from_both_tables():
                    op_impl.create_index(idx)
            finally:
                self.new_table.name = "_alembic_batch_temp"

    def alter_column(self,
                     table_name,
                     column_name,
                     nullable=None,
                     server_default=False,
                     name=None,
                     type_=None,
                     autoincrement=None,
                     **kw):
        existing = self.columns[column_name]
        existing_transfer = self.column_transfers[column_name]
        if name is not None and name != column_name:
            # note that we don't change '.key' - we keep referring
            # to the renamed column by its old key in _create().  neat!
            existing.name = name
            existing_transfer["name"] = name

        if type_ is not None:
            type_ = sqltypes.to_instance(type_)
            existing.type = type_
            existing_transfer["expr"] = cast(existing_transfer["expr"], type_)
        if nullable is not None:
            existing.nullable = nullable
        if server_default is not False:
            sql_schema.DefaultClause(server_default)._set_parent(existing)
        if autoincrement is not None:
            existing.autoincrement = bool(autoincrement)

    def add_column(self, table_name, column, **kw):
        # we copy the column because operations.add_column()
        # gives us a Column that is part of a Table already.
        self.columns[column.name] = column.copy(schema=self.table.schema)
        self.column_transfers[column.name] = {}

    def drop_column(self, table_name, column, **kw):
        del self.columns[column.name]
        del self.column_transfers[column.name]

    def add_constraint(self, const):
        if not const.name:
            raise ValueError("Constraint must have a name")
        if isinstance(const, sql_schema.PrimaryKeyConstraint):
            if self.table.primary_key in self.unnamed_constraints:
                self.unnamed_constraints.remove(self.table.primary_key)

        self.named_constraints[const.name] = const

    def drop_constraint(self, const):
        if not const.name:
            raise ValueError("Constraint must have a name")
        try:
            del self.named_constraints[const.name]
        except KeyError:
            raise ValueError("No such constraint: '%s'" % const.name)

    def create_index(self, idx):
        self.new_indexes[idx.name] = idx

    def drop_index(self, idx):
        try:
            del self.indexes[idx.name]
        except KeyError:
            raise ValueError("No such index: '%s'" % idx.name)

    def rename_table(self, *arg, **kw):
        raise NotImplementedError("TODO")
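
A hedged usage sketch: ApplyBatchImpl implements the "move and copy" table recreation behind Alembic's batch_alter_table() (needed mainly on SQLite, which cannot alter columns in place). The table and column names below are hypothetical; a migration script drives it roughly like this:

from alembic import op
import sqlalchemy as sa

def upgrade():
    # operations recorded on batch_op are replayed against a temporary copy
    # of the table, which then replaces the original
    with op.batch_alter_table('account') as batch_op:
        batch_op.add_column(sa.Column('last_login', sa.DateTime()))
        batch_op.alter_column('name', nullable=False)
        batch_op.drop_column('legacy_flag')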
Example #8
class FieldSet(DefaultRenderers):
    """
    A `FieldSet` is bound to a SQLAlchemy mapped instance (or class, for
    creating new instances) and can render a form for editing that instance,
    perform validation, and sync the form data back to the bound instance.

    `FieldSets` are responsible for generating HTML fields from a given
    `model`.

    You can derive your own subclasses from `FieldSet` to provide a customized
    `render` and/or `configure`.

    You can write `render` by manually sticking strings together if that's what you want,
    but we recommend using a templating package for clarity and maintainability.
    FormAlchemy includes the Tempita templating package as formalchemy.tempita;
    see http://pythonpaste.org/tempita/ for documentation.

    `formalchemy.forms.template_text_tempita` is the default template used by `FieldSet`.
    FormAlchemy also includes a Mako version, `formalchemy.forms.template_text_mako`,
    and will use that instead if Mako is available.  The rendered HTML is identical
    but (we suspect) Mako is faster.

    Usage:

        - `model`:
              a SQLAlchemy mapped class or instance.  New object creation
              should be done by passing the class, which will need a default
              (no-parameter) constructor.  After construction or binding of
              the :class:`~formalchemy.forms.FieldSet`, the instantiated object will be available as
              the `.model` attribute.

        - `session=None`:
              the session to use for queries (for relations). If `model` is associated
              with a session, that will be used by default. (Objects mapped with a
              `scoped_session
              <http://www.sqlalchemy.org/docs/05/session.html#contextual-thread-local-sessions>`_
              will always have a session. Other objects will
              also have a session if they were loaded by a Query.)

        - `data=None`:
              dictionary-like object of user-submitted data to validate and/or
              sync to the `model`. Scalar attributes should have a single
              value in the dictionary; multi-valued relations should have a
              list, even if there are zero or one values submitted.  Currently,
              pylons request.params() objects and plain dictionaries are known
              to work.

        - `request=None`:
              WebOb-like object that can be taken in place of `data`.
              FormAlchemy will make sure it's a POST, and use its 'POST'
              attribute as the data.  Also, the request object will be
              available to renderers as the `.request` attribute.

        - `prefix=None`:
              the prefix to prepend to html name attributes. This is useful to avoid
              field name conflicts when there are two fieldsets creating objects
              from the same model in one html page.  (This is not needed when
              editing existing objects, since the object primary key is used as part
              of the field name.)


        Only the `model` parameter is required.

        After binding, :class:`~formalchemy.forms.FieldSet`'s `model` attribute will always be an instance.
        If you bound to a class, `FormAlchemy` will call its constructor with no
        arguments to create an appropriate instance.

        .. NOTE::

          This instance will not be added to the current session, even if you are using `Session.mapper`.

        All of these parameters may be overridden by the `bind` or `rebind`
        methods.  The `bind` method returns a new instance bound as specified,
        while `rebind` modifies the current :class:`~formalchemy.forms.FieldSet` and has
        no return value. (You may not `bind` to a different type of SQLAlchemy
        model than the initial one -- if you initially bind to a `User`, you
        must subsequently bind `User` instances to that :class:`~formalchemy.forms.FieldSet`.)

        Typically, you will configure a :class:`~formalchemy.forms.FieldSet` once in
        your common form library, then `bind` specific instances later for editing. (The
        `bind` method is thread-safe; `rebind` is not.)  Thus:

        load stuff:

        >>> from formalchemy.tests import FieldSet, User, session

        now, in `library.py`

        >>> fs = FieldSet(User)
        >>> fs.configure(options=[]) # put all configuration stuff here

        and in `controller.py`

        >>> from library import fs
        >>> user = session.query(User).first()
        >>> fs2 = fs.bind(user)
        >>> html = fs2.render()

        The `render_fields` attribute is an OrderedDict of all the `Field`'s
        that have been configured, keyed by name. The order of the fields
        is the order in `include`, or the order they were declared
        in the SQLAlchemy model class if no `include` is specified.

        The `_fields` attribute is an OrderedDict of all the `Field`'s
        the ModelRenderer knows about, keyed by name, in their
        unconfigured state.  You should not normally need to access
        `_fields` directly.

        (Note that although equivalent `Field`'s (fields referring to
        the same attribute on the SQLAlchemy model) will equate with
        the == operator, they are NOT necessarily the same `Field`
        instance.  Stick to referencing `Field`'s from their parent
        `FieldSet` to always get the "right" instance.)
    """
    __sa__ = True
    engine = _render = _render_readonly = None

    prettify = staticmethod(prettify)

    def __init__(self, model, session=None, data=None, prefix=None,
                 format=u'%(model)s-%(pk)s-%(name)s',
                 request=None):
        self._fields = OrderedDict()
        self._render_fields = OrderedDict()
        self.model = self.session = None
        self.readonly = False
        self.validator = None
        self.focus = True
        self._request = request
        self._format = format
        self._prefix = prefix
        self._errors = []


        if not model:
            raise Exception('model parameter may not be None')
        self._original_cls = isinstance(model, type) and model or type(model)

        if self.__sa__:
            FieldSet.rebind(self, model, session, data, request)

            cls = isinstance(self.model, type) and self.model or type(self.model)
            try:
                class_mapper(cls)
            except:
                # this class is not managed by SA.  extract any raw Fields defined on it.
                keys = sorted(cls.__dict__.keys(), key=lambda a: a.lower())
                for key in keys:
                    field = cls.__dict__[key]
                    if isinstance(field, fields.Field):
                        if field.name and field.name != key:
                            raise Exception('Fields in a non-mapped class have the same name as their attribute.  Do not manually give them a name.')
                        field.name = field.key = key
                        self.append(field)
                if not self._fields:
                    raise Exception("not bound to a SA instance, and no manual Field definitions found")
            else:
                # SA class.
                # load synonyms so we can ignore them
                ignore_keys = set()
                for p in class_mapper(cls).iterate_properties:
                    if isinstance(p, SynonymProperty):
                        #ignore_keys.add(p.name)
                        # Can't ignore the original, this hides synonymized relationships when the ID it points to is not also synonymed
                        ignore_keys.add(p.key)
                    elif hasattr(p, '_is_polymorphic_discriminator') and p._is_polymorphic_discriminator:
                        ignore_keys.add(p.key)
                    elif isinstance(p, CompositeProperty):
                        for p in p.props:
                            ignore_keys.add(p.key)

                # attributes we're interested in
                attrs = []
                for p in class_mapper(cls).iterate_properties:
                    attr = _get_attribute(cls, p)
                    if attr.property.key not in ignore_keys and p.key not in ignore_keys and not isinstance(attr.impl, DynamicAttributeImpl):
                        attrs.append(attr)
                # sort relations last before storing in the OrderedDict
                L = [fields.AttributeField(attr, self) for attr in attrs]
                L.sort(key=lambda a: a.is_relation)
                self._fields.update((field.key, field) for field in L)


    def configure(self, pk=False, focus=True, readonly=False, global_validator=None, exclude=[], include=[], options=[]):
        """
        The `configure` method specifies a set of attributes to be rendered.
        By default, all attributes are rendered except primary keys and
        foreign keys.  But, relations `based on` foreign keys `will` be
        rendered.  For example, if an `Order` has a `user_id` FK and a `user`
        relation based on it, `user` will be rendered (as a select box of
        `User`'s, by default) but `user_id` will not.

        Parameters:
          * `pk=False`:
                set to True to include primary key columns
          * `exclude=[]`:
                an iterable of attributes to exclude.  Other attributes will
                be rendered normally
          * `include=[]`:
                an iterable of attributes to include.  Other attributes will
                not be rendered
          * `options=[]`:
                an iterable of modified attributes.  The set of attributes to
                be rendered is unaffected
          * `global_validator=None`:
                `global_validator` should be a function that performs
                validations that need to know about the entire form.
          * `focus=True`:
                the attribute (e.g., `fs.orders`) whose rendered input element
                gets focus. Default value is True, meaning, focus the first
                element. False means do not focus at all.
          * `readonly=False`:
                if true, the fieldset will be rendered as a table (tbody)
                of names+values instead of a group of input elements.
                Opening and closing table tags are not included.
          * `modify=False`:
                if true, the fieldset will be further modified by this call.
                Otherwise the raw fields will be used.

        Only one of {`include`, `exclude`} may be specified.

        Note that there is no option to include foreign keys.  This is
        deliberate.  Use `include` if you really need to manually edit FKs.

        If `include` is specified, fields will be rendered in the order given
        in `include`.  Otherwise, fields will be rendered in alphabetical
        order.

        Examples: given a `FieldSet` `fs` bound to a `User` instance as a
        model with primary key `id` and attributes `name` and `email`, and a
        relation `orders` of related Order objects, the default will be to
        render `name`, `email`, and `orders`. To render the orders list as
        checkboxes instead of a select, you could specify::

        >>> from formalchemy.tests import FieldSet, User
        >>> fs = FieldSet(User)
        >>> fs.configure(options=[fs.orders.checkbox()])

        To render only name and email,

        >>> fs.configure(include=[fs.name, fs.email])

        or

        >>> fs.configure(exclude=[fs.orders])

        Of course, you can include modifications to a field in the `include`
        parameter, such as here, to render name and options-as-checkboxes:

        >>> fs.configure(include=[fs.name, fs.orders.checkbox()])

        Calling `configure` multiple times will only leave the last call's
        effects in place. If you want to further modify a form, use
        `reconfigure`.

        """
        self.focus = focus
        self.readonly = readonly
        self.validator = global_validator
        self._render_fields = OrderedDict([(field.key, field) for field in self._get_fields(pk, exclude, include, options, use_rendered=False)])

    def reconfigure(self, pk=False, focus=True, readonly=False, global_validator=None, exclude=[], include=[], options=[]):
        """
        Like `configure`, but does not undo the effects of a previous call
        to `configure` or `reconfigure`.
        """
        self.focus = focus
        self.readonly = readonly
        self.validator = global_validator
        self._render_fields = OrderedDict([(field.key, field) for field in self._get_fields(pk, exclude, include, options, use_rendered=True)])

    def bind(self, model=None, session=None, data=None, request=None,
             with_prefix=True):
        """
        Return a copy of this FieldSet or Grid, bound to the given
        `model`, `session`, and `data`. The parameters to this method are the
        same as in the constructor.

        Often you will create and `configure` a FieldSet or Grid at application
        startup, then `bind` specific instances to it for actual editing or display.
        """
        if not (model is not None or session or data or request):
            raise Exception('must specify at least one of {model, session, data, request}')

        if not model:
            if not self.model:
                raise Exception('model must be specified when none is already set')
            model = fields._pk(self.model) is None and type(self.model) or self.model

        # copy.copy causes a stacktrace on python 2.5.2/OSX + pylons.  unable to reproduce w/ simpler sample.
        mr = object.__new__(self.__class__)
        mr.__dict__ = dict(self.__dict__)
        # two steps so bind's error checking can work
        FieldSet.rebind(mr, model, session, data, request,
                        with_prefix=with_prefix)
        mr._fields = OrderedDict([(key, renderer.bind(mr)) for key, renderer in self._fields.items()])
        if self._render_fields:
            mr._render_fields = OrderedDict([(field.key, field) for field in
                                             [field.bind(mr) for field in self._render_fields.values()]])
        mr._request = request
        return mr


    def rebind(self, model=None, session=None, data=None, request=None,
               with_prefix=True):
        """
        Like `bind`, but acts on this instance.  No return value.
        Not all parameters are treated the same; specifically, what happens if they are NOT specified is different:

        * if `model` is not specified, the old model is used
        * if `session` is not specified, FA tries to re-guess session from the model
        * if `data` is not specified, it is rebound to None
        * if `request` is specified and `data` is not, `request.POST` is used as data.
          `request` is also saved to be accessed by renderers (as
          `fs.FIELD.renderer.request`).
        * if `with_prefix` is False then a prefix ``{Model}-{pk}`` is added to each data key
        """
        if data is None and request is not None:
            if hasattr(request, 'environ') and hasattr(request, 'POST'):
                if request.environ.get('REQUEST_METHOD', '').upper() == 'POST':
                    data = request.POST or None

        original_model = model
        if model:
            if isinstance(model, type):
                try:
                    model = model()
                except Exception as e:
                    model_error = str(e)
                    msg = ("%s appears to be a class, not an instance, but "
                           "FormAlchemy cannot instantiate it. "
                           "(Make sure all constructor parameters are "
                           "optional!). The error was:\n%s")
                    raise Exception(msg % (model, model_error))

                # take object out of session, if present
                try:
                    _obj_session = object_session(model)
                except (AttributeError, UnmappedInstanceError):
                    pass # non-SA object; doesn't need session
                else:
                    if _obj_session:
                        _obj_session.expunge(model)
            else:
                try:
                    session_ = object_session(model)
                except:
                    # non SA class
                    if fields._pk(model) is None and model is not self._original_cls:
                        error = ('Mapped instances to be bound must either have '
                                'a primary key set or not be in a Session.  When '
                                'creating a new object, bind the class instead '
                                '[i.e., bind(User), not bind(User())].')
                        raise Exception(error)
                else:
                    if session_:
                        # for instances of mapped classes, require that the instance
                        # have a PK already
                        try:
                            class_mapper(type(model))
                        except:
                            pass
                        else:
                            if fields._pk(model) is None:
                                error = ('Mapped instances to be bound must either have '
                                        'a primary key set or not be in a Session.  When '
                                        'creating a new object, bind the class instead '
                                        '[i.e., bind(User), not bind(User())]')
                                raise Exception(error)
            if (self.model and type(self.model) != type(model) and
                not issubclass(model.__class__, self._original_cls)):
                raise ValueError('You can only bind to another object of the same type or subclass you originally bound to (%s), not %s' % (type(self.model), type(model)))
            self.model = model
            self._bound_pk = fields._pk(model)

        if data is not None and not with_prefix:
            if isinstance(data, multidict.UnicodeMultiDict):
                encoding = data.encoding
            else:
                encoding = config.encoding
            pk = fields._pk(self.model) or ''
            prefix = '%s-%s' % (self._original_cls.__name__, pk)
            if self._prefix:
                prefix = '%s-%s' % (self._prefix, prefix)
            data = SimpleMultiDict([('%s-%s' % (prefix, k), v) for k, v in data.items()], encoding=encoding)

        if data is None:
            self.data = None
        elif isinstance(data, multidict.UnicodeMultiDict):
            self.data = data
        elif isinstance(data, multidict.MultiDict):
            self.data = multidict.UnicodeMultiDict(multi=data, encoding=config.encoding)
        elif hasattr(data, 'getall') and hasattr(data, 'getone'):
            self.data = data
        elif isinstance(data, (dict, list)):
            self.data = SimpleMultiDict(data, encoding=config.encoding)
        else:
            raise Exception('unsupported data object %s.  currently only dicts and Paste multidicts are supported' % data)

        if not self.__sa__:
            return

        if session:
            self.session = session
        elif model:
            if '_obj_session' in locals():
                # model may be a temporary object, expunged from its session -- grab the existing reference
                self.session = _obj_session
            else:
                try:
                    o_session = object_session(model)
                except (AttributeError, UnmappedInstanceError):
                    pass # non-SA object
                else:
                    if o_session:
                        self.session = o_session
        # if we didn't just instantiate (in which case object_session will be None),
        # the session should be the same as the object_session
        if self.session and model == original_model:
            try:
                o_session = object_session(self.model)
            except (AttributeError, UnmappedInstanceError):
                pass # non-SA object
            else:
                if o_session and self.session is not o_session:
                    raise Exception('You may not explicitly bind to a session when your model already belongs to a different one')

    def validate(self):
        """
        Validate attributes and `global_validator`.
        If validation fails, the validator should raise `ValidationError`.
        """
        if self.readonly:
            raise ValidationError('Cannot validate a read-only FieldSet')
        if self.data is None:
            raise ValidationError('Cannot validate without binding data')
        success = True
        for field in self.render_fields.values():
            success = field._validate() and success
        # run this _after_ the field validators, since each field validator
        # resets its error list. we want to allow the global validator to add
        # errors to individual fields.
        if self.validator:
            self._errors = []
            try:
                self.validator(self)
            except ValidationError as e:
                self._errors = e.args
                success = False
        return success

    def sync(self):
        """
        Sync (copy to the corresponding attributes) the data passed to the constructor or `bind` to the `model`.
        """
        if self.readonly:
            raise Exception('Cannot sync a read-only FieldSet')
        if self.data is None:
            raise Exception("No data bound; cannot sync")
        for field in self.render_fields.values():
            field.sync()
        if self.session:
            self.session.add(self.model)
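
    # Hedged usage sketch (not part of the original source): with a configured
    # FieldSet `fs`, a typical request-handling flow is roughly
    #
    #     fs2 = fs.bind(user, data=request.POST)
    #     if fs2.validate():
    #         fs2.sync()
    #         session.commit()
    #     else:
    #         errors = fs2.errors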

    def render(self, **kwargs):
        if fields._pk(self.model) != self._bound_pk and self.data is not None:
            msg = ("Primary key of model has changed since binding, "
                   "probably due to sync()ing a new instance (from %r to %r). "
                   "You can solve this by either binding to a model "
                   "with the original primary key again, or by binding data to None.")
            raise exceptions.PkError(msg % (self._bound_pk, fields._pk(self.model)))
        engine = self.engine or config.engine
        if 'request' not in kwargs:
            kwargs['request'] = self._request
        if self.readonly:
            template = 'fieldset_readonly'
        else:
            template = 'fieldset'
        return engine(template, fieldset=self, **kwargs)

    @property
    def errors(self):
        """
        A dictionary of validation failures.  Always empty before `validate()` is run.
        Dictionary keys are attributes; values are lists of messages given to `ValidationError`.
        Global errors (not specific to a single attribute) are under the key `None`.
        """
        errors = {}
        if self._errors:
            errors[None] = self._errors
        errors.update(dict([(field, field.errors)
                            for field in self.render_fields.values() if field.errors]))
        return errors


    @property
    def render_fields(self):
        """
        The set of attributes that will be rendered, as a (ordered)
        dict of `{fieldname: Field}` pairs
        """
        if not self._render_fields:
            self._render_fields = OrderedDict([(field.key, field) for field in self._get_fields()])
        return self._render_fields

    def copy(self, *args):
        """return a copy of the fieldset. args is a list of field names or field
        objects to render in the new fieldset"""
        mr = self.bind(self.model, self.session)
        _fields = self._render_fields or self._fields
        _new_fields = []
        if args:
            for field in args:
                if isinstance(field, string_types):
                    if field in _fields:
                        field = _fields.get(field)
                    else:
                        raise AttributeError('%r has no field named %s' % (self, field))
                assert isinstance(field, fields.AbstractField), field
                field.bind(mr)
                _new_fields.append(field)
            mr._render_fields = OrderedDict([(field.key, field) for field in _new_fields])
        return mr

    def append(self, field):
        """Add a form Field. By default, this Field will be included in the rendered form or table."""
        if not isinstance(field, fields.AbstractField):
            raise ValueError('Can only add Field or AttributeField objects; got %s instead' % field)
        field.parent = self
        _fields = self._render_fields or self._fields
        _fields[field.name] = field

    def add(self, field):
        warnings.warn(DeprecationWarning('FieldSet.add is deprecated. Use FieldSet.append instead. Your validator will break in FA 1.5'))
        self.append(field)

    def extend(self, fields):
        """Add a list of fields. By default, each Field will be included in the
        rendered form or table."""
        for field in fields:
            self.append(field)

    def insert(self, field, new_field):
        """Insert a new field *before* an existing field.

        This works like the ``insert()`` method of ``list`` objects: the new
        field takes the position of the existing field and pushes the rest forward.
        """
        fields_ = self._render_fields or self._fields
        if not isinstance(new_field, fields.Field):
            raise ValueError('Can only add Field objects; got %s instead' % new_field)
        if isinstance(field, fields.AbstractField):
            try:
                index = list(fields_.keys()).index(field.key)
            except ValueError:
                raise ValueError('%s not in fields' % field.key)
        else:
            raise TypeError('field must be a Field. Got %r' % field)
        new_field.parent = self
        items = list(fields_.items()) # prepare for Python 3
        items.insert(index, (new_field.name, new_field))
        if self._render_fields:
            self._render_fields = OrderedDict(items)
        else:
            self._fields = OrderedDict(items)

    def insert_after(self, field, new_field):
        """Insert a new field *after* an existing field.

        Use this if your business logic requires adding after a certain field
        rather than before it.
        """
        fields_ = self._render_fields or self._fields
        if not isinstance(new_field, fields.Field):
            raise ValueError('Can only add Field objects; got %s instead' % new_field)
        if isinstance(field, fields.AbstractField):
            try:
                index = list(fields_.keys()).index(field.key)
            except ValueError:
                raise ValueError('%s not in fields' % field.key)
        else:
            raise TypeError('field must be a Field. Got %r' % field)
        new_field.parent = self
        items = list(fields_.items())
        new_item = (new_field.name, new_field)
        if index + 1 == len(items): # after the last element ?
            items.append(new_item)
        else:
            items.insert(index + 1, new_item)
        if self._render_fields:
            self._render_fields = OrderedDict(items)
        else:
            self._fields = OrderedDict(items)

    def to_dict(self, with_prefix=True, as_string=False):
        """This method intend to help you to work with json. Render fieldset as
        a dict. If ``with_prefix`` is False then the prefix ``{Model}-{pk}`` is
        not added. If ``as_string`` is True then all value are set using
        ``field.render_readonly()`` else the pythonic value is used"""
        _fields = self._render_fields or self._fields
        def get_value(f):
            if as_string:
                return f.render_readonly()
            else:
                return f.value
        if as_string:
            data = [(f, f.render_readonly()) for f in _fields.values()]
        else:
            data = [(f, f.value) for f in _fields.values() if not isinstance(f.renderer, fields.PasswordFieldRenderer)]

        if with_prefix:
            data = [(f.renderer.name, v) for f, v in data]
        else:
            data = [(f.name, v) for f, v in data]

        return dict(data)

    def _raw_fields(self, use_rendered=False):
        if use_rendered and self._render_fields:
            return self._render_fields.values()
        else:
            return self._fields.values()

    def _get_fields(self, pk=False, exclude=[], include=[], options=[], use_rendered=False):
        # sanity check
        if include and exclude:
            raise Exception('Specify at most one of include, exclude')

        # help people who meant configure(include=[X]) but just wrote configure(X), resulting in pk getting the positional argument
        if pk not in [True, False]:
            raise ValueError('pk option must be True or False, not %s' % pk)

        # verify that options that should be lists of Fields, are
        include = list(include)
        exclude = list(exclude)
        options = list(options)
        for iterable in ('include', 'exclude', 'options'):
            L = locals()[iterable]
            for field in L:
                if not isinstance(field, fields.AbstractField):
                    raise TypeError('non-AbstractField object `%s` found in `%s`' % (field, iterable))
                if field not in self._fields.values():
                    raise ValueError('Unrecognized Field `%r` in `%s` -- did you mean to call append() first?' % (field, iterable))

        # if include is given, those are the fields used.  otherwise, include those not explicitly (or implicitly) excluded.
        if not include:
            if not pk:
                exclude.extend([wrapper for wrapper in self._raw_fields(use_rendered) if wrapper.is_pk and not wrapper.is_collection])
            exclude.extend([wrapper for wrapper in self._raw_fields(use_rendered) if wrapper.is_raw_foreign_key])
            include = [field for field in self._raw_fields(use_rendered) if field not in exclude]

        # in the returned list, replace any fields in `include` w/ the corresponding one in `options`, if present.
        # this is a bit clunky because we want to
        #   1. preserve the order given in `include`
        #   2. not modify `include` (or `options`) directly; that could surprise the caller
        options_dict = dict([(wrapper, wrapper) for wrapper in options])
        L = []
        for wrapper in include:
            if wrapper in options_dict:
                L.append(options_dict[wrapper])
            else:
                L.append(wrapper)
        return L

    def __getattr__(self, attrname):
        try:
            return self._render_fields[attrname]
        except KeyError:
            try:
                return self._fields[attrname]
            except KeyError:
                raise AttributeError(attrname)

    __getitem__ = __getattr__

    def __setattr__(self, attrname, value):
        if attrname not in ('_fields', '__dict__', 'focus', 'model', 'session', 'data') and \
           (attrname in self._fields or isinstance(value, fields.AbstractField)):
            raise AttributeError('Do not set field attributes manually.  Use append() or configure() instead')
        object.__setattr__(self, attrname, value)

    def __delattr__(self, attrname):
        if attrname in self._render_fields:
            del self._render_fields[attrname]
        elif attrname in self._fields:
            raise RuntimeError("You try to delete a field but your form is not configured")
        else:
            raise AttributeError("field %s does not exist" % attrname)

    __delitem__ = __delattr__

    def __repr__(self):
        _fields = self._fields
        conf = ''
        if self._render_fields:
            conf = ' (configured)'
            _fields = self._render_fields
        return '<%s%s with %r>' % (self.__class__.__name__, conf,
                                   list(_fields.keys()))
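A minimal usage sketch for the fieldset-manipulation API above (append, insert, insert_after, to_dict). The User model, its 'email' attribute and the extra 'nickname' field are assumptions made up for illustration and are not part of the example:

# Sketch only: assumes a FormAlchemy-style FieldSet bound to an (assumed)
# SQLAlchemy-mapped User class that has an 'email' attribute.
from formalchemy import FieldSet, Field

fs = FieldSet(User)
nickname = Field('nickname')              # hypothetical extra field

fs.append(nickname)                       # add at the end, or instead:
# fs.insert(fs.email, nickname)           # place it *before* an existing field
# fs.insert_after(fs.email, nickname)     # place it *after* an existing field

print(fs.to_dict(with_prefix=False))      # plain dict keyed by field name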
Example #9
0
class ApplyBatchImpl(object):
    def __init__(self, table, table_args, table_kwargs):
        self.table = table  # this is a Table object
        self.table_args = table_args
        self.table_kwargs = table_kwargs
        self.new_table = None
        self.column_transfers = OrderedDict(
            (c.name, {'expr': c}) for c in self.table.c
        )
        self._grab_table_elements()

    def _grab_table_elements(self):
        schema = self.table.schema
        self.columns = OrderedDict()
        for c in self.table.c:
            c_copy = c.copy(schema=schema)
            c_copy.unique = c_copy.index = False
            self.columns[c.name] = c_copy
        self.named_constraints = {}
        self.unnamed_constraints = []
        self.indexes = {}
        self.new_indexes = {}
        for const in self.table.constraints:
            if _is_type_bound(const):
                continue
            if const.name:
                self.named_constraints[const.name] = const
            else:
                self.unnamed_constraints.append(const)

        for idx in self.table.indexes:
            self.indexes[idx.name] = idx

        for k in self.table.kwargs:
            self.table_kwargs.setdefault(k, self.table.kwargs[k])

    def _transfer_elements_to_new_table(self):
        assert self.new_table is None, "Can only create new table once"

        m = MetaData()
        schema = self.table.schema

        self.new_table = new_table = Table(
            '_alembic_batch_temp', m,
            *(list(self.columns.values()) + list(self.table_args)),
            schema=schema,
            **self.table_kwargs)

        for const in list(self.named_constraints.values()) + \
                self.unnamed_constraints:

            const_columns = set([
                c.key for c in _columns_for_constraint(const)])

            if not const_columns.issubset(self.column_transfers):
                continue

            if isinstance(const, ForeignKeyConstraint):
                if _fk_is_self_referential(const):
                    # for a self-referential constraint, refer to the
                    # *original* table name, not _alembic_batch_temp.
                    # This is consistent with how we handle FK constraints
                    # from other tables: we assume SQLite, with foreign keys
                    # not enforced here, just keeps the names unchanged, so
                    # when we rename back, they match again.
                    const_copy = const.copy(
                        schema=schema, target_table=self.table)
                else:
                    # "target_table" for ForeignKeyConstraint.copy() is
                    # only used if the FK is detected as being
                    # self-referential, which we are handling above.
                    const_copy = const.copy(schema=schema)
            else:
                const_copy = const.copy(schema=schema, target_table=new_table)
            if isinstance(const, ForeignKeyConstraint):
                self._setup_referent(m, const)
            new_table.append_constraint(const_copy)

    def _gather_indexes_from_both_tables(self):
        idx = []
        idx.extend(self.indexes.values())
        for index in self.new_indexes.values():
            idx.append(
                Index(
                    index.name,
                    unique=index.unique,
                    *[self.new_table.c[col] for col in index.columns.keys()],
                    **index.kwargs)
            )
        return idx

    def _setup_referent(self, metadata, constraint):
        spec = constraint.elements[0]._get_colspec()
        parts = spec.split(".")
        tname = parts[-2]
        if len(parts) == 3:
            referent_schema = parts[0]
        else:
            referent_schema = None

        if tname != '_alembic_batch_temp':
            key = sql_schema._get_table_key(tname, referent_schema)
            if key in metadata.tables:
                t = metadata.tables[key]
                for elem in constraint.elements:
                    colname = elem._get_colspec().split(".")[-1]
                    if not t.c.contains_column(colname):
                        t.append_column(
                            Column(colname, sqltypes.NULLTYPE)
                        )
            else:
                Table(
                    tname, metadata,
                    *[Column(n, sqltypes.NULLTYPE) for n in
                        [elem._get_colspec().split(".")[-1]
                         for elem in constraint.elements]],
                    schema=referent_schema)

    def _create(self, op_impl):
        self._transfer_elements_to_new_table()

        op_impl.prep_table_for_batch(self.table)
        op_impl.create_table(self.new_table)

        try:
            op_impl._exec(
                self.new_table.insert(inline=True).from_select(
                    list(k for k, transfer in
                         self.column_transfers.items() if 'expr' in transfer),
                    select([
                        transfer['expr']
                        for transfer in self.column_transfers.values()
                        if 'expr' in transfer
                    ])
                )
            )
            op_impl.drop_table(self.table)
        except:
            op_impl.drop_table(self.new_table)
            raise
        else:
            op_impl.rename_table(
                "_alembic_batch_temp",
                self.table.name,
                schema=self.table.schema
            )
            self.new_table.name = self.table.name
            try:
                for idx in self._gather_indexes_from_both_tables():
                    op_impl.create_index(idx)
            finally:
                self.new_table.name = "_alembic_batch_temp"

    def alter_column(self, table_name, column_name,
                     nullable=None,
                     server_default=False,
                     name=None,
                     type_=None,
                     autoincrement=None,
                     **kw
                     ):
        existing = self.columns[column_name]
        existing_transfer = self.column_transfers[column_name]
        if name is not None and name != column_name:
            # note that we don't change '.key' - we keep referring
            # to the renamed column by its old key in _create().  neat!
            existing.name = name
            existing_transfer["name"] = name

        if type_ is not None:
            type_ = sqltypes.to_instance(type_)
            existing.type = type_
            existing_transfer["expr"] = cast(existing_transfer["expr"], type_)
        if nullable is not None:
            existing.nullable = nullable
        if server_default is not False:
            sql_schema.DefaultClause(server_default)._set_parent(existing)
        if autoincrement is not None:
            existing.autoincrement = bool(autoincrement)

    def add_column(self, table_name, column, **kw):
        # we copy the column because operations.add_column()
        # gives us a Column that is part of a Table already.
        self.columns[column.name] = column.copy(schema=self.table.schema)
        self.column_transfers[column.name] = {}

    def drop_column(self, table_name, column, **kw):
        del self.columns[column.name]
        del self.column_transfers[column.name]

    def add_constraint(self, const):
        if not const.name:
            raise ValueError("Constraint must have a name")
        if isinstance(const, sql_schema.PrimaryKeyConstraint):
            if self.table.primary_key in self.unnamed_constraints:
                self.unnamed_constraints.remove(self.table.primary_key)

        self.named_constraints[const.name] = const

    def drop_constraint(self, const):
        if not const.name:
            raise ValueError("Constraint must have a name")
        try:
            del self.named_constraints[const.name]
        except KeyError:
            raise ValueError("No such constraint: '%s'" % const.name)

    def create_index(self, idx):
        self.new_indexes[idx.name] = idx

    def drop_index(self, idx):
        try:
            del self.indexes[idx.name]
        except KeyError:
            raise ValueError("No such index: '%s'" % idx.name)

    def rename_table(self, *arg, **kw):
        raise NotImplementedError("TODO")
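ApplyBatchImpl is internal to Alembic's batch mode; migration scripts reach it indirectly through op.batch_alter_table(), which drives the copy-to-temp-table, drop and rename-back sequence implemented in _create() above. A minimal sketch of that public entry point, with made-up table and column names:

# Sketch only: illustrative table/column names, not taken from the example.
import sqlalchemy as sa
from alembic import op

def upgrade():
    # on SQLite this recreates 'account' via a temporary table and copies rows over
    with op.batch_alter_table('account') as batch_op:
        batch_op.add_column(sa.Column('last_login', sa.DateTime(), nullable=True))
        batch_op.alter_column('name', new_column_name='username')
        batch_op.drop_column('legacy_flag')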
Example #10
0
class ApplyBatchImpl(object):
    def __init__(self, table, table_args, table_kwargs, reflected):
        self.table = table  # this is a Table object
        self.table_args = table_args
        self.table_kwargs = table_kwargs
        self.temp_table_name = self._calc_temp_name(table.name)
        self.new_table = None
        self.column_transfers = OrderedDict((c.name, {
            "expr": c
        }) for c in self.table.c)
        self.reflected = reflected
        self._grab_table_elements()

    @classmethod
    def _calc_temp_name(cls, tablename):
        return ("_alembic_tmp_%s" % tablename)[0:50]

    def _grab_table_elements(self):
        schema = self.table.schema
        self.columns = OrderedDict()
        for c in self.table.c:
            c_copy = c.copy(schema=schema)
            c_copy.unique = c_copy.index = False
            # ensure that the type object was copied,
            # as we may need to modify it in-place
            if isinstance(c.type, SchemaEventTarget):
                assert c_copy.type is not c.type
            self.columns[c.name] = c_copy
        self.named_constraints = {}
        self.unnamed_constraints = []
        self.indexes = {}
        self.new_indexes = {}
        for const in self.table.constraints:
            if _is_type_bound(const):
                continue
            elif self.reflected and isinstance(const, CheckConstraint):
                # TODO: we are skipping reflected CheckConstraint because
                # we have no way to determine _is_type_bound() for these.
                pass
            elif const.name:
                self.named_constraints[const.name] = const
            else:
                self.unnamed_constraints.append(const)

        for idx in self.table.indexes:
            self.indexes[idx.name] = idx

        for k in self.table.kwargs:
            self.table_kwargs.setdefault(k, self.table.kwargs[k])

    def _transfer_elements_to_new_table(self):
        assert self.new_table is None, "Can only create new table once"

        m = MetaData()
        schema = self.table.schema

        self.new_table = new_table = Table(self.temp_table_name,
                                           m,
                                           *(list(self.columns.values()) +
                                             list(self.table_args)),
                                           schema=schema,
                                           **self.table_kwargs)

        for const in (list(self.named_constraints.values()) +
                      self.unnamed_constraints):

            const_columns = set(
                [c.key for c in _columns_for_constraint(const)])

            if not const_columns.issubset(self.column_transfers):
                continue

            if isinstance(const, ForeignKeyConstraint):
                if _fk_is_self_referential(const):
                    # for a self-referential constraint, refer to the
                    # *original* table name, not _alembic_batch_temp.
                    # This is consistent with how we handle FK constraints
                    # from other tables: we assume SQLite, with foreign keys
                    # not enforced here, just keeps the names unchanged, so
                    # when we rename back, they match again.
                    const_copy = const.copy(schema=schema,
                                            target_table=self.table)
                else:
                    # "target_table" for ForeignKeyConstraint.copy() is
                    # only used if the FK is detected as being
                    # self-referential, which we are handling above.
                    const_copy = const.copy(schema=schema)
            else:
                const_copy = const.copy(schema=schema, target_table=new_table)
            if isinstance(const, ForeignKeyConstraint):
                self._setup_referent(m, const)
            new_table.append_constraint(const_copy)

    def _gather_indexes_from_both_tables(self):
        idx = []
        idx.extend(self.indexes.values())
        for index in self.new_indexes.values():
            idx.append(
                Index(index.name,
                      unique=index.unique,
                      *[self.new_table.c[col] for col in index.columns.keys()],
                      **index.kwargs))
        return idx

    def _setup_referent(self, metadata, constraint):
        spec = constraint.elements[0]._get_colspec()
        parts = spec.split(".")
        tname = parts[-2]
        if len(parts) == 3:
            referent_schema = parts[0]
        else:
            referent_schema = None

        if tname != self.temp_table_name:
            key = sql_schema._get_table_key(tname, referent_schema)
            if key in metadata.tables:
                t = metadata.tables[key]
                for elem in constraint.elements:
                    colname = elem._get_colspec().split(".")[-1]
                    if not t.c.contains_column(colname):
                        t.append_column(Column(colname, sqltypes.NULLTYPE))
            else:
                Table(tname,
                      metadata,
                      *[
                          Column(n, sqltypes.NULLTYPE) for n in [
                              elem._get_colspec().split(".")[-1]
                              for elem in constraint.elements
                          ]
                      ],
                      schema=referent_schema)

    def _create(self, op_impl):
        self._transfer_elements_to_new_table()

        op_impl.prep_table_for_batch(self.table)
        op_impl.create_table(self.new_table)

        try:
            op_impl._exec(
                self.new_table.insert(inline=True).from_select(
                    list(k for k, transfer in self.column_transfers.items()
                         if "expr" in transfer),
                    select([
                        transfer["expr"]
                        for transfer in self.column_transfers.values()
                        if "expr" in transfer
                    ]),
                ))
            op_impl.drop_table(self.table)
        except:
            op_impl.drop_table(self.new_table)
            raise
        else:
            op_impl.rename_table(self.temp_table_name,
                                 self.table.name,
                                 schema=self.table.schema)
            self.new_table.name = self.table.name
            try:
                for idx in self._gather_indexes_from_both_tables():
                    op_impl.create_index(idx)
            finally:
                self.new_table.name = self.temp_table_name

    def alter_column(self,
                     table_name,
                     column_name,
                     nullable=None,
                     server_default=False,
                     name=None,
                     type_=None,
                     autoincrement=None,
                     **kw):
        existing = self.columns[column_name]
        existing_transfer = self.column_transfers[column_name]
        if name is not None and name != column_name:
            # note that we don't change '.key' - we keep referring
            # to the renamed column by its old key in _create().  neat!
            existing.name = name
            existing_transfer["name"] = name

        if type_ is not None:
            type_ = sqltypes.to_instance(type_)
            # old type is being discarded so turn off eventing
            # rules. Alternatively we can
            # erase the events set up by this type, but this is simpler.
            # we also ignore the drop_constraint that will come here from
            # Operations.implementation_for(alter_column)
            if isinstance(existing.type, SchemaEventTarget):
                existing.type._create_events = (
                    existing.type.create_constraint) = False

            if existing.type._type_affinity is not type_._type_affinity:
                existing_transfer["expr"] = cast(existing_transfer["expr"],
                                                 type_)

            existing.type = type_

            # we *dont* however set events for the new type, because
            # alter_column is invoked from
            # Operations.implementation_for(alter_column) which already
            # will emit an add_constraint()

        if nullable is not None:
            existing.nullable = nullable
        if server_default is not False:
            if server_default is None:
                existing.server_default = None
            else:
                sql_schema.DefaultClause(server_default)._set_parent(existing)
        if autoincrement is not None:
            existing.autoincrement = bool(autoincrement)

    def add_column(self, table_name, column, **kw):
        # we copy the column because operations.add_column()
        # gives us a Column that is part of a Table already.
        self.columns[column.name] = column.copy(schema=self.table.schema)
        self.column_transfers[column.name] = {}

    def drop_column(self, table_name, column, **kw):
        if column.name in self.table.primary_key.columns:
            _remove_column_from_collection(self.table.primary_key.columns,
                                           column)
        del self.columns[column.name]
        del self.column_transfers[column.name]

    def add_constraint(self, const):
        if not const.name:
            raise ValueError("Constraint must have a name")
        if isinstance(const, sql_schema.PrimaryKeyConstraint):
            if self.table.primary_key in self.unnamed_constraints:
                self.unnamed_constraints.remove(self.table.primary_key)

        self.named_constraints[const.name] = const

    def drop_constraint(self, const):
        if not const.name:
            raise ValueError("Constraint must have a name")
        try:
            const = self.named_constraints.pop(const.name)
        except KeyError:
            if _is_type_bound(const):
                # type-bound constraints are only included in the new
                # table via their type object in any case, so ignore the
                # drop_constraint() that comes here via the
                # Operations.implementation_for(alter_column)
                return
            raise ValueError("No such constraint: '%s'" % const.name)
        else:
            if isinstance(const, PrimaryKeyConstraint):
                for col in const.columns:
                    self.columns[col.name].primary_key = False

    def create_index(self, idx):
        self.new_indexes[idx.name] = idx

    def drop_index(self, idx):
        try:
            del self.indexes[idx.name]
        except KeyError:
            raise ValueError("No such index: '%s'" % idx.name)

    def rename_table(self, *arg, **kw):
        raise NotImplementedError("TODO")
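This revision tracks whether the table definition came from reflection (reflected=True) so that reflected CheckConstraints can be skipped. At the op level that roughly corresponds to whether batch_alter_table() reflects the table itself or is handed an explicit copy_from Table; a hedged sketch with invented names:

# Sketch only. Without copy_from, batch mode reflects the table from the
# database (the reflected=True path above); with copy_from it uses the
# given Table object instead and skips reflection.
import sqlalchemy as sa
from alembic import op

def upgrade():
    account = sa.Table(
        'account', sa.MetaData(),
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('name', sa.String(100)),
    )
    with op.batch_alter_table('account', copy_from=account) as batch_op:
        batch_op.alter_column('name', type_=sa.Text())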
Example #11
0
class ApplyBatchImpl(object):
    def __init__(self, table, table_args, table_kwargs):
        self.table = table  # this is a Table object
        self.table_args = table_args
        self.table_kwargs = table_kwargs
        self.new_table = None
        self.column_transfers = OrderedDict(
            (c.name, {'expr': c}) for c in self.table.c
        )
        self._grab_table_elements()

    def _grab_table_elements(self):
        schema = self.table.schema
        self.columns = OrderedDict()
        for c in self.table.c:
            c_copy = c.copy(schema=schema)
            c_copy.unique = c_copy.index = False
            self.columns[c.name] = c_copy
        self.named_constraints = {}
        self.unnamed_constraints = []
        self.indexes = {}
        for const in self.table.constraints:
            if _is_type_bound(const):
                continue
            if const.name:
                self.named_constraints[const.name] = const
            else:
                self.unnamed_constraints.append(const)

        for idx in self.table.indexes:
            self.indexes[idx.name] = idx

    def _transfer_elements_to_new_table(self):
        assert self.new_table is None, "Can only create new table once"

        m = MetaData()
        schema = self.table.schema
        self.new_table = new_table = Table(
            '_alembic_batch_temp', m,
            *(list(self.columns.values()) + list(self.table_args)),
            schema=schema,
            **self.table_kwargs)

        for const in list(self.named_constraints.values()) + \
                self.unnamed_constraints:

            const_columns = set([
                c.key for c in _columns_for_constraint(const)])

            if not const_columns.issubset(self.column_transfers):
                continue
            const_copy = const.copy(schema=schema, target_table=new_table)
            if isinstance(const, ForeignKeyConstraint):
                self._setup_referent(m, const)
            new_table.append_constraint(const_copy)

        for index in self.indexes.values():
            Index(index.name,
                  unique=index.unique,
                  *[new_table.c[col] for col in index.columns.keys()],
                  **index.kwargs)

    def _setup_referent(self, metadata, constraint):
        spec = constraint.elements[0]._get_colspec()
        parts = spec.split(".")
        tname = parts[-2]
        if len(parts) == 3:
            referent_schema = parts[0]
        else:
            referent_schema = None
        if tname != '_alembic_batch_temp':
            Table(
                tname, metadata,
                *[Column(n, sqltypes.NULLTYPE) for n in
                    [elem._get_colspec().split(".")[-1]
                     for elem in constraint.elements]],
                schema=referent_schema)

    def _create(self, op_impl):
        self._transfer_elements_to_new_table()

        op_impl.prep_table_for_batch(self.table)
        op_impl.create_table(self.new_table)

        try:
            op_impl._exec(
                self.new_table.insert(inline=True).from_select(
                    list(k for k, transfer in
                         self.column_transfers.items() if 'expr' in transfer),
                    select([
                        transfer['expr']
                        for transfer in self.column_transfers.values()
                        if 'expr' in transfer
                    ])
                )
            )
            op_impl.drop_table(self.table)
        except:
            op_impl.drop_table(self.new_table)
            raise
        else:
            op_impl.rename_table(
                "_alembic_batch_temp",
                self.table.name,
                schema=self.table.schema
            )

    def alter_column(self, table_name, column_name,
                     nullable=None,
                     server_default=False,
                     name=None,
                     type_=None,
                     autoincrement=None,
                     **kw
                     ):
        existing = self.columns[column_name]
        existing_transfer = self.column_transfers[column_name]
        if name is not None and name != column_name:
            # note that we don't change '.key' - we keep referring
            # to the renamed column by its old key in _create().  neat!
            existing.name = name
            existing_transfer["name"] = name

        if type_ is not None:
            type_ = sqltypes.to_instance(type_)
            existing.type = type_
            existing_transfer["expr"] = cast(existing_transfer["expr"], type_)
        if nullable is not None:
            existing.nullable = nullable
        if server_default is not False:
            existing.server_default = server_default
        if autoincrement is not None:
            existing.autoincrement = bool(autoincrement)

    def add_column(self, table_name, column, **kw):
        # we copy the column because operations.add_column()
        # gives us a Column that is part of a Table already.
        self.columns[column.name] = column.copy(schema=self.table.schema)
        self.column_transfers[column.name] = {}

    def drop_column(self, table_name, column, **kw):
        del self.columns[column.name]
        del self.column_transfers[column.name]

    def add_constraint(self, const):
        if not const.name:
            raise ValueError("Constraint must have a name")
        self.named_constraints[const.name] = const

    def drop_constraint(self, const):
        if not const.name:
            raise ValueError("Constraint must have a name")
        try:
            del self.named_constraints[const.name]
        except KeyError:
            raise ValueError("No such constraint: '%s'" % const.name)

    def add_index(self, idx):
        self.indexes[idx.name] = idx

    def drop_index(self, idx):
        try:
            del self.indexes[idx.name]
        except KeyError:
            raise ValueError("No such index: '%s'" % idx.name)

    def rename_table(self, *arg, **kw):
        raise NotImplementedError("TODO")
Example #12
0
class ApplyBatchImpl(object):
    def __init__(
            self,
            impl,
            table,
            table_args,
            table_kwargs,
            reflected,
            partial_reordering=(),
    ):
        self.impl = impl
        self.table = table  # this is a Table object
        self.table_args = table_args
        self.table_kwargs = table_kwargs
        self.temp_table_name = self._calc_temp_name(table.name)
        self.new_table = None

        self.partial_reordering = partial_reordering  # tuple of tuples
        self.add_col_ordering = ()  # tuple of tuples

        self.column_transfers = OrderedDict((c.name, {
            "expr": c
        }) for c in self.table.c)
        self.existing_ordering = list(self.column_transfers)

        self.reflected = reflected
        self._grab_table_elements()

    @classmethod
    def _calc_temp_name(cls, tablename):
        return ("_alembic_tmp_%s" % tablename)[0:50]

    def _grab_table_elements(self):
        schema = self.table.schema
        self.columns = OrderedDict()
        for c in self.table.c:
            c_copy = c.copy(schema=schema)
            c_copy.unique = c_copy.index = False
            # ensure that the type object was copied,
            # as we may need to modify it in-place
            if isinstance(c.type, SchemaEventTarget):
                assert c_copy.type is not c.type
            self.columns[c.name] = c_copy
        self.named_constraints = {}
        self.unnamed_constraints = []
        self.col_named_constraints = {}
        self.indexes = {}
        self.new_indexes = {}

        for const in self.table.constraints:
            if _is_type_bound(const):
                continue
            elif self.reflected and isinstance(const, CheckConstraint):
                # TODO: we are skipping reflected CheckConstraint because
                # we have no way to determine _is_type_bound() for these.
                pass
            elif const.name:
                self.named_constraints[const.name] = const
            else:
                self.unnamed_constraints.append(const)

        if not self.reflected:
            for col in self.table.c:
                for const in col.constraints:
                    if const.name:
                        self.col_named_constraints[const.name] = (col, const)

        for idx in self.table.indexes:
            self.indexes[idx.name] = idx

        for k in self.table.kwargs:
            self.table_kwargs.setdefault(k, self.table.kwargs[k])

    def _adjust_self_columns_for_partial_reordering(self):
        pairs = set()

        col_by_idx = list(self.columns)

        if self.partial_reordering:
            for tuple_ in self.partial_reordering:
                for index, elem in enumerate(tuple_):
                    if index > 0:
                        pairs.add((tuple_[index - 1], elem))
        else:
            for index, elem in enumerate(self.existing_ordering):
                if index > 0:
                    pairs.add((col_by_idx[index - 1], elem))

        pairs.update(self.add_col_ordering)

        # this can happen if some columns were dropped and not removed
        # from existing_ordering.  this should be prevented already, but
        # conservatively making sure this didn't happen
        pairs = [p for p in pairs if p[0] != p[1]]

        sorted_ = list(
            topological.sort(pairs, col_by_idx, deterministic_order=True))
        self.columns = OrderedDict((k, self.columns[k]) for k in sorted_)
        self.column_transfers = OrderedDict(
            (k, self.column_transfers[k]) for k in sorted_)

    def _transfer_elements_to_new_table(self):
        assert self.new_table is None, "Can only create new table once"

        m = MetaData()
        schema = self.table.schema

        if self.partial_reordering or self.add_col_ordering:
            self._adjust_self_columns_for_partial_reordering()

        self.new_table = new_table = Table(self.temp_table_name,
                                           m,
                                           *(list(self.columns.values()) +
                                             list(self.table_args)),
                                           schema=schema,
                                           **self.table_kwargs)

        for const in (list(self.named_constraints.values()) +
                      self.unnamed_constraints):

            const_columns = set(
                [c.key for c in _columns_for_constraint(const)])

            if not const_columns.issubset(self.column_transfers):
                continue

            if isinstance(const, ForeignKeyConstraint):
                if _fk_is_self_referential(const):
                    # for a self-referential constraint, refer to the
                    # *original* table name, not _alembic_batch_temp.
                    # This is consistent with how we handle FK constraints
                    # from other tables: we assume SQLite, with foreign keys
                    # not enforced here, just keeps the names unchanged, so
                    # when we rename back, they match again.
                    const_copy = const.copy(schema=schema,
                                            target_table=self.table)
                else:
                    # "target_table" for ForeignKeyConstraint.copy() is
                    # only used if the FK is detected as being
                    # self-referential, which we are handling above.
                    const_copy = const.copy(schema=schema)
            else:
                const_copy = const.copy(schema=schema, target_table=new_table)
            if isinstance(const, ForeignKeyConstraint):
                self._setup_referent(m, const)
            new_table.append_constraint(const_copy)

    def _gather_indexes_from_both_tables(self):
        idx = []
        idx.extend(self.indexes.values())
        for index in self.new_indexes.values():
            idx.append(
                Index(index.name,
                      unique=index.unique,
                      *[self.new_table.c[col] for col in index.columns.keys()],
                      **index.kwargs))
        return idx

    def _setup_referent(self, metadata, constraint):
        spec = constraint.elements[0]._get_colspec()
        parts = spec.split(".")
        tname = parts[-2]
        if len(parts) == 3:
            referent_schema = parts[0]
        else:
            referent_schema = None

        if tname != self.temp_table_name:
            key = sql_schema._get_table_key(tname, referent_schema)
            if key in metadata.tables:
                t = metadata.tables[key]
                for elem in constraint.elements:
                    colname = elem._get_colspec().split(".")[-1]
                    if colname not in t.c:
                        t.append_column(Column(colname, sqltypes.NULLTYPE))
            else:
                Table(tname,
                      metadata,
                      *[
                          Column(n, sqltypes.NULLTYPE) for n in [
                              elem._get_colspec().split(".")[-1]
                              for elem in constraint.elements
                          ]
                      ],
                      schema=referent_schema)

    def _create(self, op_impl):
        self._transfer_elements_to_new_table()

        op_impl.prep_table_for_batch(self, self.table)
        op_impl.create_table(self.new_table)

        try:
            op_impl._exec(
                _insert_inline(self.new_table).from_select(
                    list(k for k, transfer in self.column_transfers.items()
                         if "expr" in transfer),
                    _select(*[
                        transfer["expr"]
                        for transfer in self.column_transfers.values()
                        if "expr" in transfer
                    ]),
                ))
            op_impl.drop_table(self.table)
        except:
            op_impl.drop_table(self.new_table)
            raise
        else:
            op_impl.rename_table(self.temp_table_name,
                                 self.table.name,
                                 schema=self.table.schema)
            self.new_table.name = self.table.name
            try:
                for idx in self._gather_indexes_from_both_tables():
                    op_impl.create_index(idx)
            finally:
                self.new_table.name = self.temp_table_name

    def alter_column(self,
                     table_name,
                     column_name,
                     nullable=None,
                     server_default=False,
                     name=None,
                     type_=None,
                     autoincrement=None,
                     comment=False,
                     **kw):
        existing = self.columns[column_name]
        existing_transfer = self.column_transfers[column_name]
        if name is not None and name != column_name:
            # note that we don't change '.key' - we keep referring
            # to the renamed column by its old key in _create().  neat!
            existing.name = name
            existing_transfer["name"] = name

        if type_ is not None:
            type_ = sqltypes.to_instance(type_)
            # old type is being discarded so turn off eventing
            # rules. Alternatively we can
            # erase the events set up by this type, but this is simpler.
            # we also ignore the drop_constraint that will come here from
            # Operations.implementation_for(alter_column)
            if isinstance(existing.type, SchemaEventTarget):
                existing.type._create_events = (
                    existing.type.create_constraint) = False

            self.impl.cast_for_batch_migrate(existing, existing_transfer,
                                             type_)

            existing.type = type_

            # we *dont* however set events for the new type, because
            # alter_column is invoked from
            # Operations.implementation_for(alter_column) which already
            # will emit an add_constraint()

        if nullable is not None:
            existing.nullable = nullable
        if server_default is not False:
            if server_default is None:
                existing.server_default = None
            else:
                sql_schema.DefaultClause(server_default)._set_parent(existing)
        if autoincrement is not None:
            existing.autoincrement = bool(autoincrement)

        if comment is not False:
            existing.comment = comment

    def _setup_dependencies_for_add_column(self, colname, insert_before,
                                           insert_after):
        index_cols = self.existing_ordering
        col_indexes = {name: i for i, name in enumerate(index_cols)}

        if not self.partial_reordering:
            if insert_after:
                if not insert_before:
                    if insert_after in col_indexes:
                        # insert after an existing column
                        idx = col_indexes[insert_after] + 1
                        if idx < len(index_cols):
                            insert_before = index_cols[idx]
                    else:
                        # insert after a column that is also new
                        insert_before = dict(
                            self.add_col_ordering)[insert_after]
            if insert_before:
                if not insert_after:
                    if insert_before in col_indexes:
                        # insert before an existing column
                        idx = col_indexes[insert_before] - 1
                        if idx >= 0:
                            insert_after = index_cols[idx]
                    else:
                        # insert before a column that is also new
                        insert_after = dict(
                            (b, a)
                            for a, b in self.add_col_ordering)[insert_before]

        if insert_before:
            self.add_col_ordering += ((colname, insert_before), )
        if insert_after:
            self.add_col_ordering += ((insert_after, colname), )

        if (not self.partial_reordering and not insert_before
                and not insert_after and col_indexes):
            self.add_col_ordering += ((index_cols[-1], colname), )

    def add_column(self,
                   table_name,
                   column,
                   insert_before=None,
                   insert_after=None,
                   **kw):
        self._setup_dependencies_for_add_column(column.name, insert_before,
                                                insert_after)
        # we copy the column because operations.add_column()
        # gives us a Column that is part of a Table already.
        self.columns[column.name] = column.copy(schema=self.table.schema)
        self.column_transfers[column.name] = {}

    def drop_column(self, table_name, column, **kw):
        if column.name in self.table.primary_key.columns:
            _remove_column_from_collection(self.table.primary_key.columns,
                                           column)
        del self.columns[column.name]
        del self.column_transfers[column.name]
        self.existing_ordering.remove(column.name)

    def create_column_comment(self, column):
        """the batch table creation function will issue create_column_comment
        on the real "impl" as part of the create table process.

        That is, the Column object will have the comment on it already,
        so when it is received by add_column() it will be a normal part of
        the CREATE TABLE and doesn't need an extra step here.

        """

    def add_constraint(self, const):
        if not const.name:
            raise ValueError("Constraint must have a name")
        if isinstance(const, sql_schema.PrimaryKeyConstraint):
            if self.table.primary_key in self.unnamed_constraints:
                self.unnamed_constraints.remove(self.table.primary_key)

        self.named_constraints[const.name] = const

    def drop_constraint(self, const):
        if not const.name:
            raise ValueError("Constraint must have a name")
        try:
            if const.name in self.col_named_constraints:
                col, const = self.col_named_constraints.pop(const.name)

                for col_const in list(self.columns[col.name].constraints):
                    if col_const.name == const.name:
                        self.columns[col.name].constraints.remove(col_const)
            else:
                const = self.named_constraints.pop(const.name)
        except KeyError:
            if _is_type_bound(const):
                # type-bound constraints are only included in the new
                # table via their type object in any case, so ignore the
                # drop_constraint() that comes here via the
                # Operations.implementation_for(alter_column)
                return
            raise ValueError("No such constraint: '%s'" % const.name)
        else:
            if isinstance(const, PrimaryKeyConstraint):
                for col in const.columns:
                    self.columns[col.name].primary_key = False

    def create_index(self, idx):
        self.new_indexes[idx.name] = idx

    def drop_index(self, idx):
        try:
            del self.indexes[idx.name]
        except KeyError:
            raise ValueError("No such index: '%s'" % idx.name)

    def rename_table(self, *arg, **kw):
        raise NotImplementedError("TODO")
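Compared with the earlier variants, this one adds column-ordering support (partial_reordering, add_col_ordering, and the insert_before/insert_after arguments handled by _setup_dependencies_for_add_column). From a migration these are reached through batch mode's add_column; a hedged sketch, assuming an Alembic version whose batch add_column accepts these keywords, with invented names:

# Sketch only: 'account', 'username' and 'nickname' are illustrative.
import sqlalchemy as sa
from alembic import op

def upgrade():
    with op.batch_alter_table('account') as batch_op:
        # ask for the new column to be placed right after an existing one
        # in the rebuilt table
        batch_op.add_column(sa.Column('nickname', sa.String(50)),
                            insert_after='username')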
Example #13
0
    def get(self):
        args = self.parser.parse_args()
        if args.get('summary'):
            logs = RequestLog.query.order_by(RequestLog.created).all()

            if len(logs) < 1:
                return {}

            first_d = logs[0].created.date()
            last_d = logs[-1].created.date()

            result = OrderedDict()

            t_date = ddate(first_d.year, first_d.month, first_d.day)
            while t_date != last_d:
                result[str(t_date)] = 0
                t_date += timedelta(days=1)

            for log in logs:
                date = log.created.date()
                sdate = str(date)
                if result.get(sdate):
                    result[sdate] += 1
                else:
                    result[sdate] = 1

            if args.get('csv'):
                csv_tmp = tempfile.NamedTemporaryFile(mode='w+',
                                                      prefix='logs_csv_',
                                                      delete=False)
                csv_writer = csv.writer(csv_tmp)
                csv_writer.writerow(['DATE', 'N_REQUESTS'])

                for k, v in result.items():
                    csv_writer.writerow([k, v])

                csv_tmp.close()

                tmp_send = BytesIO()
                with open(csv_tmp.name, mode='rb') as f:
                    tmp_send.write(f.read())

                tmp_send.seek(0)
                response = send_file(tmp_send,
                                     mimetype='text/csv',
                                     as_attachment=True,
                                     attachment_filename='logs.csv')

                os.remove(csv_tmp.name)
                return response

            else:
                return result

        else:
            startdate = datetime.today().date() - timedelta(days=7)
            logs = RequestLog.query.filter(
                RequestLog.created > startdate).all()
            result = []

            for log in logs:
                result.append({
                    "client_id":
                    clientserializer.dumps(log.client_id),
                    "test_id":
                    testserializer.dumps(log.test_id),
                    "result":
                    log.result,
                    "error":
                    log.error,
                    "timestamp":
                    log.created.isoformat()
                })

            return {"results": result, "startdate": startdate.isoformat()}
Example #14
0
class FieldSet(BaseFieldSet):
    """FieldSet aware of zope schema. See :class:`formalchemy.forms.FieldSet` for full api."""

    __sa__ = False
    _fields_mapping = {
        schema.TextLine: fatypes.Unicode,
        schema.Text: fatypes.Unicode,
        schema.Int: fatypes.Integer,
        schema.Bool: fatypes.Boolean,
        schema.Float: fatypes.Float,
        schema.Date: fatypes.Date,
        schema.Datetime: fatypes.DateTime,
        schema.Time: fatypes.Time,
        schema.Choice: fatypes.Unicode,
        schema.List: fatypes.List,
        schema.Password: fatypes.Unicode,
    }

    def __init__(self, model, **kwargs):
        BaseFieldSet.__init__(self, model, **kwargs)
        self.iface = model
        self.rebind(model)
        self._fields = OrderedDict()
        self._render_fields = OrderedDict()
        self._bound_pk = None
        for name, field in schema.getFieldsInOrder(self.iface):
            klass = field.__class__
            try:
                t = self._fields_mapping[klass]
            except KeyError:
                raise NotImplementedError('%s is not mapped to a type' % klass)
            else:
                self.append(Field(name=name, type=t))
                self._fields[name].label_text = field.title or name
                if field.description:
                    self._fields[name].set(instructions=field.description)
                if field.required:
                    self._fields[name].validators.append(validators.required)
                if klass is schema.Password:
                    self._fields[name].set(
                        renderer=fields.PasswordFieldRenderer)
                if klass is schema.Text:
                    self._fields[name].set(
                        renderer=fields.TextAreaFieldRenderer)
                if klass is schema.List:
                    value_type = self.iface[name].value_type
                    if isinstance(value_type, schema.Choice):
                        self._fields[name].set(options=value_type,
                                               multiple=True)
                    else:
                        self._fields[name].set(multiple=True)
                elif klass is schema.Choice:
                    self._fields[name].set(renderer=fields.SelectFieldRenderer,
                                           options=self.iface[name])

    def bind(self, model, session=None, data=None, request=None):
        if not (model is not None or session or data):
            raise Exception(
                'must specify at least one of {model, session, data}')
        # copy.copy causes a stacktrace on python 2.5.2/OSX + pylons.  unable to reproduce w/ simpler sample.
        mr = object.__new__(self.__class__)
        mr.__dict__ = dict(self.__dict__)
        # two steps so bind's error checking can work
        mr.rebind(model, session, data)
        mr._request = request
        mr._fields = OrderedDict([(key, renderer.bind(mr))
                                  for key, renderer in self._fields.items()])
        if self._render_fields:
            mr._render_fields = OrderedDict([
                (field.key, field) for field in
                [field.bind(mr) for field in self._render_fields.values()]
            ])
        return mr

    def gen_model(self, model=None, dict_like=False, **kwargs):
        if model and self.iface.providedBy(model):
            return model
        factory = gen_model(self.iface, model, dict_like=dict_like)
        model = factory(context=model, **kwargs)
        return model

    def rebind(self, model, session=None, data=None):
        if model is not self.iface:
            if model and not self.iface.providedBy(model):
                if getattr(model, '__implemented__', None) is not None:
                    raise ValueError('%r does not provide %r' %
                                     (model, self.iface))
                model = self.gen_model(model)
        self.model = model
        self._bound_pk = fields._pk(model)
        if data is None:
            self.data = None
        elif hasattr(data, 'getall') and hasattr(data, 'getone'):
            self.data = data
        else:
            try:
                self.data = SimpleMultiDict(data)
            except:
                raise Exception(
                    'unsupported data object %s. Currently only dicts and Paste multidicts are supported'
                    % data)
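This FieldSet derives its fields from a zope.schema interface instead of a SQLAlchemy mapper, using the _fields_mapping table above. A hedged sketch of the kind of interface it could be instantiated with (all names invented for illustration):

# Sketch only: an interface using several of the mapped zope.schema types;
# the FieldSet above would then be created as FieldSet(IUser).
from zope import interface, schema

class IUser(interface.Interface):
    name = schema.TextLine(title=u'Name', required=True)
    bio = schema.Text(title=u'Bio', required=False,
                      description=u'Rendered with a textarea per the mapping above')
    password = schema.Password(title=u'Password', required=True)
    favorite_color = schema.Choice(title=u'Favorite color',
                                   values=[u'red', u'green', u'blue'])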
Example #15
0
class FieldSet(DefaultRenderers):
    """
    A `FieldSet` is bound to a SQLAlchemy mapped instance (or class, for
    creating new instances) and can render a form for editing that instance,
    perform validation, and sync the form data back to the bound instance.

    `FieldSets` are responsible for generating HTML fields from a given
    `model`.

    You can derive your own subclasses from `FieldSet` to provide a customized
    `render` and/or `configure`.

    You can write `render` by manually sticking strings together if that's what you want,
    but we recommend using a templating package for clarity and maintainability.
    FormAlchemy includes the Tempita templating package as formalchemy.tempita;
    see http://pythonpaste.org/tempita/ for documentation.

    `formalchemy.forms.template_text_tempita` is the default template used by `FieldSet.`
    FormAlchemy also includes a Mako version, `formalchemy.forms.template_text_mako`,
    and will use that instead if Mako is available.  The rendered HTML is identical
    but (we suspect) Mako is faster.

    Usage:

        - `model`:
              a SQLAlchemy mapped class or instance.  New object creation
              should be done by passing the class, which will need a default
              (no-parameter) constructor.  After construction or binding of
              the :class:`~formalchemy.forms.FieldSet`, the instantiated object will be available as
              the `.model` attribute.

        - `session=None`:
              the session to use for queries (for relations). If `model` is associated
              with a session, that will be used by default. (Objects mapped with a
              `scoped_session
              <http://www.sqlalchemy.org/docs/05/session.html#contextual-thread-local-sessions>`_
              will always have a session. Other objects will
              also have a session if they were loaded by a Query.)

        - `data=None`:
              dictionary-like object of user-submitted data to validate and/or
              sync to the `model`. Scalar attributes should have a single
              value in the dictionary; multi-valued relations should have a
              list, even if there are zero or one values submitted.  Currently,
              pylons request.params() objects and plain dictionaries are known
              to work.

        - `request=None`:
              WebOb-like object that can be taken in place of `data`.
              FormAlchemy will make sure it's a POST, and use its 'POST'
              attribute as the data.  Also, the request object will be
              available to renderers as the `.request` attribute.

        - `prefix=None`:
              the prefix to prepend to html name attributes. This is useful to avoid
              field name conflicts when there are two fieldsets creating objects
              from the same model in one html page.  (This is not needed when
              editing existing objects, since the object primary key is used as part
              of the field name.)


        Only the `model` parameter is required.

        After binding, :class:`~formalchemy.forms.FieldSet`'s `model` attribute will always be an instance.
        If you bound to a class, `FormAlchemy` will call its constructor with no
        arguments to create an appropriate instance.

        .. NOTE::

          This instance will not be added to the current session, even if you are using `Session.mapper`.

        All of these parameters may be overridden by the `bind` or `rebind`
        methods.  The `bind` method returns a new instance bound as specified,
        while `rebind` modifies the current :class:`~formalchemy.forms.FieldSet` and has
        no return value. (You may not `bind` to a different type of SQLAlchemy
        model than the initial one -- if you initially bind to a `User`, you
        must subsequently bind `User`'s to that :class:`~formalchemy.forms.FieldSet`.)

        Typically, you will configure a :class:`~formalchemy.forms.FieldSet` once in
        your common form library, then `bind` specific instances later for editing. (The
        `bind` method is thread-safe; `rebind` is not.)  Thus:

        load stuff:

        >>> from formalchemy.tests import FieldSet, User, session

        now, in `library.py`

        >>> fs = FieldSet(User)
        >>> fs.configure(options=[]) # put all configuration stuff here

        and in `controller.py`

        >>> from library import fs
        >>> user = session.query(User).first()
        >>> fs2 = fs.bind(user)
        >>> html = fs2.render()

        The `render_fields` attribute is an OrderedDict of all the `Field`s
        that have been configured, keyed by name. The order of the fields
        is the order given in `include`, or the order they were declared
        in the SQLAlchemy model class if no `include` is specified.

        The `_fields` attribute is an OrderedDict of all the `Field`s
        the ModelRenderer knows about, keyed by name, in their
        unconfigured state.  You should not normally need to access
        `_fields` directly.

        (Note that although equivalent `Field`s (fields referring to
        the same attribute on the SQLAlchemy model) compare equal with
        the == operator, they are NOT necessarily the same `Field`
        instance.  Stick to referencing `Field`s from their parent
        `FieldSet` to always get the "right" instance.)
    """
    __sa__ = True
    engine = _render = _render_readonly = None

    prettify = staticmethod(prettify)

    def __init__(self,
                 model,
                 session=None,
                 data=None,
                 prefix=None,
                 format=u'%(model)s-%(pk)s-%(name)s',
                 request=None):
        self._fields = OrderedDict()
        self._render_fields = OrderedDict()
        self.model = self.session = None
        self.readonly = False
        self.validator = None
        self.focus = True
        self._request = request
        self._format = format
        self._prefix = prefix
        self._errors = []

        if not model:
            raise Exception('model parameter may not be None')
        self._original_cls = isinstance(model, type) and model or type(model)

        if self.__sa__:
            FieldSet.rebind(self, model, session, data, request)

            cls = isinstance(self.model, type) and self.model or type(
                self.model)
            try:
                class_mapper(cls)
            except:
                # this class is not managed by SA.  extract any raw Fields defined on it.
                keys = sorted(cls.__dict__.keys(), key=lambda a: a.lower())
                for key in keys:
                    field = cls.__dict__[key]
                    if isinstance(field, fields.Field):
                        if field.name and field.name != key:
                            raise Exception(
                                'Fields in a non-mapped class must have the same name as their attribute.  Do not manually give them a name.'
                            )
                        field.name = field.key = key
                        self.append(field)
                if not self._fields:
                    raise Exception(
                        "not bound to a SA instance, and no manual Field definitions found"
                    )
            else:
                # SA class.
                # load synonyms so we can ignore them
                ignore_keys = set()
                for p in class_mapper(cls).iterate_properties:
                    if isinstance(p, SynonymProperty):
                        ignore_keys.add(p.name)
                        # Can't ignore the original, this hides synonymized relationships when the ID it points to is not also synonymed
                        # ignore_keys.add(p.key)
                    elif hasattr(p, '_is_polymorphic_discriminator'
                                 ) and p._is_polymorphic_discriminator:
                        ignore_keys.add(p.key)
                    elif isinstance(p, CompositeProperty):
                        for p in p.props:
                            ignore_keys.add(p.key)

                # attributes we're interested in
                attrs = []
                for p in class_mapper(cls).iterate_properties:
                    attr = _get_attribute(cls, p)
                    if ((isinstance(p, SynonymProperty) or
                         (attr.property.key not in ignore_keys
                          and p.key not in ignore_keys)) and
                            not isinstance(attr.impl, DynamicAttributeImpl)):
                        attrs.append(attr)
                # sort relations last before storing in the OrderedDict
                L = [fields.AttributeField(attr, self) for attr in attrs]
                L.sort(key=lambda a: a.is_relation)
                self._fields.update((field.key, field) for field in L)

    def configure(self,
                  pk=False,
                  focus=True,
                  readonly=False,
                  global_validator=None,
                  exclude=[],
                  include=[],
                  options=[]):
        """
        The `configure` method specifies a set of attributes to be rendered.
        By default, all attributes are rendered except primary keys and
        foreign keys.  But, relations `based on` foreign keys `will` be
        rendered.  For example, if an `Order` has a `user_id` FK and a `user`
        relation based on it, `user` will be rendered (as a select box of
        `User`'s, by default) but `user_id` will not.

        Parameters:
          * `pk=False`:
                set to True to include primary key columns
          * `exclude=[]`:
                an iterable of attributes to exclude.  Other attributes will
                be rendered normally
          * `include=[]`:
                an iterable of attributes to include.  Other attributes will
                not be rendered
          * `options=[]`:
                an iterable of modified attributes.  The set of attributes to
                be rendered is unaffected
          * `global_validator=None`:
                `global_validator` should be a function that performs
                validations that need to know about the entire form.
          * `focus=True`:
                the attribute (e.g., `fs.orders`) whose rendered input element
                gets focus. The default of True means focus the first
                element; False means do not focus at all.
          * `readonly=False`:
                if true, the fieldset will be rendered as a table (tbody)
                of names+values instead of a group of input elements.
                Opening and closing table tags are not included.
          * `modify=False`:
                if true, the fieldset will be further modified by this call.
                Otherwise the raw fields will be used.

        Only one of {`include`, `exclude`} may be specified.

        Note that there is no option to include foreign keys.  This is
        deliberate.  Use `include` if you really need to manually edit FKs.

        If `include` is specified, fields will be rendered in the order given
        in `include`.  Otherwise, fields will be rendered in alphabetical
        order.

        Examples: given a `FieldSet` `fs` bound to a `User` instance as a
        model with primary key `id` and attributes `name` and `email`, and a
        relation `orders` of related Order objects, the default will be to
        render `name`, `email`, and `orders`. To render the orders list as
        checkboxes instead of a select, you could specify::

        >>> from formalchemy.tests import FieldSet, User
        >>> fs = FieldSet(User)
        >>> fs.configure(options=[fs.orders.checkbox()])

        To render only name and email,

        >>> fs.configure(include=[fs.name, fs.email])

        or

        >>> fs.configure(exclude=[fs.orders])

        Of course, you can include modifications to a field in the `include`
        parameter, such as here, to render name and options-as-checkboxes:

        >>> fs.configure(include=[fs.name, fs.orders.checkbox()])

        Calling `configure` multiple times will only leave the last call's
        effects in place. If you want to further modify a form, use
        `reconfigure`.

        """
        self.focus = focus
        self.readonly = readonly
        self.validator = global_validator
        self._render_fields = OrderedDict([
            (field.key, field) for field in self._get_fields(
                pk, exclude, include, options, use_rendered=False)
        ])

    def reconfigure(self,
                    pk=False,
                    focus=True,
                    readonly=False,
                    global_validator=None,
                    exclude=[],
                    include=[],
                    options=[]):
        """
        Like `configure`, but does not undo the effects of a previous call
        to `configure` or `reconfigure`.
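
        A minimal sketch (assuming a `FieldSet` configured as in the class
        docstring): a later `reconfigure` keeps the previously configured
        fields and only changes what you pass in.

        >>> fs.configure(include=[fs.name, fs.email])
        >>> fs.reconfigure(readonly=True)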
        """
        self.focus = focus
        self.readonly = readonly
        self.validator = global_validator
        self._render_fields = OrderedDict([
            (field.key, field) for field in self._get_fields(
                pk, exclude, include, options, use_rendered=True)
        ])

    def bind(self,
             model=None,
             session=None,
             data=None,
             request=None,
             with_prefix=True):
        """
        Return a copy of this FieldSet or Grid, bound to the given
        `model`, `session`, and `data`. The parameters to this method are the
        same as in the constructor.

        Often you will create and `configure` a FieldSet or Grid at application
        startup, then `bind` specific instances to it for actual editing or display.
        """
        if not (model is not None or session or data or request):
            raise Exception(
                'must specify at least one of {model, session, data, request}')

        if not model:
            if not self.model:
                raise Exception(
                    'model must be specified when none is already set')
            model = fields._pk(self.model) is None and type(
                self.model) or self.model

        # copy.copy causes a stacktrace on python 2.5.2/OSX + pylons.  unable to reproduce w/ simpler sample.
        mr = object.__new__(self.__class__)
        mr.__dict__ = dict(self.__dict__)
        # two steps so bind's error checking can work
        FieldSet.rebind(mr,
                        model,
                        session,
                        data,
                        request,
                        with_prefix=with_prefix)
        mr._fields = OrderedDict([(key, renderer.bind(mr))
                                  for key, renderer in self._fields.items()])
        if self._render_fields:
            mr._render_fields = OrderedDict([
                (field.key, field) for field in
                [field.bind(mr) for field in self._render_fields.values()]
            ])
        mr._request = request
        return mr

    def rebind(self,
               model=None,
               session=None,
               data=None,
               request=None,
               with_prefix=True):
        """
        Like `bind`, but acts on this instance.  No return value.
        Not all parameters are treated the same; specifically, what happens if they are NOT specified is different:

        * if `model` is not specified, the old model is used
        * if `session` is not specified, FA tries to re-guess session from the model
        * if `data` is not specified, it is rebound to None
        * if `request` is specified and `data` is not, `request.POST` is used as the data.
          `request` is also saved so renderers can access it (as
          `fs.FIELD.renderer.request`).
        * if `with_prefix` is False then the prefix ``{Model}-{pk}`` is added to each data key
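
        A minimal sketch (reusing the `User` and `session` fixtures from the
        class docstring); the previously bound data is reset because `data`
        is not passed:

        >>> fs.rebind(session.query(User).first())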
        """
        if data is None and request is not None:
            if hasattr(request, 'environ') and hasattr(request, 'POST'):
                if request.environ.get('REQUEST_METHOD', '').upper() == 'POST':
                    data = request.POST or None

        original_model = model
        if model:
            if isinstance(model, type):
                try:
                    model = model()
                except Exception as e:
                    model_error = str(e)
                    msg = ("%s appears to be a class, not an instance, but "
                           "FormAlchemy cannot instantiate it. "
                           "(Make sure all constructor parameters are "
                           "optional!). The error was:\n%s")
                    raise Exception(msg % (model, model_error))

                # take object out of session, if present
                try:
                    _obj_session = object_session(model)
                except (AttributeError, UnmappedInstanceError):
                    pass  # non-SA object; doesn't need session
                else:
                    if _obj_session:
                        _obj_session.expunge(model)
            else:
                try:
                    session_ = object_session(model)
                except:
                    # non SA class
                    if fields._pk(
                            model) is None and model is not self._original_cls:
                        error = (
                            'Mapped instances to be bound must either have '
                            'a primary key set or not be in a Session.  When '
                            'creating a new object, bind the class instead '
                            '[i.e., bind(User), not bind(User())].')
                        raise Exception(error)
                else:
                    if session_:
                        # for instances of mapped classes, require that the instance
                        # have a PK already
                        try:
                            class_mapper(type(model))
                        except:
                            pass
                        else:
                            if fields._pk(model) is None:
                                error = (
                                    'Mapped instances to be bound must either have '
                                    'a primary key set or not be in a Session.  When '
                                    'creating a new object, bind the class instead '
                                    '[i.e., bind(User), not bind(User())]')
                                raise Exception(error)
            if (self.model and type(self.model) != type(model)
                    and not issubclass(model.__class__, self._original_cls)):
                raise ValueError(
                    'You can only bind to another object of the same type or subclass you originally bound to (%s), not %s'
                    % (type(self.model), type(model)))
            self.model = model
            self._bound_pk = fields._pk(model)

        if data is not None and not with_prefix:
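            # with_prefix=False means the caller supplied plain field names;
            # re-key the data into the prefixed form ({Model}-{pk}-{name})
            # that the bound renderers expect.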
            if isinstance(data, multidict.UnicodeMultiDict):
                encoding = data.encoding
            else:
                encoding = config.encoding
            pk = fields._pk(self.model) or ''
            prefix = '%s-%s' % (self._original_cls.__name__, pk)
            if self._prefix:
                prefix = '%s-%s' % (self._prefix, prefix)
            data = SimpleMultiDict([('%s-%s' % (prefix, k), v)
                                    for k, v in data.items()],
                                   encoding=encoding)

        if data is None:
            self.data = None
        elif isinstance(data, multidict.UnicodeMultiDict):
            self.data = data
        elif isinstance(data, multidict.MultiDict):
            self.data = multidict.UnicodeMultiDict(multi=data,
                                                   encoding=config.encoding)
        elif hasattr(data, 'getall') and hasattr(data, 'getone'):
            self.data = data
        elif isinstance(data, (dict, list)):
            self.data = SimpleMultiDict(data, encoding=config.encoding)
        else:
            raise Exception(
                'unsupported data object %s.  currently only dicts and Paste multidicts are supported'
                % data)

        if not self.__sa__:
            return

        if session:
            self.session = session
        elif model:
            if '_obj_session' in locals():
                # model may be a temporary object, expunged from its session -- grab the existing reference
                self.session = _obj_session
            else:
                try:
                    o_session = object_session(model)
                except (AttributeError, UnmappedInstanceError):
                    pass  # non-SA object
                else:
                    if o_session:
                        self.session = o_session
        # if we didn't just instantiate (in which case object_session will be None),
        # the session should be the same as the object_session
        if self.session and model == original_model:
            try:
                o_session = object_session(self.model)
            except (AttributeError, UnmappedInstanceError):
                pass  # non-SA object
            else:
                if o_session and self.session is not o_session:
                    raise Exception(
                        'You may not explicitly bind to a session when your model already belongs to a different one'
                    )

    def validate(self):
        """
        Validate attributes and `global_validator`.
        If validation fails, the validator should raise `ValidationError`.
        """
        if self.readonly:
            raise ValidationError('Cannot validate a read-only FieldSet')
        if self.data is None:
            raise ValidationError('Cannot validate without binding data')
        success = True
        for field in self.render_fields.values():
            success = field._validate() and success
        # run this _after_ the field validators, since each field validator
        # resets its error list. we want to allow the global validator to add
        # errors to individual fields.
        if self.validator:
            self._errors = []
            try:
                self.validator(self)
            except ValidationError as e:
                self._errors.append(e.message)
                success = False
        return success

    def sync(self):
        """
        Sync (copy to the corresponding attributes) the data passed to the constructor or `bind` to the `model`.
        """
        if self.readonly:
            raise Exception('Cannot sync a read-only FieldSet')
        if self.data is None:
            raise Exception("No data bound; cannot sync")
        for field in self.render_fields.values():
            field.sync()
        if self.session:
            self.session.add(self.model)

    def render(self, **kwargs):
        if fields._pk(self.model) != self._bound_pk and self.data is not None:
            msg = (
                "Primary key of model has changed since binding, "
                "probably due to sync()ing a new instance (from %r to %r). "
                "You can solve this by either binding to a model "
                "with the original primary key again, or by binding data to None."
            )
            raise exceptions.PkError(msg %
                                     (self._bound_pk, fields._pk(self.model)))
        engine = self.engine or config.engine
        if 'request' not in kwargs:
            kwargs['request'] = self._request
        if self.readonly:
            template = 'fieldset_readonly'
        else:
            template = 'fieldset'
        return engine(template, fieldset=self, **kwargs)

    @property
    def errors(self):
        """
        A dictionary of validation failures.  Always empty before `validate()` is run.
        Dictionary keys are attributes; values are lists of messages given to `ValidationError`.
        Global errors (not specific to a single attribute) are under the key `None`.
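
        A minimal sketch (assuming a `FieldSet` bound with `data`):

        >>> if not fs.validate():
        ...     for field, messages in fs.errors.items():
        ...         print(field, messages)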
        """
        errors = {}
        if self._errors:
            errors[None] = [helpers.literal.escape(m) for m in self._errors]
        errors.update(
            dict([(field, field.errors)
                  for field in self.render_fields.values() if field.errors]))
        return errors

    @property
    def render_fields(self):
        """
        The set of attributes that will be rendered, as an (ordered)
        dict of `{fieldname: Field}` pairs.
        """
        if not self._render_fields:
            self._render_fields = OrderedDict([
                (field.key, field) for field in self._get_fields()
            ])
        return self._render_fields

    def copy(self, *args):
        """return a copy of the fieldset. args is a list of field names or field
        objects to render in the new fieldset"""
        mr = self.bind(self.model, self.session)
        _fields = self._render_fields or self._fields
        _new_fields = []
        if args:
            for field in args:
                if isinstance(field, string_types):
                    if field in _fields:
                        field = _fields.get(field)
                    else:
                        raise AttributeError('%r has no field named %s' %
                                             (self, field))
                assert isinstance(field, fields.AbstractField), field
                field.bind(mr)
                _new_fields.append(field)
            mr._render_fields = OrderedDict([(field.key, field)
                                             for field in _new_fields])
        return mr

    def append(self, field):
        """Add a form Field. By default, this Field will be included in the rendered form or table."""
        if not isinstance(field, fields.AbstractField):
            raise ValueError(
                'Can only add Field or AttributeField objects; got %s instead'
                % field)
        field.parent = self
        _fields = self._render_fields or self._fields
        _fields[field.name] = field

    def add(self, field):
        warnings.warn(
            DeprecationWarning(
                'FieldSet.add is deprecated. Use FieldSet.append instead. Your validator will break in FA 1.5'
            ))
        self.append(field)

    def extend(self, fields):
        """Add a list of fields. By default, each Field will be included in the
        rendered form or table."""
        for field in fields:
            self.append(field)

    def insert(self, field, new_field):
        """Insert a new field *before* an existing field.

        This works like ``list.insert()``: the new field takes the position of
        the existing field and pushes it and the following fields forward.
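
        A minimal sketch (`nickname` is a hypothetical extra input; `Field`
        here is `formalchemy.Field`):

        >>> from formalchemy import Field
        >>> fs.insert(fs.name, Field('nickname'))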
        """
        fields_ = self._render_fields or self._fields
        if not isinstance(new_field, fields.Field):
            raise ValueError('Can only add Field objects; got %s instead' %
                             new_field)
        if isinstance(field, fields.AbstractField):
            try:
                index = list(fields_.keys()).index(field.key)
            except ValueError:
                raise ValueError('%s not in fields' % field.key)
        else:
            raise TypeError('field must be a Field. Got %r' % field)
        new_field.parent = self
        items = list(fields_.items())  # prepare for Python 3
        items.insert(index, (new_field.name, new_field))
        if self._render_fields:
            self._render_fields = OrderedDict(items)
        else:
            self._fields = OrderedDict(items)

    def insert_after(self, field, new_field):
        """Insert a new field *after* an existing field.

        Use this if your business logic requires adding a field after a certain
        one, rather than before it.
        """
        fields_ = self._render_fields or self._fields
        if not isinstance(new_field, fields.Field):
            raise ValueError('Can only add Field objects; got %s instead' %
                             new_field)
        if isinstance(field, fields.AbstractField):
            try:
                index = list(fields_.keys()).index(field.key)
            except ValueError:
                raise ValueError('%s not in fields' % field.key)
        else:
            raise TypeError('field must be a Field. Got %r' % field)
        new_field.parent = self
        items = list(fields_.items())
        new_item = (new_field.name, new_field)
        if index + 1 == len(items):  # after the last element ?
            items.append(new_item)
        else:
            items.insert(index + 1, new_item)
        if self._render_fields:
            self._render_fields = OrderedDict(items)
        else:
            self._fields = OrderedDict(items)

    def to_dict(self, with_prefix=True, as_string=False):
        """This method intend to help you to work with json. Render fieldset as
        a dict. If ``with_prefix`` is False then the prefix ``{Model}-{pk}`` is
        not added. If ``as_string`` is True then all value are set using
        ``field.render_readonly()`` else the pythonic value is used"""
        _fields = self._render_fields or self._fields

        def get_value(f):
            if as_string:
                return f.render_readonly()
            else:
                return f.value

        if as_string:
            data = [(f, f.render_readonly()) for f in _fields.values()]
        else:
            data = [(f, f.value) for f in _fields.values()
                    if not isinstance(f.renderer, fields.PasswordFieldRenderer)
                    ]

        if with_prefix:
            data = [(f.renderer.name, v) for f, v in data]
        else:
            data = [(f.name, v) for f, v in data]

        return dict(data)

    def _raw_fields(self, use_rendered=False):
        if use_rendered and self._render_fields:
            return self._render_fields.values()
        else:
            return self._fields.values()

    def _get_fields(self,
                    pk=False,
                    exclude=[],
                    include=[],
                    options=[],
                    use_rendered=False):
        # sanity check
        if include and exclude:
            raise Exception('Specify at most one of include, exclude')

        # help people who meant configure(include=[X]) but just wrote configure(X), resulting in pk getting the positional argument
        if pk not in [True, False]:
            raise ValueError('pk option must be True or False, not %s' % pk)

        # verify that options that should be lists of Fields, are
        include = list(include)
        exclude = list(exclude)
        options = list(options)
        for iterable in ('include', 'exclude', 'options'):
            L = locals()[iterable]
            for field in L:
                if not isinstance(field, fields.AbstractField):
                    raise TypeError(
                        'non-AbstractField object `%s` found in `%s`' %
                        (field, iterable))
                if field not in self._fields.values():
                    raise ValueError(
                        'Unrecognized Field `%r` in `%s` -- did you mean to call append() first?'
                        % (field, iterable))

        # if include is given, those are the fields used.  otherwise, include those not explicitly (or implicitly) excluded.
        if not include:
            if not pk:
                exclude.extend([
                    wrapper for wrapper in self._raw_fields(use_rendered)
                    if wrapper.is_pk and not wrapper.is_collection
                ])
            exclude.extend([
                wrapper for wrapper in self._raw_fields(use_rendered)
                if wrapper.is_raw_foreign_key
            ])
            include = [
                field for field in self._raw_fields(use_rendered)
                if field not in exclude
            ]

        # in the returned list, replace any fields in `include` w/ the corresponding one in `options`, if present.
        # this is a bit clunky because we want to
        #   1. preserve the order given in `include`
        #   2. not modify `include` (or `options`) directly; that could surprise the caller
        options_dict = dict([(wrapper, wrapper) for wrapper in options])
        L = []
        for wrapper in include:
            if wrapper in options_dict:
                L.append(options_dict[wrapper])
            else:
                L.append(wrapper)
        return L

    def __getattr__(self, attrname):
        try:
            return self._render_fields[attrname]
        except KeyError:
            try:
                return self._fields[attrname]
            except KeyError:
                raise AttributeError(attrname)

    __getitem__ = __getattr__

    def __setattr__(self, attrname, value):
        if attrname not in ('_fields', '__dict__', 'focus', 'model', 'session', 'data') and \
           (attrname in self._fields or isinstance(value, fields.AbstractField)):
            raise AttributeError(
                'Do not set field attributes manually.  Use append() or configure() instead'
            )
        object.__setattr__(self, attrname, value)

    def __delattr__(self, attrname):
        if attrname in self._render_fields:
            del self._render_fields[attrname]
        elif attrname in self._fields:
            raise RuntimeError(
                "You try to delete a field but your form is not configured")
        else:
            raise AttributeError("field %s does not exist" % attrname)

    __delitem__ = __delattr__

    def __repr__(self):
        _fields = self._fields
        conf = ''
        if self._render_fields:
            conf = ' (configured)'
            _fields = self._render_fields
        return '<%s%s with %r>' % (self.__class__.__name__, conf,
                                   list(_fields.keys()))
Beispiel #16
0
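# The class below is Alembic's "move and copy" batch implementation: it builds
# a temporary table, copies the rows across, drops the original table and
# renames the copy back. The usual public entry point is op.batch_alter_table()
# -- a minimal usage sketch, not part of this snippet:
#
#     with op.batch_alter_table("account") as batch_op:
#         batch_op.alter_column("name", type_=sa.String(100))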
class ApplyBatchImpl(object):
    def __init__(self, table, table_args, table_kwargs, reflected):
        self.table = table  # this is a Table object
        self.table_args = table_args
        self.table_kwargs = table_kwargs
        self.temp_table_name = self._calc_temp_name(table.name)
        self.new_table = None
        self.column_transfers = OrderedDict(
            (c.name, {"expr": c}) for c in self.table.c
        )
        self.reflected = reflected
        self._grab_table_elements()

    @classmethod
    def _calc_temp_name(cls, tablename):
        return ("_alembic_tmp_%s" % tablename)[0:50]

    def _grab_table_elements(self):
        schema = self.table.schema
        self.columns = OrderedDict()
        for c in self.table.c:
            c_copy = c.copy(schema=schema)
            c_copy.unique = c_copy.index = False
            # ensure that the type object was copied,
            # as we may need to modify it in-place
            if isinstance(c.type, SchemaEventTarget):
                assert c_copy.type is not c.type
            self.columns[c.name] = c_copy
        self.named_constraints = {}
        self.unnamed_constraints = []
        self.indexes = {}
        self.new_indexes = {}
        for const in self.table.constraints:
            if _is_type_bound(const):
                continue
            elif self.reflected and isinstance(const, CheckConstraint):
                # TODO: we are skipping reflected CheckConstraint because
                # we have no way to determine _is_type_bound() for these.
                pass
            elif const.name:
                self.named_constraints[const.name] = const
            else:
                self.unnamed_constraints.append(const)

        for idx in self.table.indexes:
            self.indexes[idx.name] = idx

        for k in self.table.kwargs:
            self.table_kwargs.setdefault(k, self.table.kwargs[k])

    def _transfer_elements_to_new_table(self):
        assert self.new_table is None, "Can only create new table once"

        m = MetaData()
        schema = self.table.schema

        self.new_table = new_table = Table(
            self.temp_table_name,
            m,
            *(list(self.columns.values()) + list(self.table_args)),
            schema=schema,
            **self.table_kwargs
        )

        for const in (
            list(self.named_constraints.values()) + self.unnamed_constraints
        ):

            const_columns = set(
                [c.key for c in _columns_for_constraint(const)]
            )

            if not const_columns.issubset(self.column_transfers):
                continue

            if isinstance(const, ForeignKeyConstraint):
                if _fk_is_self_referential(const):
                    # for self-referential constraint, refer to the
                    # *original* table name, and not _alembic_batch_temp.
                    # This is consistent with how we're handling
                    # FK constraints from other tables; we assume SQLite, with
                    # foreign keys not enforced here, just keeps the names unchanged, so
                    # when we rename back, they match again.
                    const_copy = const.copy(
                        schema=schema, target_table=self.table
                    )
                else:
                    # "target_table" for ForeignKeyConstraint.copy() is
                    # only used if the FK is detected as being
                    # self-referential, which we are handling above.
                    const_copy = const.copy(schema=schema)
            else:
                const_copy = const.copy(schema=schema, target_table=new_table)
            if isinstance(const, ForeignKeyConstraint):
                self._setup_referent(m, const)
            new_table.append_constraint(const_copy)

    def _gather_indexes_from_both_tables(self):
        idx = []
        idx.extend(self.indexes.values())
        for index in self.new_indexes.values():
            idx.append(
                Index(
                    index.name,
                    unique=index.unique,
                    *[self.new_table.c[col] for col in index.columns.keys()],
                    **index.kwargs
                )
            )
        return idx

    def _setup_referent(self, metadata, constraint):
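        # The temp table lives in its own MetaData, so any foreign key it
        # carries needs a target to resolve against.  Build (or extend) a
        # stub Table with NULL-typed columns for the referenced table, unless
        # the FK points back at the temp table itself.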
        spec = constraint.elements[0]._get_colspec()
        parts = spec.split(".")
        tname = parts[-2]
        if len(parts) == 3:
            referent_schema = parts[0]
        else:
            referent_schema = None

        if tname != self.temp_table_name:
            key = sql_schema._get_table_key(tname, referent_schema)
            if key in metadata.tables:
                t = metadata.tables[key]
                for elem in constraint.elements:
                    colname = elem._get_colspec().split(".")[-1]
                    if not t.c.contains_column(colname):
                        t.append_column(Column(colname, sqltypes.NULLTYPE))
            else:
                Table(
                    tname,
                    metadata,
                    *[
                        Column(n, sqltypes.NULLTYPE)
                        for n in [
                            elem._get_colspec().split(".")[-1]
                            for elem in constraint.elements
                        ]
                    ],
                    schema=referent_schema
                )

    def _create(self, op_impl):
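        # The "move and copy" sequence: build the temp table with the adjusted
        # columns and constraints, create it, INSERT ... SELECT the surviving
        # columns across, drop the original table, rename the temp table to the
        # original name, then recreate the indexes against the renamed table.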
        self._transfer_elements_to_new_table()

        op_impl.prep_table_for_batch(self.table)
        op_impl.create_table(self.new_table)

        try:
            op_impl._exec(
                self.new_table.insert(inline=True).from_select(
                    list(
                        k
                        for k, transfer in self.column_transfers.items()
                        if "expr" in transfer
                    ),
                    select(
                        [
                            transfer["expr"]
                            for transfer in self.column_transfers.values()
                            if "expr" in transfer
                        ]
                    ),
                )
            )
            op_impl.drop_table(self.table)
        except:
            op_impl.drop_table(self.new_table)
            raise
        else:
            op_impl.rename_table(
                self.temp_table_name, self.table.name, schema=self.table.schema
            )
            self.new_table.name = self.table.name
            try:
                for idx in self._gather_indexes_from_both_tables():
                    op_impl.create_index(idx)
            finally:
                self.new_table.name = self.temp_table_name

    def alter_column(
        self,
        table_name,
        column_name,
        nullable=None,
        server_default=False,
        name=None,
        type_=None,
        autoincrement=None,
        **kw
    ):
        existing = self.columns[column_name]
        existing_transfer = self.column_transfers[column_name]
        if name is not None and name != column_name:
            # note that we don't change '.key' - we keep referring
            # to the renamed column by its old key in _create().  neat!
            existing.name = name
            existing_transfer["name"] = name

        if type_ is not None:
            type_ = sqltypes.to_instance(type_)
            # old type is being discarded so turn off eventing
            # rules. Alternatively we can
            # erase the events set up by this type, but this is simpler.
            # we also ignore the drop_constraint that will come here from
            # Operations.implementation_for(alter_column)
            if isinstance(existing.type, SchemaEventTarget):
                existing.type._create_events = (
                    existing.type.create_constraint
                ) = False

            if existing.type._type_affinity is not type_._type_affinity:
                existing_transfer["expr"] = cast(
                    existing_transfer["expr"], type_
                )

            existing.type = type_

            # we *don't* however set events for the new type, because
            # alter_column is invoked from
            # Operations.implementation_for(alter_column) which already
            # will emit an add_constraint()

        if nullable is not None:
            existing.nullable = nullable
        if server_default is not False:
            if server_default is None:
                existing.server_default = None
            else:
                sql_schema.DefaultClause(server_default)._set_parent(existing)
        if autoincrement is not None:
            existing.autoincrement = bool(autoincrement)

    def add_column(self, table_name, column, **kw):
        # we copy the column because operations.add_column()
        # gives us a Column that is part of a Table already.
        self.columns[column.name] = column.copy(schema=self.table.schema)
        self.column_transfers[column.name] = {}

    def drop_column(self, table_name, column, **kw):
        if column.name in self.table.primary_key.columns:
            _remove_column_from_collection(
                self.table.primary_key.columns, column
            )
        del self.columns[column.name]
        del self.column_transfers[column.name]

    def add_constraint(self, const):
        if not const.name:
            raise ValueError("Constraint must have a name")
        if isinstance(const, sql_schema.PrimaryKeyConstraint):
            if self.table.primary_key in self.unnamed_constraints:
                self.unnamed_constraints.remove(self.table.primary_key)

        self.named_constraints[const.name] = const

    def drop_constraint(self, const):
        if not const.name:
            raise ValueError("Constraint must have a name")
        try:
            const = self.named_constraints.pop(const.name)
        except KeyError:
            if _is_type_bound(const):
                # type-bound constraints are only included in the new
                # table via their type object in any case, so ignore the
                # drop_constraint() that comes here via the
                # Operations.implementation_for(alter_column)
                return
            raise ValueError("No such constraint: '%s'" % const.name)
        else:
            if isinstance(const, PrimaryKeyConstraint):
                for col in const.columns:
                    self.columns[col.name].primary_key = False

    def create_index(self, idx):
        self.new_indexes[idx.name] = idx

    def drop_index(self, idx):
        try:
            del self.indexes[idx.name]
        except KeyError:
            raise ValueError("No such index: '%s'" % idx.name)

    def rename_table(self, *arg, **kw):
        raise NotImplementedError("TODO")
Beispiel #17
0
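# FormAlchemy's zope.schema extension: fields are generated from a schema
# interface rather than a SQLAlchemy mapper. A minimal usage sketch (the IPet
# interface is hypothetical; assumes zope.schema is installed):
#
#     from zope import interface, schema
#
#     class IPet(interface.Interface):
#         name = schema.TextLine(title=u'Name', required=True)
#         birthdate = schema.Date(title=u'Birth date')
#
#     fs = FieldSet(IPet)
#     fs.configure(include=[fs.name, fs.birthdate])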
class FieldSet(BaseFieldSet):
    """FieldSet aware of zope schema. See :class:`formalchemy.forms.FieldSet` for full api."""

    __sa__ = False
    _fields_mapping = {
        schema.TextLine: fatypes.Unicode,
        schema.Text: fatypes.Unicode,
        schema.Int: fatypes.Integer,
        schema.Bool: fatypes.Boolean,
        schema.Float: fatypes.Float,
        schema.Date: fatypes.Date,
        schema.Datetime: fatypes.DateTime,
        schema.Time: fatypes.Time,
        schema.Choice: fatypes.Unicode,
        schema.List: fatypes.List,
        schema.Password: fatypes.Unicode,
    }

    def __init__(self, model, **kwargs):
        BaseFieldSet.__init__(self, model, **kwargs)
        self.iface = model
        self.rebind(model)
        self._fields = OrderedDict()
        self._render_fields = OrderedDict()
        self._bound_pk = None
        for name, field in schema.getFieldsInOrder(self.iface):
            klass = field.__class__
            try:
                t = self._fields_mapping[klass]
            except KeyError:
                raise NotImplementedError('%s is not mapped to a type' % klass)
            else:
                self.append(Field(name=name, type=t))
                self._fields[name].label_text = field.title or name
                if field.description:
                    self._fields[name].set(instructions=field.description)
                if field.required:
                    self._fields[name].validators.append(validators.required)
                if klass is schema.Password:
                    self._fields[name].set(renderer=fields.PasswordFieldRenderer)
                if klass is schema.Text:
                    self._fields[name].set(renderer=fields.TextAreaFieldRenderer)
                if klass is schema.List:
                    value_type = self.iface[name].value_type
                    if isinstance(value_type, schema.Choice):
                        self._fields[name].set(options=value_type, multiple=True)
                    else:
                        self._fields[name].set(multiple=True)
                elif klass is schema.Choice:
                    self._fields[name].set(renderer=fields.SelectFieldRenderer,
                                           options=self.iface[name])

    def bind(self, model, session=None, data=None, request=None):
        if not (model is not None or session or data):
            raise Exception('must specify at least one of {model, session, data}')
        # copy.copy causes a stacktrace on python 2.5.2/OSX + pylons.  unable to reproduce w/ simpler sample.
        mr = object.__new__(self.__class__)
        mr.__dict__ = dict(self.__dict__)
        # two steps so bind's error checking can work
        mr.rebind(model, session, data)
        mr._request = request
        mr._fields = OrderedDict([(key, renderer.bind(mr)) for key, renderer in self._fields.items()])
        if self._render_fields:
            mr._render_fields = OrderedDict([(field.key, field) for field in
                                             [field.bind(mr) for field in self._render_fields.values()]])
        return mr

    def gen_model(self, model=None, dict_like=False, **kwargs):
        if model and self.iface.providedBy(model):
            return model
        factory = gen_model(self.iface, model, dict_like=dict_like)
        model = factory(context=model, **kwargs)
        return model

    def rebind(self, model, session=None, data=None):
        if model is not self.iface:
            if model and not self.iface.providedBy(model):
                if getattr(model, '__implemented__', None) is not None:
                    raise ValueError('%r does not provide %r' % (model, self.iface))
                model = self.gen_model(model)
        self.model = model
        self._bound_pk = fields._pk(model)
        if data is None:
            self.data = None
        elif hasattr(data, 'getall') and hasattr(data, 'getone'):
            self.data = data
        else:
            try:
                self.data = SimpleMultiDict(data)
            except:
                raise Exception('unsupported data object %s. currently only dicts and Paste multidicts are supported' % data)
Beispiel #18
0
class ApplyBatchImpl(object):
    def __init__(self, table, table_args, table_kwargs):
        self.table = table  # this is a Table object
        self.table_args = table_args
        self.table_kwargs = table_kwargs
        self.new_table = None
        self.column_transfers = OrderedDict((c.name, {
            'expr': c
        }) for c in self.table.c)
        self._grab_table_elements()

    def _grab_table_elements(self):
        schema = self.table.schema
        self.columns = OrderedDict()
        for c in self.table.c:
            c_copy = c.copy(schema=schema)
            c_copy.unique = c_copy.index = False
            self.columns[c.name] = c_copy
        self.named_constraints = {}
        self.unnamed_constraints = []
        self.indexes = {}
        for const in self.table.constraints:
            if const.name:
                self.named_constraints[const.name] = const
            else:
                self.unnamed_constraints.append(const)

        for idx in self.table.indexes:
            self.indexes[idx.name] = idx

    def _transfer_elements_to_new_table(self):
        assert self.new_table is None, "Can only create new table once"

        m = MetaData()
        schema = self.table.schema
        self.new_table = new_table = Table('_alembic_batch_temp',
                                           m,
                                           *(list(self.columns.values()) +
                                             list(self.table_args)),
                                           schema=schema,
                                           **self.table_kwargs)

        for const in list(self.named_constraints.values()) + \
                self.unnamed_constraints:

            const_columns = set(
                [c.key for c in self._constraint_columns(const)])

            if not const_columns.issubset(self.column_transfers):
                continue
            const_copy = const.copy(schema=schema, target_table=new_table)
            if isinstance(const, ForeignKeyConstraint):
                self._setup_referent(m, const)
            new_table.append_constraint(const_copy)

        for index in self.indexes.values():
            Index(index.name,
                  unique=index.unique,
                  *[new_table.c[col] for col in index.columns.keys()],
                  **index.kwargs)

    def _constraint_columns(self, constraint):
        if isinstance(constraint, ForeignKeyConstraint):
            return [fk.parent for fk in constraint.elements]
        else:
            return list(constraint.columns)

    def _setup_referent(self, metadata, constraint):
        spec = constraint.elements[0]._get_colspec()
        parts = spec.split(".")
        tname = parts[-2]
        if len(parts) == 3:
            referent_schema = parts[0]
        else:
            referent_schema = None
        if tname != '_alembic_batch_temp':
            Table(tname,
                  metadata,
                  *[
                      Column(n, sqltypes.NULLTYPE) for n in [
                          elem._get_colspec().split(".")[-1]
                          for elem in constraint.elements
                      ]
                  ],
                  schema=referent_schema)

    def _create(self, op_impl):
        self._transfer_elements_to_new_table()

        op_impl.prep_table_for_batch(self.table)
        op_impl.create_table(self.new_table)

        try:
            op_impl._exec(
                self.new_table.insert(inline=True).from_select(
                    list(k for k, transfer in self.column_transfers.items()
                         if 'expr' in transfer),
                    select([
                        transfer['expr']
                        for transfer in self.column_transfers.values()
                        if 'expr' in transfer
                    ])))
            op_impl.drop_table(self.table)
        except:
            op_impl.drop_table(self.new_table)
            raise
        else:
            op_impl.rename_table("_alembic_batch_temp",
                                 self.table.name,
                                 schema=self.table.schema)

    def alter_column(self,
                     table_name,
                     column_name,
                     nullable=None,
                     server_default=False,
                     name=None,
                     type_=None,
                     autoincrement=None,
                     **kw):
        existing = self.columns[column_name]
        existing_transfer = self.column_transfers[column_name]
        if name is not None and name != column_name:
            # note that we don't change '.key' - we keep referring
            # to the renamed column by its old key in _create().  neat!
            existing.name = name
            existing_transfer["name"] = name

        if type_ is not None:
            type_ = sqltypes.to_instance(type_)
            existing.type = type_
            existing_transfer["expr"] = cast(existing_transfer["expr"], type_)
        if nullable is not None:
            existing.nullable = nullable
        if server_default is not False:
            existing.server_default = server_default
        if autoincrement is not None:
            existing.autoincrement = bool(autoincrement)

    def add_column(self, table_name, column, **kw):
        # we copy the column because operations.add_column()
        # gives us a Column that is part of a Table already.
        self.columns[column.name] = column.copy(schema=self.table.schema)
        self.column_transfers[column.name] = {}

    def drop_column(self, table_name, column, **kw):
        del self.columns[column.name]
        del self.column_transfers[column.name]

    def add_constraint(self, const):
        if not const.name:
            raise ValueError("Constraint must have a name")
        self.named_constraints[const.name] = const

    def drop_constraint(self, const):
        if not const.name:
            raise ValueError("Constraint must have a name")
        try:
            del self.named_constraints[const.name]
        except KeyError:
            raise ValueError("No such constraint: '%s'" % const.name)

    def add_index(self, idx):
        self.indexes[idx.name] = idx

    def drop_index(self, idx):
        try:
            del self.indexes[idx.name]
        except KeyError:
            raise ValueError("No such index: '%s'" % idx.name)

    def rename_table(self, *arg, **kw):
        raise NotImplementedError("TODO")