Code Example #1
def _compare_columns(schema, tname, conn_table, metadata_table, diffs,
                     autogen_context):
    name = '%s.%s' % (schema, tname) if schema else tname
    metadata_cols_by_name = dict((c.name, c) for c in metadata_table.c)
    conn_col_names = set(conn_table)
    metadata_col_names = OrderedSet(sorted(metadata_cols_by_name))

    for cname in metadata_col_names.difference(conn_col_names):
        diffs.append(
            ("add_column", schema, tname, metadata_cols_by_name[cname]))
        log.info("Detected added column '%s.%s'", name, cname)

    for cname in conn_col_names.difference(metadata_col_names):
        diffs.append(
            ("remove_column", schema, tname,
             sa_schema.Column(cname,
                              conn_table[cname]['type'],
                              nullable=conn_table[cname]['nullable'],
                              server_default=conn_table[cname]['default'])))
        log.info("Detected removed column '%s.%s'", name, cname)

    for colname in metadata_col_names.intersection(conn_col_names):
        metadata_col = metadata_cols_by_name[colname]
        conn_col = conn_table[colname]
        col_diff = []
        _compare_type(schema, tname, colname, conn_col, metadata_col, col_diff,
                      autogen_context)
        _compare_nullable(schema, tname, colname, conn_col,
                          metadata_col.nullable, col_diff, autogen_context)
        _compare_server_default(schema, tname, colname, conn_col, metadata_col,
                                col_diff, autogen_context)
        if col_diff:
            diffs.append(col_diff)
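Note: the following is a minimal standalone sketch (not part of the example above) of the set arithmetic that drives this column diff, assuming SQLAlchemy's sqlalchemy.util.OrderedSet as imported in Code Example #12; the column names are made up for illustration. Sorting the metadata names before building the OrderedSet is what keeps the generated diff deterministic.

from sqlalchemy.util import OrderedSet

conn_col_names = {'id', 'name', 'legacy_flag'}                    # columns reflected from the database
metadata_col_names = OrderedSet(sorted(['id', 'name', 'email']))  # columns declared in the model

added = metadata_col_names.difference(conn_col_names)             # only 'email' -> add_column
removed = set(conn_col_names).difference(metadata_col_names)      # only 'legacy_flag' -> remove_column
unchanged = metadata_col_names.intersection(conn_col_names)       # 'id' and 'name', compared in place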
Code Example #2
class _QueryPlan(object):

    def __init__(self):
        self._queries = OrderedSet({None})
        self._columns = defaultdict(OrderedSet)
        self._children = defaultdict(OrderedSet)

    def add_query(self, query, parent_query):
        self._queries.add(query)
        self._children[parent_query].add(query)

    def add_expr(self, query, column):
        self._columns[query].add(column)

    def query_id(self, query):
        return list(self._queries).index(query)

    def column_id(self, query, column):
        return list(self._columns[query]).index(column)

    def query_columns(self, query):
        return tuple(self._columns.get(query) or ())

    def query_children(self, query):
        return tuple(self._children.get(query) or ())

    def process_rows(self, rows, session):
        children = self.query_children(None)

        results = [[((self.query_id(None), row),) for row in rows]]
        results.extend(child.__execute__(self, None, rows, session)
                       for child in children)

        return {0: [tuple(chain(*r)) for r in zip(*results)]}
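Note: a brief usage sketch (my own, not from the project above) of how _QueryPlan assigns ids, assuming an insertion-ordered OrderedSet such as sqlalchemy.util.OrderedSet and using plain strings as stand-ins for query and column objects. Seeding self._queries with {None} makes the root query always id 0.

plan = _QueryPlan()
plan.add_query('users_query', None)              # child of the root (None)
plan.add_query('addresses_query', 'users_query')
plan.add_expr('users_query', 'users.name')
plan.add_expr('users_query', 'users.id')

assert plan.query_id(None) == 0                  # root is always 0
assert plan.query_id('users_query') == 1         # ids follow insertion order
assert plan.column_id('users_query', 'users.id') == 1
assert plan.query_columns('users_query') == ('users.name', 'users.id')
assert plan.query_children(None) == ('users_query',)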
Code Example #3
File: test_event.py  Project: slamora/devicehub-teal
def test_update_components_event_multiple():
    computer = Desktop(serial_number='sn1',
                       model='ml1',
                       manufacturer='mr1',
                       chassis=ComputerChassis.Tower)
    hdd = HardDrive(serial_number='foo', manufacturer='bar', model='foo-bar')
    computer.components.add(hdd)

    ready = models.ReadyToUse()
    assert not ready.devices
    assert not ready.components

    # Add
    computer.events_multiple.add(ready)
    assert ready.devices == OrderedSet([computer])
    assert next(iter(ready.components)) == hdd

    # Remove
    computer.events_multiple.remove(ready)
    assert not ready.devices
    assert not ready.components

    # init / replace collection
    ready.devices = OrderedSet([computer])
    assert ready.devices
    assert ready.components
Code Example #4
File: compare.py  Project: nathanwang000/Legitur
def _compare_columns(schema, tname, object_filters, conn_table, metadata_table,
                     diffs, autogen_context, inspector):
    name = '%s.%s' % (schema, tname) if schema else tname
    metadata_cols_by_name = dict((c.name, c) for c in metadata_table.c)
    conn_col_names = dict((c.name, c) for c in conn_table.c)
    metadata_col_names = OrderedSet(sorted(metadata_cols_by_name))

    for cname in metadata_col_names.difference(conn_col_names):
        if _run_filters(metadata_cols_by_name[cname], cname, "column", False,
                        None, object_filters):
            diffs.append(
                ("add_column", schema, tname, metadata_cols_by_name[cname]))
            log.info("Detected added column '%s.%s'", name, cname)

    for cname in set(conn_col_names).difference(metadata_col_names):
        if _run_filters(conn_table.c[cname], cname, "column", True, None,
                        object_filters):
            diffs.append(("remove_column", schema, tname, conn_table.c[cname]))
            log.info("Detected removed column '%s.%s'", name, cname)

    for colname in metadata_col_names.intersection(conn_col_names):
        metadata_col = metadata_cols_by_name[colname]
        conn_col = conn_table.c[colname]
        if not _run_filters(metadata_col, colname, "column", False, conn_col,
                            object_filters):
            continue
        col_diff = []
        _compare_type(schema, tname, colname, conn_col, metadata_col, col_diff,
                      autogen_context)
        _compare_nullable(schema, tname, colname, conn_col,
                          metadata_col.nullable, col_diff, autogen_context)
        _compare_server_default(schema, tname, colname, conn_col, metadata_col,
                                col_diff, autogen_context)
        if col_diff:
            diffs.append(col_diff)
Code Example #5
File: compare.py  Project: BaeFlower/everybuddi
def _compare_columns(schema, tname, object_filters, conn_table, metadata_table, diffs, autogen_context, inspector):
    name = "%s.%s" % (schema, tname) if schema else tname
    metadata_cols_by_name = dict((c.name, c) for c in metadata_table.c)
    conn_col_names = dict((c.name, c) for c in conn_table.c)
    metadata_col_names = OrderedSet(sorted(metadata_cols_by_name))

    for cname in metadata_col_names.difference(conn_col_names):
        if _run_filters(metadata_cols_by_name[cname], cname, "column", False, None, object_filters):
            diffs.append(("add_column", schema, tname, metadata_cols_by_name[cname]))
            log.info("Detected added column '%s.%s'", name, cname)

    for cname in set(conn_col_names).difference(metadata_col_names):
        if _run_filters(conn_table.c[cname], cname, "column", True, None, object_filters):
            diffs.append(("remove_column", schema, tname, conn_table.c[cname]))
            log.info("Detected removed column '%s.%s'", name, cname)

    for colname in metadata_col_names.intersection(conn_col_names):
        metadata_col = metadata_cols_by_name[colname]
        conn_col = conn_table.c[colname]
        if not _run_filters(metadata_col, colname, "column", False, conn_col, object_filters):
            continue
        col_diff = []
        _compare_type(schema, tname, colname, conn_col, metadata_col, col_diff, autogen_context)
        _compare_nullable(schema, tname, colname, conn_col, metadata_col.nullable, col_diff, autogen_context)
        _compare_server_default(schema, tname, colname, conn_col, metadata_col, col_diff, autogen_context)
        if col_diff:
            diffs.append(col_diff)
Code Example #6
File: database.py  Project: ReneHollander/wien-wahl
    def load(self):
        header = OrderedSet(default_header)
        parties = {}
        for party in self.session.query(self.Party).all():
            parties[party.nr] = party.abbr
            header.add(party.abbr)

        query = "SELECT constituency.nr AS cnr, district.nr AS dnr, judicaldistrict.nr AS jdnr, judicaldistrict.electivecnt, " \
                "judicaldistrict.votecnt, judicaldistrict.invalidcnt, votes.pnr, votes.cnt " \
                "FROM constituency " \
                "INNER JOIN district ON constituency.nr = district.cnr " \
                "INNER JOIN judicaldistrict ON district.nr = judicaldistrict.dnr " \
                "AND judicaldistrict.enr = '" + str(self.enr) + "' " \
                                                                "INNER JOIN votes ON votes.enr = '" + str(self.enr) + "' " \
                                                                                                                      "AND votes.dnr = district.nr " \
                                                                                                                      "AND votes.jdnr = judicaldistrict.nr;"
        result = self.session.execute(query)

        data = {}
        for row in result:
            key = str(row["cnr"]) + str(row["dnr"]) + str(row["jdnr"])
            if key not in data:
                data[key] = {
                    "WK": row["cnr"],
                    "BZ": row["dnr"],
                    "SPR": row["jdnr"],
                    "WBER": row["electivecnt"],
                    "ABG": row["votecnt"],
                    "UNG": row["invalidcnt"],
                    "T": 4,
                    "WV": 1,
                }
            data[key][parties[row["pnr"]]] = row["cnt"]

        return header, list(data.values())
Code Example #7
File: test_device.py  Project: eReuse/devicehub-teal
def test_add_remove():
    # Original state:
    # pc has c1 and c2
    # pc2 has c3
    # c4 is not with any pc
    user = User.query.filter().first()
    values = yaml2json('pc-components.db')
    pc = values['device']
    c1, c2 = (d.Component(**c) for c in values['components'])
    pc = d.Desktop(**pc, components=OrderedSet([c1, c2]))
    db.session.add(pc)
    c3 = d.Component(serial_number='nc1', owner_id=user.id)
    pc2 = d.Desktop(serial_number='s2',
                    components=OrderedSet([c3]),
                    chassis=ComputerChassis.Microtower)
    c4 = d.Component(serial_number='c4s', owner_id=user.id)
    db.session.add(pc2)
    db.session.add(c4)
    db.session.commit()

    # Test:
    # pc has only c3
    actions = Sync.add_remove(device=pc, components={c3, c4})
    db.session.add_all(actions)
    db.session.commit()  # We enforce the application of order_by
    assert len(actions) == 1
    assert isinstance(actions[0], Remove)
    assert actions[0].device == pc2
    assert actions[0].components == OrderedSet([c3])
Code Example #8
File: sync.py  Project: slamora/devicehub-teal
    def add_remove(device: Computer, components: Set[Component]) -> OrderedSet:
        """
        Generates the Add and Remove events (but doesn't add them to
        session).

        :param device: A device whose ``components`` attribute contains
                       the old list of components. The components that
                       are not in ``components`` will be Removed.
        :param components: List of components that may potentially
                           be Added. Some of them can already exist
                           on the device, in which case they won't
                           be re-added.
        :return: A list of Add / Remove events.
        """
        # Note that we create the Remove events before the Add ones
        events = OrderedSet()
        old_components = set(device.components)

        adding = components - old_components
        if adding:
            # For the components we are adding, let's remove them from their old parents
            def g_parent(component: Component) -> Device:
                return component.parent or Device(
                    id=0)  # Computer with id 0 is our Identity

            for parent, _components in groupby(sorted(adding, key=g_parent),
                                               key=g_parent):
                if parent.id != 0:  # Is not Computer Identity
                    events.add(
                        Remove(device=parent,
                               components=OrderedSet(_components)))
        return events
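Note: the sorted()/groupby() pairing above matters because itertools.groupby only groups consecutive equal keys. A tiny standalone sketch (illustrative data, not project code):

from itertools import groupby

adding = [('pc2', 'c3'), ('pc1', 'c1'), ('pc2', 'c4')]   # (parent, component) pairs
parent = lambda pair: pair[0]
grouped = {k: [c for _, c in g]
           for k, g in groupby(sorted(adding, key=parent), key=parent)}
assert grouped == {'pc1': ['c1'], 'pc2': ['c3', 'c4']}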
Code Example #10
def test_add_remove():
    # Original state:
    # pc has c1 and c2
    # pc2 has c3
    # c4 is not with any pc
    values = file('pc-components.db')
    pc = values['device']
    c1, c2 = (Component(**c) for c in values['components'])
    pc = Computer(**pc, components=OrderedSet([c1, c2]))
    db.session.add(pc)
    c3 = Component(serial_number='nc1')
    pc2 = Computer(serial_number='s2', components=OrderedSet([c3]))
    c4 = Component(serial_number='c4s')
    db.session.add(pc2)
    db.session.add(c4)
    db.session.commit()

    # Test:
    # pc has only c3
    events = Sync.add_remove(device=pc, components={c3, c4})
    db.session.add_all(events)
    db.session.commit()  # We enforce the application of order_by
    assert len(events) == 1
    assert isinstance(events[0], Remove)
    assert events[0].device == pc2
    assert events[0].components == OrderedSet([c3])
Code Example #11
File: compare.py  Project: Mayank0010/Doormed
def _compare_columns(
    schema,
    tname,
    conn_table,
    metadata_table,
    modify_table_ops,
    autogen_context,
    inspector,
):
    name = "%s.%s" % (schema, tname) if schema else tname
    metadata_cols_by_name = dict(
        (c.name, c) for c in metadata_table.c if not c.system)
    conn_col_names = dict(
        (c.name, c) for c in conn_table.c
        if autogen_context.run_name_filters(c.name, "column", {
            "table_name": tname,
            "schema_name": schema
        }))
    metadata_col_names = OrderedSet(sorted(metadata_cols_by_name))

    for cname in metadata_col_names.difference(conn_col_names):
        if autogen_context.run_object_filters(metadata_cols_by_name[cname],
                                              cname, "column", False, None):
            modify_table_ops.ops.append(
                ops.AddColumnOp.from_column_and_tablename(
                    schema, tname, metadata_cols_by_name[cname]))
            log.info("Detected added column '%s.%s'", name, cname)

    for colname in metadata_col_names.intersection(conn_col_names):
        metadata_col = metadata_cols_by_name[colname]
        conn_col = conn_table.c[colname]
        if not autogen_context.run_object_filters(metadata_col, colname,
                                                  "column", False, conn_col):
            continue
        alter_column_op = ops.AlterColumnOp(tname, colname, schema=schema)

        comparators.dispatch("column")(
            autogen_context,
            alter_column_op,
            schema,
            tname,
            colname,
            conn_col,
            metadata_col,
        )

        if alter_column_op.has_changes():
            modify_table_ops.ops.append(alter_column_op)

    yield

    for cname in set(conn_col_names).difference(metadata_col_names):
        if autogen_context.run_object_filters(conn_table.c[cname], cname,
                                              "column", True, None):
            modify_table_ops.ops.append(
                ops.DropColumnOp.from_column_and_tablename(
                    schema, tname, conn_table.c[cname]))
            log.info("Detected removed column '%s.%s'", name, cname)
Code Example #12
    def test_dont_barf_on_already_reflected(self):
        diffs = []
        from sqlalchemy.util import OrderedSet
        inspector = Inspector.from_engine(self.bind)
        autogenerate._compare_tables(OrderedSet(['extra', 'user']),
                                     OrderedSet(), inspector, MetaData(),
                                     diffs, self.autogen_context)
        eq_([(rec[0], rec[1].name) for rec in diffs],
            [('remove_table', 'extra'), ('remove_table', u'user')])
Code Example #13
    def test_dont_barf_on_already_reflected(self):
        from sqlalchemy.util import OrderedSet
        inspector = Inspector.from_engine(self.bind)
        uo = ops.UpgradeOps(ops=[])
        autogenerate.compare._compare_tables(
            OrderedSet([(None, 'extra'), (None, 'user')]), OrderedSet(),
            inspector, uo, self.autogen_context)
        eq_([(rec[0], rec[1].name) for rec in uo.as_diffs()],
            [('remove_table', 'extra'), ('remove_table', 'user')])
Code Example #14
    def test_transient_to_persistent_collection(self):
        User, Address, addresses, users = (self.classes.User,
                                           self.classes.Address,
                                           self.tables.addresses,
                                           self.tables.users)

        mapper(User, users, properties={
            'addresses': relationship(Address,
                                      backref='user',
                                      collection_class=OrderedSet,
                                      order_by=addresses.c.id,
                                      cascade="all, delete-orphan")
        })
        mapper(Address, addresses)

        load = self.load_tracker(User)
        self.load_tracker(Address, load)

        u = User(id=7, name='fred', addresses=OrderedSet([
            Address(id=1, email_address='fred1'),
            Address(id=2, email_address='fred2'),
        ]))
        sess = create_session()
        sess.add(u)
        sess.flush()
        sess.expunge_all()

        eq_(load.called, 0)

        u = User(id=7, name='fred', addresses=OrderedSet([
            Address(id=3, email_address='fred3'),
            Address(id=4, email_address='fred4'),
        ]))

        u = sess.merge(u)

        # 1. merges User object.  updates into session.
        # 2.,3. merges Address ids 3 & 4, saves into session.
        # 4.,5. loads pre-existing elements in "addresses" collection,
        # marks as deleted, Address ids 1 and 2.
        eq_(load.called, 5)

        eq_(u,
            User(id=7, name='fred', addresses=OrderedSet([
                Address(id=3, email_address='fred3'),
                Address(id=4, email_address='fred4'),
            ]))
        )
        sess.flush()
        sess.expunge_all()
        eq_(sess.query(User).one(),
            User(id=7, name='fred', addresses=OrderedSet([
                Address(id=3, email_address='fred3'),
                Address(id=4, email_address='fred4'),
            ]))
        )
Code Example #15
File: sync.py  Project: eReuse/devicehub-teal
    def run(self,
            device: Device,
            components: Iterable[Component] or None) -> (Device, OrderedSet):
        """Synchronizes the device and components with the database.

        Identifies if the device and components exist in the database
        and updates / inserts them as necessary.

        Passed-in parameters have to be transient or, said differently,
        not-db-synced objects; otherwise they would end up being
        added to the session. `Learn more... <http://docs.sqlalchemy.org/
        en/latest/orm/session_state_management.html#quickie-intro-to
        -object-states>`_.

        This performs Add / Remove as necessary.

        :param device: The device to add / update to the database.
        :param components: Components that are inside of the device.
                           This method performs Add and Remove actions
                           so the device ends up with these components.
                           Components are added / updated accordingly.
                           If this is empty, all components are removed.
                           If this is None, it means that we are not
                           providing info about the components, in which
                           case we keep the already existing components
                           of the device; we don't touch them.
        :return: A tuple of:

                 1. The device from the database (with an ID) whose
                    ``components`` field contain the db version
                    of the passed-in components.
                 2. A list of Add / Remove (not yet added to session).
        """
        db_device = self.execute_register(device)
        db_components, actions = OrderedSet(), OrderedSet()
        if components is not None:  # We have component info (see above)
            if not isinstance(db_device, Computer):
                # Until a good reason is given, we synthetically forbid
                # non-computers with components
                raise ValidationError('Only computers can have components.')
            blacklist = set()  # type: Set[int]
            not_new_components = set()
            for component in components:
                db_component, is_new = self.execute_register_component(component,
                                                                       blacklist,
                                                                       parent=db_device)
                db_components.add(db_component)
                if not is_new:
                    not_new_components.add(db_component)
            # We only want to perform Add/Remove to not new components
            actions = self.add_remove(db_device, not_new_components)
            db_device.components = db_components
        return db_device, actions
Code Example #16
    def test_detached_to_persistent_collection(self):
        users, Address, addresses, User = (self.tables.users,
                                           self.classes.Address,
                                           self.tables.addresses,
                                           self.classes.User)

        mapper(User,
               users,
               properties={
                   'addresses':
                   relationship(Address,
                                backref='user',
                                order_by=addresses.c.id,
                                collection_class=OrderedSet)
               })
        mapper(Address, addresses)
        load = self.load_tracker(User)
        self.load_tracker(Address, load)

        a = Address(id=1, email_address='fred1')
        u = User(id=7,
                 name='fred',
                 addresses=OrderedSet([
                     a,
                     Address(id=2, email_address='fred2'),
                 ]))
        sess = create_session()
        sess.add(u)
        sess.flush()
        sess.expunge_all()

        u.name = 'fred jones'
        u.addresses.add(Address(id=3, email_address='fred3'))
        u.addresses.remove(a)

        eq_(load.called, 0)
        u = sess.merge(u)
        eq_(load.called, 4)
        sess.flush()
        sess.expunge_all()

        eq_(
            sess.query(User).first(),
            User(id=7,
                 name='fred jones',
                 addresses=OrderedSet([
                     Address(id=2, email_address='fred2'),
                     Address(id=3, email_address='fred3')
                 ])))
Code Example #17
def test_sync_run_components_empty():
    """
    Syncs a device that has an empty components list. The system should
    remove all the components from the device.
    """
    s = file('pc-components.db')
    pc = Computer(**s['device'], components=OrderedSet(Component(**c) for c in s['components']))
    db.session.add(pc)
    db.session.commit()

    # Create a new transient non-db synced object
    pc = Computer(**s['device'])
    db_pc, _ = Sync().run(pc, components=OrderedSet())
    assert not db_pc.components
    assert not pc.components
Code Example #18
File: compare.py  Project: Mayank0010/Doormed
def _autogen_for_tables(autogen_context, upgrade_ops, schemas):
    inspector = autogen_context.inspector

    conn_table_names = set()

    version_table_schema = (
        autogen_context.migration_context.version_table_schema)
    version_table = autogen_context.migration_context.version_table

    for schema_name in schemas:
        tables = set(inspector.get_table_names(schema=schema_name))
        if schema_name == version_table_schema:
            tables = tables.difference(
                [autogen_context.migration_context.version_table])

        conn_table_names.update(
            (schema_name, tname)
            for tname in tables if autogen_context.run_name_filters(
                tname, "table", {"schema_name": schema_name}))

    metadata_table_names = OrderedSet([
        (table.schema, table.name) for table in autogen_context.sorted_tables
    ]).difference([(version_table_schema, version_table)])

    _compare_tables(
        conn_table_names,
        metadata_table_names,
        inspector,
        upgrade_ops,
        autogen_context,
    )
Code Example #19
def _produce_net_changes(connection, metadata, diffs, autogen_context,
                            object_filters=(),
                            include_schemas=False):
    inspector = Inspector.from_engine(connection)
    # TODO: not hardcode alembic_version here ?
    conn_table_names = set()
    if include_schemas:
        schemas = set(inspector.get_schema_names())
        # ignore the information_schema
        schemas.discard("information_schema")
        # represent the dialect's default schema as None
        schemas.add(None)
        schemas.discard(connection.dialect.default_schema_name)
    else:
        schemas = [None]

    for s in schemas:
        tables = set(inspector.get_table_names(schema=s)).\
                difference(['alembic_version'])
        conn_table_names.update(zip([s] * len(tables), tables))

    metadata_table_names = OrderedSet([(table.schema, table.name)
                                for table in metadata.sorted_tables])

    _compare_tables(conn_table_names, metadata_table_names,
                    object_filters,
                    inspector, metadata, diffs, autogen_context)
Code Example #20
File: compare.py  Project: 2531699560/flask2
def _autogen_for_tables(autogen_context, upgrade_ops, schemas):
    inspector = autogen_context.inspector

    conn_table_names = set()

    version_table_schema = (
        autogen_context.migration_context.version_table_schema)
    version_table = autogen_context.migration_context.version_table

    for s in schemas:
        tables = set(inspector.get_table_names(schema=s))
        if s == version_table_schema:
            tables = tables.difference(
                [autogen_context.migration_context.version_table])
        conn_table_names.update(zip([s] * len(tables), tables))

    metadata_table_names = OrderedSet([
        (table.schema, table.name) for table in autogen_context.sorted_tables
    ]).difference([(version_table_schema, version_table)])

    _compare_tables(
        conn_table_names,
        metadata_table_names,
        inspector,
        upgrade_ops,
        autogen_context,
    )
Code Example #21
File: views.py  Project: forste/devicehub-teal
    def post(self):
        """
        Performs a Snapshot.

        See `Snapshot` section in docs for more info.
        """
        s = request.get_json()
        # Note that if we set the device / components into the snapshot
        # model object, when we flush them to the db we will flush
        # snapshot, and we want to wait to flush snapshot at the end
        device = s.pop('device')  # type: Computer
        components = s.pop('components') \
            if s['software'] == SnapshotSoftware.Workbench else None  # type: List[Component]
        snapshot = Snapshot(**s)

        # Remove new events from devices so they don't interfere with sync
        events_device = set(e for e in device.events_one)
        events_components = tuple(
            set(e for e in component.events_one) for component in components)
        device.events_one.clear()
        for component in components:
            component.events_one.clear()

        # noinspection PyArgumentList
        assert not device.events_one
        assert all(not c.events_one for c in components)
        db_device, remove_events = self.resource_def.sync.run(
            device, components)
        snapshot.device = db_device
        snapshot.events |= remove_events | events_device
        # commit will change the order of the components by what
        # the DB wants. Let's get a copy of the list so we preserve order
        ordered_components = OrderedSet(x for x in snapshot.components)

        for event in events_device:
            if isinstance(event, WorkbenchRate):
                # todo process workbench rate
                event.data_storage = 2
                event.graphic_card = 4
                event.processor = 1
                event.algorithm_software = RatingSoftware.Ereuse
                event.algorithm_version = StrictVersion('1.0')

        # Add the new events to the db-existing devices and components
        db_device.events_one |= events_device
        for component, events in zip(ordered_components, events_components):
            component.events_one |= events
            snapshot.events |= events

        db.session.add(snapshot)
        db.session.commit()
        # todo we are setting snapshot dirty again with this components but
        # we do not want to update it.
        # The real solution is https://stackoverflow.com/questions/
        # 24480581/set-the-insert-order-of-a-many-to-many-sqlalchemy-
        # flask-app-sqlite-db?noredirect=1&lq=1
        snapshot.components = ordered_components
        ret = self.schema.jsonify(snapshot)  # transform it back
        ret.status_code = 201
        return ret
Code Example #22
File: snapshot.py  Project: eReuse/devicehub-teal
    def build(self):
        device = self.snapshot_json.pop('device')  # type: Computer
        components = None
        if self.snapshot_json['software'] in (SnapshotSoftware.Workbench, SnapshotSoftware.WorkbenchAndroid):
            components = self.snapshot_json.pop('components', None)  # type: List[Component]
            if isinstance(device, Computer) and device.hid:
                device.add_mac_to_hid(components_snap=components)
        snapshot = Snapshot(**self.snapshot_json)

        # Remove new actions from devices so they don't interfere with sync
        actions_device = set(e for e in device.actions_one)
        device.actions_one.clear()
        if components:
            actions_components = tuple(set(e for e in c.actions_one) for c in components)
            for component in components:
                component.actions_one.clear()

        assert not device.actions_one
        assert all(not c.actions_one for c in components) if components else True
        db_device, remove_actions = self.resource_def.sync.run(device, components)

        del device  # Do not use device anymore
        snapshot.device = db_device
        snapshot.actions |= remove_actions | actions_device  # Set actions to snapshot
        # commit will change the order of the components by what
        # the DB wants. Let's get a copy of the list so we preserve order
        ordered_components = OrderedSet(x for x in snapshot.components)

        # Add the new actions to the db-existing devices and components
        db_device.actions_one |= actions_device
        if components:
            for component, actions in zip(ordered_components, actions_components):
                component.actions_one |= actions
                snapshot.actions |= actions

        if snapshot.software == SnapshotSoftware.Workbench:
            # Check ownership of the (non-component) device against the current user
            if db_device.owner_id != g.user.id:
                raise InsufficientPermission()
            # Compute ratings
            try:
                rate_computer, price = RateComputer.compute(db_device)
            except CannotRate:
                pass
            else:
                snapshot.actions.add(rate_computer)
                if price:
                    snapshot.actions.add(price)
        elif snapshot.software == SnapshotSoftware.WorkbenchAndroid:
            pass  # TODO try except to compute RateMobile
        # Check if HID is null and add Severity:Warning to Snapshot
        if snapshot.device.hid is None:
            snapshot.severity = Severity.Warning

        db.session.add(snapshot)
        db.session().final_flush()
        ret = self.schema.jsonify(snapshot)  # transform it back
        ret.status_code = 201
        db.session.commit()
        return ret
Code Example #23
File: test_device.py  Project: eReuse/devicehub-teal
def test_component_similar_one():
    user = User.query.filter().first()
    snapshot = yaml2json('pc-components.db')
    pc = snapshot['device']
    snapshot['components'][0]['serial_number'] = snapshot['components'][1][
        'serial_number'] = None
    pc = d.Desktop(**pc,
                   components=OrderedSet(
                       d.Component(**c) for c in snapshot['components']))
    component1, component2 = pc.components  # type: d.Component
    db.session.add(pc)
    db.session.flush()
    # Let's create a new component named 'A' similar to 1
    componentA = d.Component(model=component1.model,
                             manufacturer=component1.manufacturer,
                             owner_id=user.id)
    similar_to_a = componentA.similar_one(pc, set())
    assert similar_to_a == component1
    # d.Component B does not have the same model
    componentB = d.Component(model='nope',
                             manufacturer=component1.manufacturer)
    with pytest.raises(ResourceNotFound):
        assert componentB.similar_one(pc, set())
    # If we blacklist component A we won't get anything
    with pytest.raises(ResourceNotFound):
        assert componentA.similar_one(pc, blacklist={componentA.id})
Code Example #24
File: test_device.py  Project: slamora/devicehub-teal
def test_get_devices(app: Devicehub, user: UserClient):
    """Checks GETting multiple devices."""
    with app.app_context():
        pc = d.Desktop(model='p1mo',
                       manufacturer='p1ma',
                       serial_number='p1s',
                       chassis=ComputerChassis.Tower)
        pc.components = OrderedSet([
            d.NetworkAdapter(model='c1mo',
                             manufacturer='c1ma',
                             serial_number='c1s'),
            d.GraphicCard(model='c2mo', manufacturer='c2ma', memory=1500)
        ])
        pc1 = d.Desktop(model='p2mo',
                        manufacturer='p2ma',
                        serial_number='p2s',
                        chassis=ComputerChassis.Tower)
        pc2 = d.Laptop(model='p3mo',
                       manufacturer='p3ma',
                       serial_number='p3s',
                       chassis=ComputerChassis.Netbook)
        db.session.add_all((pc, pc1, pc2))
        db.session.commit()
    devices, _ = user.get(res=d.Device)
    assert tuple(dev['id'] for dev in devices['items']) == (1, 2, 3, 4, 5)
    assert tuple(
        dev['type']
        for dev in devices['items']) == (d.Desktop.t, d.Desktop.t, d.Laptop.t,
                                         d.NetworkAdapter.t, d.GraphicCard.t)
Code Example #25
def test_get_device(app: Devicehub, user: UserClient):
    """Checks GETting a Desktop with its components."""
    with app.app_context():
        pc = Desktop(model='p1mo', manufacturer='p1ma', serial_number='p1s')
        pc.components = OrderedSet([
            NetworkAdapter(model='c1mo', manufacturer='c1ma', serial_number='c1s'),
            GraphicCard(model='c2mo', manufacturer='c2ma', memory=1500)
        ])
        db.session.add(pc)
        db.session.add(Test(device=pc,
                            elapsed=timedelta(seconds=4),
                            error=False,
                            author=User(email='*****@*****.**')))
        db.session.commit()
    pc, _ = user.get(res=Device, item=1)
    assert len(pc['events']) == 1
    assert pc['events'][0]['type'] == 'Test'
    assert pc['events'][0]['device'] == 1
    assert pc['events'][0]['elapsed'] == 4
    assert not pc['events'][0]['error']
    assert UUID(pc['events'][0]['author'])
    assert 'events_components' not in pc, 'events_components are internal use only'
    assert 'events_one' not in pc, 'they are internal use only'
    assert 'author' not in pc
    assert tuple(c['id'] for c in pc['components']) == (2, 3)
    assert pc['hid'] == 'p1ma-p1s-p1mo'
    assert pc['model'] == 'p1mo'
    assert pc['manufacturer'] == 'p1ma'
    assert pc['serialNumber'] == 'p1s'
    assert pc['type'] == 'Desktop'
Code Example #26
File: compare.py  Project: zzzeek/alembic
def _autogen_for_tables(
    autogen_context: "AutogenContext",
    upgrade_ops: "UpgradeOps",
    schemas: Union[Set[None], Set[Optional[str]]],
) -> None:
    inspector = autogen_context.inspector

    conn_table_names: Set[Tuple[Optional[str], str]] = set()

    version_table_schema = (
        autogen_context.migration_context.version_table_schema)
    version_table = autogen_context.migration_context.version_table

    for schema_name in schemas:
        tables = set(inspector.get_table_names(schema=schema_name))
        if schema_name == version_table_schema:
            tables = tables.difference(
                [autogen_context.migration_context.version_table])

        conn_table_names.update(
            (schema_name, tname)
            for tname in tables if autogen_context.run_name_filters(
                tname, "table", {"schema_name": schema_name}))

    metadata_table_names = OrderedSet([
        (table.schema, table.name) for table in autogen_context.sorted_tables
    ]).difference([(version_table_schema, version_table)])

    _compare_tables(
        conn_table_names,
        metadata_table_names,
        inspector,
        upgrade_ops,
        autogen_context,
    )
Code Example #27
File: compare.py  Project: marcosptf/fedora
def _compare_columns(schema, tname, conn_table, metadata_table,
                     modify_table_ops, autogen_context, inspector):
    name = '%s.%s' % (schema, tname) if schema else tname
    metadata_cols_by_name = dict((c.name, c) for c in metadata_table.c)
    conn_col_names = dict((c.name, c) for c in conn_table.c)
    metadata_col_names = OrderedSet(sorted(metadata_cols_by_name))

    for cname in metadata_col_names.difference(conn_col_names):
        if autogen_context.run_filters(
                metadata_cols_by_name[cname], cname,
                "column", False, None):
            modify_table_ops.ops.append(
                ops.AddColumnOp.from_column_and_tablename(
                    schema, tname, metadata_cols_by_name[cname])
            )
            log.info("Detected added column '%s.%s'", name, cname)

    for colname in metadata_col_names.intersection(conn_col_names):
        metadata_col = metadata_cols_by_name[colname]
        conn_col = conn_table.c[colname]
        if not autogen_context.run_filters(
                metadata_col, colname, "column", False,
                conn_col):
            continue
        alter_column_op = ops.AlterColumnOp(
            tname, colname, schema=schema)

        comparators.dispatch("column")(
            autogen_context, alter_column_op,
            schema, tname, colname, conn_col, metadata_col
        )

        if alter_column_op.has_changes():
            modify_table_ops.ops.append(alter_column_op)

    yield

    for cname in set(conn_col_names).difference(metadata_col_names):
        if autogen_context.run_filters(
                conn_table.c[cname], cname,
                "column", True, None):
            modify_table_ops.ops.append(
                ops.DropColumnOp.from_column_and_tablename(
                    schema, tname, conn_table.c[cname]
                )
            )
            log.info("Detected removed column '%s.%s'", name, cname)
Code Example #28
File: autogenerate.py  Project: daqing15/alembic
def _compare_columns(schema, tname, conn_table, metadata_table,
                                diffs, autogen_context):
    name = '%s.%s' % (schema, tname) if schema else tname
    metadata_cols_by_name = dict((c.name, c) for c in metadata_table.c)
    conn_col_names = set(conn_table)
    metadata_col_names = OrderedSet(sorted(metadata_cols_by_name))

    for cname in metadata_col_names.difference(conn_col_names):
        diffs.append(
            ("add_column", schema, tname, metadata_cols_by_name[cname])
        )
        log.info("Detected added column '%s.%s'", name, cname)

    for cname in conn_col_names.difference(metadata_col_names):
        diffs.append(
            ("remove_column", schema, tname, sa_schema.Column(
                cname,
                conn_table[cname]['type'],
                nullable=conn_table[cname]['nullable'],
                server_default=conn_table[cname]['default']
            ))
        )
        log.info("Detected removed column '%s.%s'", name, cname)

    for colname in metadata_col_names.intersection(conn_col_names):
        metadata_col = metadata_cols_by_name[colname]
        conn_col = conn_table[colname]
        col_diff = []
        _compare_type(schema, tname, colname,
            conn_col,
            metadata_col,
            col_diff, autogen_context
        )
        _compare_nullable(schema, tname, colname,
            conn_col,
            metadata_col.nullable,
            col_diff, autogen_context
        )
        _compare_server_default(schema, tname, colname,
            conn_col,
            metadata_col,
            col_diff, autogen_context
        )
        if col_diff:
            diffs.append(col_diff)
Code Example #29
    def test_transient_to_pending_collection(self):
        User, Address, addresses, users = (self.classes.User,
                                           self.classes.Address,
                                           self.tables.addresses,
                                           self.tables.users)

        mapper(User,
               users,
               properties={
                   'addresses':
                   relationship(Address,
                                backref='user',
                                collection_class=OrderedSet)
               })
        mapper(Address, addresses)
        load = self.load_tracker(User)
        self.load_tracker(Address, load)

        u = User(id=7,
                 name='fred',
                 addresses=OrderedSet([
                     Address(id=1, email_address='fred1'),
                     Address(id=2, email_address='fred2'),
                 ]))
        eq_(load.called, 0)

        sess = create_session()
        sess.merge(u)
        eq_(load.called, 3)

        merged_users = [e for e in sess if isinstance(e, User)]
        eq_(len(merged_users), 1)
        assert merged_users[0] is not u

        sess.flush()
        sess.expunge_all()

        eq_(
            sess.query(User).one(),
            User(id=7,
                 name='fred',
                 addresses=OrderedSet([
                     Address(id=1, email_address='fred1'),
                     Address(id=2, email_address='fred2'),
                 ])))
Code Example #30
File: dvc_database.py  Project: OSUPychron/pychron
    def find_references(self, times, atypes, hours=10, exclude=None):
        with self.session_ctx() as sess:
            # delta = 60 * 60 * hours  # seconds
            delta = timedelta(hours=hours)
            refs = OrderedSet()
            ex = None
            for ti in times:
                low = ti - delta
                high = ti + delta
                # rs = self.get_analyses_data_range(low, high, atypes, exclude=ex, exclude_uuids=exclude)
                rs = self.get_analyses_by_date_range(low, high,
                                                     analysis_type=atypes, exclude=ex, exclude_uuids=exclude)
                refs.update(rs)
                ex = [r.idanalysisTbl for r in refs]
                # print rs
                # print ti, low, high, rs, refs
            # print 'refs', refs
            return [ri.record_view for ri in refs]
Code Example #31
File: base.py  Project: wisedier/flask-sqlalchemy-api
class DeclarativeBase(object):
    exclude_modules = OrderedSet(['db', 'models'])

    @declared_attr
    def __tablename__(self):
        names = self.__module__.split('.') + inflection.underscore(self.__name__).split('_')
        names = list(OrderedSet(names) - self.exclude_modules)
        names[-1] = inflection.pluralize(names[-1])
        return '_'.join(names)
Code Example #32
File: base.py  Project: arizona95/Brain
    def __tablename__(self):
        """Returns snake_case table name
        """

        names = self.__module__.split('.') + inflection.underscore(
            self.__name__).split('_')
        names = list(OrderedSet(names) - self.exclude_modules)
        names[-1] = inflection.pluralize(names[-1])
        return '_'.join(names)
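Note: a minimal sketch (my own illustration, not part of either project above) of the __tablename__ derivation in Code Examples #31 and #32, assuming sqlalchemy.util.OrderedSet and the inflection package; the module and class names below are hypothetical. The OrderedSet difference drops the excluded path parts, and any duplicates, while preserving the order of what remains.

import inflection
from sqlalchemy.util import OrderedSet

def derive_tablename(module, class_name, exclude_modules=OrderedSet(['db', 'models'])):
    # dotted module path plus the snake_cased class name, split into parts
    names = module.split('.') + inflection.underscore(class_name).split('_')
    # drop excluded (and duplicate) parts while preserving order
    names = list(OrderedSet(names) - exclude_modules)
    # pluralize only the last part
    names[-1] = inflection.pluralize(names[-1])
    return '_'.join(names)

assert derive_tablename('app.models.account', 'UserProfile') == 'app_account_user_profiles'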
Code Example #33
def test_sync_execute_register_tag_does_not_exist():
    """
    Ensures that the device cannot be registered if the tag does not
    exist, even if the device has a HID or existed before.

    Tags have to be created before trying to link them through a Snapshot.
    """
    pc = Computer(**file('pc-components.db')['device'], tags=OrderedSet([Tag()]))
    with raises(ResourceNotFound):
        Sync().execute_register(pc)
Code Example #34
File: views.py  Project: eReuse/devicehub-teal
    def merge_devices(self, dev1_id: int, dev2_id: int) -> Device:
        """Merge the current device with `with_device` (dev2_id) by
        adding all `with_device` actions under the current device (dev1_id).

        This operation is highly costly as it forces refreshing
        many models in the session.
        """
        # base_device = Device.query.filter_by(id=dev1_id, owner_id=g.user.id).one()
        self.base_device = Device.query.filter_by(id=dev1_id, owner_id=g.user.id).one()
        self.with_device = Device.query.filter_by(id=dev2_id, owner_id=g.user.id).one()

        if self.base_device.allocated or self.with_device.allocated:
            # Validate that neither device is allocated
            msg = 'The device is allocated; please deallocate it before merging.'
            raise ValidationError(msg)

        if not self.base_device.type == self.with_device.type:
            # Validate that both devices are of the same type
            raise ValidationError('The devices are not of the same type.')

        # Adding actions of self.with_device
        with_actions_one = [a for a in self.with_device.actions
                            if isinstance(a, actions.ActionWithOneDevice)]
        with_actions_multiple = [a for a in self.with_device.actions
                                 if isinstance(a, actions.ActionWithMultipleDevices)]

        # Moving the tags from `with_device` to `base_device`
        # Union of tags the device had plus the (potentially) new ones
        self.base_device.tags.update([x for x in self.with_device.tags])
        self.with_device.tags.clear()  # We don't want to add the transient dummy tags
        db.session.add(self.with_device)

        # Moving the actions from `with_device` to `base_device`
        for action in with_actions_one:
            if action.parent:
                action.parent = self.base_device
            else:
                self.base_device.actions_one.add(action)
        for action in with_actions_multiple:
            if action.parent:
                action.parent = self.base_device
            else:
                self.base_device.actions_multiple.add(action)

        # Keeping the components of with_device
        components = OrderedSet(c for c in self.with_device.components)
        self.base_device.components = components

        # Properties from with_device
        self.merge()

        db.session().add(self.base_device)
        db.session().final_flush()
        return self.base_device
Code Example #35
File: dvc_database.py  Project: waffle-iron/pychron
    def find_references(self, times, atypes, hours=10, exclude=None):
        with self.session_ctx() as sess:
            # delta = 60 * 60 * hours  # seconds
            delta = timedelta(hours=hours)
            refs = OrderedSet()
            ex = None
            for ti in times:
                low = ti - delta
                high = ti + delta
                # rs = self.get_analyses_data_range(low, high, atypes, exclude=ex, exclude_uuids=exclude)
                rs = self.get_analyses_by_date_range(low,
                                                     high,
                                                     analysis_type=atypes,
                                                     exclude=ex,
                                                     exclude_uuids=exclude)
                refs.update(rs)
                ex = [r.idanalysisTbl for r in refs]
                # print rs
                # print ti, low, high, rs, refs
            # print 'refs', refs
            return [ri.record_view for ri in refs]