Example #1
0
    def test_delete_existing_object(self):
        """
            Test the method _delete_object(self, obj) when the object exists
            Test scenario:
            When the data is in the database, the delete operation should
            complete successfully
        """

        # Record phase (mox): the StubOutWithMock/AndReturn pairs below
        # script the exact call sequence _delete_object is expected to make.
        # `session`, `query`, `model` and `transaction` are presumably
        # module-level test fixtures defined outside this view -- confirm.
        self.aMox.StubOutWithMock(session, 'query')
        session.query(model.PowerVCMapping).AndReturn(query)

        # Filtering by the mapping's id must hand back the same fake query.
        self.aMox.StubOutWithMock(query, 'filter_by')
        query.filter_by(id=self.powerVCMapping['id']).AndReturn(query)

        # one() yields the mapping object that is about to be deleted.
        self.aMox.StubOutWithMock(query, 'one')
        query.one().AndReturn(self.powerVCMapping)

        # The delete is expected to run inside a (sub)transaction.
        self.aMox.StubOutWithMock(session, 'begin')
        session.begin(subtransactions=True).AndReturn(transaction(None, None))

        # Capture what recording the delete call hands back so it can be
        # compared after replay (stubbed to return True).
        self.aMox.StubOutWithMock(session, 'delete')
        returnValue = session.delete(self.powerVCMapping).AndReturn(True)

        self.aMox.ReplayAll()

        # Exercise the unit under test with the scripted expectations live.
        self.powervcagentdb._delete_object(self.powerVCMapping)

        # Verify every recorded call actually happened, in order.
        self.aMox.VerifyAll()

        self.assertEqual(returnValue, True)

        self.aMox.UnsetStubs()
    def test_delete_existing_object(self):
        """
            Test the method _delete_object(self, obj) when the object exists
            Test scenario:
            When the data is in the database, the delete operation should
            complete successfully
        """

        # NOTE(review): this method name is defined twice in this file with
        # an identical body; in a single class the later definition shadows
        # the earlier one, so only one of them ever runs -- confirm intent.

        # Record phase (mox): script the expected call sequence.
        # `session`, `query`, `model` and `transaction` are presumably
        # module-level fixtures defined outside this view -- confirm.
        self.aMox.StubOutWithMock(session, 'query')
        session.query(model.PowerVCMapping).AndReturn(query)

        # The id filter must return the same fake query object.
        self.aMox.StubOutWithMock(query, 'filter_by')
        query.filter_by(id=self.powerVCMapping['id']).AndReturn(query)

        # one() produces the mapping slated for deletion.
        self.aMox.StubOutWithMock(query, 'one')
        query.one().AndReturn(self.powerVCMapping)

        # Deletion should be wrapped in a (sub)transaction.
        self.aMox.StubOutWithMock(session, 'begin')
        session.begin(subtransactions=True).AndReturn(transaction(None, None))

        # Record the delete call; its stubbed result is asserted below.
        self.aMox.StubOutWithMock(session, 'delete')
        returnValue = session.delete(self.powerVCMapping).AndReturn(True)

        self.aMox.ReplayAll()

        # Drive the code under test against the recorded expectations.
        self.powervcagentdb._delete_object(self.powerVCMapping)

        self.aMox.VerifyAll()

        self.assertEqual(returnValue, True)

        self.aMox.UnsetStubs()
def memoize_bed(string, sql):
    """Memoize a BED12 file into a SQLite index for fast random access.

    Each valid record contributes a ``BedIndex(name, pos)`` row mapping the
    record name (column 4, or its GFF-style ``ID=`` attribute) to the byte
    offset of its line, so records can later be fetched with a seek instead
    of a rescan.

    :param string: path of the BED12 file to index.
    :param sql: path of the SQLite database file to create/populate.
    :raises AssertionError: if the number of rows stored in the ``bed``
        table does not match the number of records parsed.
    """

    beds = []

    engine = create_engine("sqlite:///{}".format(sql))
    DBBASE.metadata.create_all(engine)

    session = Session(bind=engine, autocommit=True, autoflush=True)

    counter = 0
    with open(string, "rb") as parser:
        pos = parser.tell()  # This will be 0 as we are at the beginning
        for line in parser:
            fields = line.split(b"\t")
            # Header/invalid lines: wrong column count or comment line.
            # BUGFIX: the original test ``fields[0][0] == b"#"`` can never
            # be true on Python 3 -- indexing a bytes object yields an int,
            # not a length-1 bytes -- and it raises IndexError on an empty
            # first field. startswith() handles both cases correctly.
            header = len(fields) != 12 or fields[0].startswith(b"#")

            if not header:
                name = fields[3].decode()
                # GFF-style attribute string: prefer the ID= value.
                if "ID=" in name:
                    groups = dict(re.findall("([^(;|=)]*)=([^;]*)", name))
                    name = groups["ID"]
                beds.append(BedIndex(name, pos))
                counter += 1
            pos += len(line)  # byte offset of the *next* line

            # Flush in chunks so memory stays bounded on huge files.
            if len(beds) > 10**6:
                session.begin(subtransactions=True)
                session.add_all(beds)
                session.commit()
                beds = []

    # Final flush of the remainder, then sanity-check the row count.
    session.begin(subtransactions=True)
    session.add_all(beds)
    session.commit()
    session.close()
    res = engine.execute("select count(*) from bed").fetchone()
    assert res[0] == counter, (res[0], counter)
Example #4
0
    def test_create_object(self):
        """
            Test the method def _create_object(self, obj_type, sync_key,
            local_id=None, pvc_id=None)
        """

        # Inputs derived from the fake network fixtures -- presumably
        # prepared in setUp, outside this view; confirm.
        obj_type = "Network"
        sync_key = utils.gen_network_sync_key(
            self.fakePowerVCNetwork.powerNetInstance)
        local_id = self.fakeOSNetwork.fakeOSNetworkInstance['id']
        pvc_id = self.fakePowerVCNetwork.powerNetInstance['id']

        # Pre-built mapping the stubbed constructor will return, built
        # BEFORE model.PowerVCMapping is stubbed out below.
        inputPowerVCMObj = model.PowerVCMapping(obj_type, sync_key)

        # Record phase (mox): creation must run inside a transaction...
        self.aMox.StubOutWithMock(session, 'begin')
        session.begin(subtransactions=True).AndReturn(transaction(None, None))

        # ...construct a PowerVCMapping from (obj_type, sync_key)...
        self.aMox.StubOutWithMock(model, 'PowerVCMapping')
        model.PowerVCMapping(obj_type, sync_key).AndReturn(inputPowerVCMObj)

        # ...and add the new object to the session.
        self.aMox.StubOutWithMock(session, 'add')
        session.add(inputPowerVCMObj).AndReturn("")

        self.aMox.ReplayAll()

        # Exercise the unit under test.
        self.powervcagentdb._create_object(obj_type,
                                           sync_key,
                                           update_data=None,
                                           local_id=local_id,
                                           pvc_id=pvc_id)

        self.aMox.VerifyAll()

        # The created object should mirror the reference mapping fixture.
        self.assertEqual(self.powerVCMapping.local_id,
                         inputPowerVCMObj.local_id)
        self.assertEqual(self.powerVCMapping.pvc_id, inputPowerVCMObj.pvc_id)
        self.assertEqual(self.powerVCMapping.status, inputPowerVCMObj.status)
        self.aMox.UnsetStubs()
    def test_create_object(self):
        """
            Test the method def _create_object(self, obj_type, sync_key,
            local_id=None, pvc_id=None)
        """

        # NOTE(review): this method name appears twice in this file with an
        # identical body; within one class the later definition shadows the
        # earlier one -- confirm intent.

        # Inputs derived from the fake network fixtures (presumably built
        # in setUp, outside this view; confirm).
        obj_type = "Network"
        sync_key = utils.gen_network_sync_key(
            self.fakePowerVCNetwork.powerNetInstance)
        local_id = self.fakeOSNetwork.fakeOSNetworkInstance['id']
        pvc_id = self.fakePowerVCNetwork.powerNetInstance['id']

        # Mapping object the stubbed constructor will hand back; built
        # before the constructor is stubbed.
        inputPowerVCMObj = model.PowerVCMapping(obj_type, sync_key)

        # Record phase (mox): transaction, construction, session.add.
        self.aMox.StubOutWithMock(session, 'begin')
        session.begin(subtransactions=True).AndReturn(transaction(None, None))

        self.aMox.StubOutWithMock(model, 'PowerVCMapping')
        model.PowerVCMapping(obj_type, sync_key).AndReturn(inputPowerVCMObj)

        self.aMox.StubOutWithMock(session, 'add')
        session.add(inputPowerVCMObj).AndReturn("")

        self.aMox.ReplayAll()

        # Exercise the unit under test.
        self.powervcagentdb._create_object(
            obj_type, sync_key, update_data=None,
            local_id=local_id, pvc_id=pvc_id)

        self.aMox.VerifyAll()

        # The created object should mirror the reference mapping fixture.
        self.assertEqual(
            self.powerVCMapping.local_id, inputPowerVCMObj.local_id)
        self.assertEqual(self.powerVCMapping.pvc_id, inputPowerVCMObj.pvc_id)
        self.assertEqual(self.powerVCMapping.status, inputPowerVCMObj.status)
        self.aMox.UnsetStubs()
Example #6
0
class Reindexer:
    """Rebuilds the search index for every indexable content class.

    Documents are fed to an indexing *strategy* generator
    (``progressive_mode`` or ``single_transaction``) via ``send()``;
    ``COMMIT``/``STOP`` sentinels drive its write and finish phases.
    """

    def __init__(self, clear, progressive, batch_size):
        # type: (bool, bool, int) -> None
        self.clear = clear
        self.progressive = progressive
        # None and 0 both mean "no intermediate batching".
        self.batch_size = int(batch_size or 0)

        self.index_service = get_service("indexing")
        self.index = self.index_service.app_state.indexes["default"]
        self.adapted = self.index_service.adapted
        # Dedicated autocommit session bound to the application's engine.
        self.session = Session(bind=db.session.get_bind(None, None), autocommit=True)
        # object_keys already sent / object_types already cleared.
        self.indexed = set()  # type: Set[str]
        self.cleared = set()  # type: Set[str]

        strategy = progressive_mode if self.progressive else single_transaction
        self.strategy = strategy(self.index, clear=self.clear)

    def reindex_all(self):
        """Reindex every indexed class, in deterministic (name) order."""
        next(self.strategy)  # starts generator

        indexed_classes = self.index_service.app_state.indexed_classes
        for cls in sorted(indexed_classes, key=lambda c: c.__name__):
            self.reindex_class(cls)

        # Ask the strategy to finish, then close it; either step may end
        # the generator, which surfaces as StopIteration.
        try:
            self.strategy.send(STOP)
        except StopIteration:
            pass

        try:
            self.strategy.close()
        except StopIteration:
            pass

    def reindex_class(self, cls):
        # type: (Entity) -> None
        """Reindex all instances of ``cls`` inside one session transaction."""
        current_object_type = cls._object_type()

        # NOTE(review): the per-type clear is sent only when self.clear is
        # false -- presumably a full clear is handled inside the strategy
        # itself; confirm.
        if not self.clear and current_object_type not in self.cleared:
            self.strategy.send(current_object_type)
            self.cleared.add(current_object_type)

        adapter = self.adapted.get(current_object_type)

        # Classes without an indexable adapter are skipped entirely.
        if not adapter or not adapter.indexable:
            return

        name = cls.__name__

        with self.session.begin():
            # lazyload("*") avoids eagerly loading relations we never index.
            query = self.session.query(cls).options(sa.orm.lazyload("*"))
            try:
                count = query.count()
            except Exception as e:
                current_app.logger.error(
                    "Indexing error on class {}: {}".format(name, repr(e))
                )
                return

            # NOTE(review): the banner below is printed up to three times
            # per class (twice for empty classes) -- looks redundant;
            # confirm before relying on the console output.
            print("*" * 79)
            print(f"{name}")
            if count == 0:
                print("*" * 79)
                print(f"{name}")
                return

            print("*" * 79)
            print(f"{name}")

            with tqdm(total=count) as bar:
                self.reindex_batch(query, current_object_type, adapter, bar)

            # Without batching, commit once after the whole class.
            if not self.batch_size:
                self.strategy.send(COMMIT)

        self.strategy.send(COMMIT)

    def reindex_batch(self, query, current_object_type, adapter, bar):
        """Stream query results, sending one document per object to the strategy."""
        count = 0
        for obj in query.yield_per(1000):
            count += 1
            if obj.object_type != current_object_type:
                # may happen if obj is a subclass and its parent class
                # is also indexable
                bar.update()
                continue

            object_key = obj.object_key

            # Skip objects already indexed under another (parent) class.
            if object_key in self.indexed:
                bar.update()
                continue

            document = self.index_service.get_document(obj, adapter)
            self.strategy.send(document)
            self.indexed.add(object_key)

            # Periodic commit every batch_size documents.
            # NOTE(review): this bar.update() plus the unconditional one
            # below double-counts progress at batch boundaries -- confirm.
            if self.batch_size and (count % self.batch_size) == 0:
                bar.update()
                self.strategy.send(COMMIT)

            bar.update()
Example #7
0
class Reindexer:
    """Rebuilds the search index for every indexable content class.

    A strategy generator (``progressive_mode`` or ``single_transaction``)
    receives object types, documents and ``COMMIT``/``STOP`` sentinels
    through ``send()`` and performs the actual index writes.

    NOTE(review): this file contains another, nearly identical Reindexer
    class earlier on; if both live in one module the later definition wins.
    """

    def __init__(self, clear, progressive, batch_size):
        # type: (bool, bool, int) -> None
        self.clear = clear
        self.progressive = progressive
        # Treat None/0 alike: no intermediate batching.
        self.batch_size = int(batch_size or 0)

        self.index_service = get_service("indexing")
        self.index = self.index_service.app_state.indexes["default"]
        # pyre-fixme[16]: `Service` has no attribute `adapted`.
        self.adapted = self.index_service.adapted
        # Private autocommit session on the application's engine.
        self.session = Session(bind=db.session.get_bind(None, None),
                               autocommit=True)
        # Track work already done: sent object_keys / cleared object_types.
        self.indexed = set()  # type: Set[str]
        self.cleared = set()  # type: Set[str]

        strategy = progressive_mode if self.progressive else single_transaction
        self.strategy = strategy(self.index, clear=self.clear)

    def reindex_all(self):
        """Reindex all indexed classes in name order, then stop the strategy."""
        next(self.strategy)  # starts generator

        indexed_classes = self.index_service.app_state.indexed_classes
        for cls in sorted(indexed_classes, key=lambda c: c.__name__):
            self.reindex_class(cls)

        # Finishing and closing may each end the generator (StopIteration).
        try:
            self.strategy.send(STOP)
        except StopIteration:
            pass

        try:
            self.strategy.close()
        except StopIteration:
            pass

    def reindex_class(self, cls):
        # type: (Entity) -> None
        """Reindex every instance of ``cls`` within one transaction."""
        current_object_type = cls._object_type()

        # NOTE(review): clearing happens only when self.clear is false --
        # presumably the strategy handles the full-clear case; confirm.
        if not self.clear and current_object_type not in self.cleared:
            self.strategy.send(current_object_type)
            self.cleared.add(current_object_type)

        adapter = self.adapted.get(current_object_type)

        if not adapter or not adapter.indexable:
            return

        # pyre-fixme[16]: `Entity` has no attribute `__name__`.
        name = cls.__name__

        with self.session.begin():
            # lazyload("*") keeps relation loading out of the scan.
            query = self.session.query(cls).options(sa.orm.lazyload("*"))
            try:
                count = query.count()
            except Exception as e:
                current_app.logger.error(
                    f"Indexing error on class {name}: {repr(e)}")
                return

            # NOTE(review): this banner prints up to three times per class
            # (twice for empty classes) -- looks redundant; confirm.
            print("*" * 79)
            print(f"{name}")
            if count == 0:
                print("*" * 79)
                print(f"{name}")
                return

            print("*" * 79)
            print(f"{name}")

            with tqdm(total=count) as bar:
                self.reindex_batch(query, current_object_type, adapter, bar)

            # No batching: a single commit for the entire class.
            if not self.batch_size:
                self.strategy.send(COMMIT)

        self.strategy.send(COMMIT)

    def reindex_batch(self, query, current_object_type, adapter, bar):
        """Stream results, emitting one document per distinct object."""
        count = 0
        for obj in query.yield_per(1000):
            count += 1
            if obj.object_type != current_object_type:
                # may happen if obj is a subclass and its parent class
                # is also indexable
                bar.update()
                continue

            object_key = obj.object_key

            # Deduplicate across classes sharing the same object_key.
            if object_key in self.indexed:
                bar.update()
                continue

            document = self.index_service.get_document(obj, adapter)
            self.strategy.send(document)
            self.indexed.add(object_key)

            # Periodic commit every batch_size documents.
            # NOTE(review): bar.update() here plus the unconditional call
            # below double-counts progress at batch boundaries -- confirm.
            if self.batch_size and (count % self.batch_size) == 0:
                bar.update()
                self.strategy.send(COMMIT)

            bar.update()
Example #8
0
class InboxSession(object):
    """Thin wrapper around a SQLAlchemy ``Session`` with Inbox semantics.

    Parameters
    ----------
    engine : <sqlalchemy.engine.Engine>
        A configured database engine backing this session.
    versioned : bool
        When true, revision snapshots are logged after every flush.
    ignore_soft_deletes : bool
        Whether or not to hide soft-deleted objects from query results
        (deletes then become soft deletes).
    namespace_id : int
        Namespace to limit query results with.
    """
    def __init__(self, engine, versioned=True, ignore_soft_deletes=True,
                 namespace_id=None):
        # TODO: support limiting on namespaces
        assert engine, "Must set the database engine"

        self.ignore_soft_deletes = ignore_soft_deletes
        session_kwargs = dict(bind=engine, autoflush=True, autocommit=False)
        if ignore_soft_deletes:
            # Custom query class filters out soft-deleted rows.
            session_kwargs['query_cls'] = InboxQuery
        self._session = Session(**session_kwargs)

        if versioned:
            from inbox.models.transaction import create_revisions

            @event.listens_for(self._session, 'after_flush')
            def log_revisions(session, flush_context):
                """Record revision snapshots. Runs post-flush so that
                newly created objects already have their IDs."""
                create_revisions(session)

    def query(self, *args, **kwargs):
        base_query = self._session.query(*args, **kwargs)
        if not self.ignore_soft_deletes:
            return base_query
        return base_query.options(IgnoreSoftDeletesOption())

    def add(self, instance):
        # In soft-delete mode, refuse to (re-)add an already-deleted object.
        if self.ignore_soft_deletes and instance.is_deleted:
            raise Exception("Why are you adding a deleted object?")
        self._session.add(instance)

    def add_all(self, instances):
        deleted_flags = [i.is_deleted for i in instances]
        if self.ignore_soft_deletes and True in deleted_flags:
            raise Exception("Why are you adding a deleted object?")
        self._session.add_all(instances)

    def delete(self, instance):
        if not self.ignore_soft_deletes:
            self._session.delete(instance)
            return
        # Soft delete: flag the object, then re-add it just to make sure.
        instance.mark_deleted()
        self._session.add(instance)

    def begin(self):
        self._session.begin()

    def commit(self):
        self._session.commit()

    def rollback(self):
        self._session.rollback()

    def flush(self):
        self._session.flush()

    def close(self):
        self._session.close()

    def expunge(self, obj):
        self._session.expunge(obj)

    def merge(self, obj):
        return self._session.merge(obj)

    @property
    def no_autoflush(self):
        return self._session.no_autoflush
Example #9
0
def reindex(clear=False, progressive=False, batch_size=None):
    """Reindex all content; optionally clear index before.

    All is done in a single transaction by default.

    :param clear: clear index content.
    :param progressive: don't run in a single transaction.
    :param batch_size: number of documents to process before writing to the
                       index. Unused in single transaction mode. If `None`
                       then all documents of same content type are written
                       at once.
    """
    svc = current_app.services["indexing"]
    adapted = svc.adapted
    index = svc.app_state.indexes["default"]
    # Dedicated autocommit session on the same engine as the app session.
    session = Session(bind=current_app.db.session.get_bind(None, None), autocommit=True)

    setattr(session, "_model_changes", {})  # please flask-sqlalchemy <= 1.0
    indexed = set()  # object_keys already sent to the strategy
    cleared = set()  # object_types already cleared from the index
    if batch_size is not None:
        batch_size = int(batch_size)
    strategy_kw = dict(clear=clear, progressive=progressive, batch_size=batch_size)
    strategy = progressive_mode if progressive else single_transaction
    strategy = strategy(index, **strategy_kw)
    next(strategy)  # starts generator
    count_indexed = 0

    for cls in sorted(svc.app_state.indexed_classes, key=lambda c: c.__name__):
        current_object_type = cls._object_type()

        # NOTE(review): per-type clearing runs only when `clear` is false --
        # presumably the strategy handles a full clear itself; confirm.
        if not clear and current_object_type not in cleared:
            strategy.send(current_object_type)
            cleared.add(current_object_type)

        adapter = adapted.get(current_object_type)

        if not adapter or not adapter.indexable:
            continue

        name = cls.__name__

        with session.begin():
            # lazyload("*") avoids eager-loading relations we never index.
            q = session.query(cls).options(sa.orm.lazyload("*"))
            count = q.count()

            if count == 0:
                print("{}: 0".format(name))
                continue

            # Console progress bar: "Name: i/count elapsed % bar ETA".
            widgets = [
                name,
                ": ",
                pb.Counter(),
                "/{}".format(count),
                " ",
                pb.Timer(),
                " ",
                pb.Percentage(),
                " ",
                pb.Bar(),
                " ",
                pb.ETA(),
            ]
            progress = pb.ProgressBar(widgets=widgets, maxval=count)
            progress.start()
            count_current = 0

            for obj in q.yield_per(1000):
                if obj.object_type != current_object_type:
                    # may happen if obj is a subclass and its parent class is also
                    # indexable
                    continue

                object_key = obj.object_key

                # Deduplicate objects already indexed under another class.
                if object_key in indexed:
                    continue
                document = svc.get_document(obj, adapter)
                strategy.send(document)
                indexed.add(object_key)
                count_indexed += 1
                count_current += 1
                try:
                    progress.update(count_current)
                except ValueError:
                    pass

                # Periodic commit every batch_size documents.
                if batch_size is not None and (count_current % batch_size) == 0:
                    strategy.send(COMMIT)

            # No batching: a single commit for the whole class.
            if batch_size is None:
                strategy.send(COMMIT)

            progress.finish()

        strategy.send(COMMIT)

    # Drain and close the strategy; either step may end the generator.
    try:
        strategy.send(STOP)
    except StopIteration:
        pass

    try:
        strategy.close()
    except StopIteration:
        pass
Example #10
0
def reindex(clear=False, progressive=False, batch_size=None):
  """
  Reindex all content; optionally clear index before. All is done in a
  single transaction by default.

  :param clear: clear index content.
  :param progressive: don't run in a single transaction.
  :param batch_size: number of documents to process before writing to the
                     index. Unused in single transaction mode. If `None` then
                     all documents of same content type are written at once.
  """
  svc = current_app.services['indexing']
  adapted = svc.adapted
  index = svc.app_state.indexes['default']
  # Dedicated autocommit session on the application's engine.
  session = Session(bind=current_app.db.session.get_bind(None, None),
                    autocommit=True)

  setattr(session, '_model_changes', {}) # please flask-sqlalchemy <= 1.0
  indexed = set()  # object_keys already sent to the strategy
  cleared = set()  # object_types already cleared from the index
  if batch_size is not None:
    batch_size = int(batch_size)
  strategy_kw = dict(clear=clear, progressive=progressive,
                     batch_size=batch_size)
  strategy = progressive_mode if progressive else single_transaction
  strategy = strategy(index, **strategy_kw)
  next(strategy) # starts generator
  count_indexed = 0

  for cls in sorted(svc.app_state.indexed_classes, key=lambda c: c.__name__):
    current_object_type = cls._object_type()

    # NOTE(review): per-type clearing runs only when `clear` is false --
    # presumably a full clear is handled by the strategy itself; confirm.
    if not clear and current_object_type not in cleared:
      strategy.send(current_object_type)
      cleared.add(current_object_type)

    adapter = adapted.get(current_object_type)

    if not adapter or not adapter.indexable:
      continue

    name = cls.__name__

    with session.begin():
      # lazyload('*') keeps relation loading out of the scan.
      q = session.query(cls).options(sa.orm.lazyload('*'))
      count = q.count()

      if count == 0:
        print("{}: 0".format(name))
        continue

      # Console progress bar: "Name: i/count elapsed % bar ETA".
      widgets = [name,
                 ': ', pb.Counter(), '/{}'.format(count),
                 ' ', pb.Timer(),
                 ' ', pb.Percentage(),
                 ' ', pb.Bar(),
                 ' ', pb.ETA(),
                 ]
      progress = pb.ProgressBar(widgets=widgets, maxval=count)
      progress.start()
      count_current = 0

      for obj in q.yield_per(1000):
        if obj.object_type != current_object_type:
          # may happen if obj is a subclass and its parent class is also
          # indexable
          continue

        object_key = obj.object_key

        # Skip objects already indexed under another (parent) class.
        if object_key in indexed:
          continue
        document = svc.get_document(obj, adapter)
        strategy.send(document)
        indexed.add(object_key)
        count_indexed += 1
        count_current += 1
        try:
          progress.update(count_current)
        except ValueError:
          pass

        # Periodic commit every batch_size documents.
        if batch_size is not None and (count_current % batch_size) == 0:
          strategy.send(COMMIT)

      # No batching: one commit for the entire class.
      if batch_size is None:
        strategy.send(COMMIT)

      progress.finish()

    strategy.send(COMMIT)

  # Drain and close the strategy; either step may end the generator.
  try:
    strategy.send(STOP)
  except StopIteration:
    pass

  try:
    strategy.close()
  except StopIteration:
    pass
Example #11
0
class Catalog(object):
    """Product-catalog facade over a SQLAlchemy session.

    Manages items, categories, stock levels and basket reservations.
    Mutating methods run under the ``subtransaction`` decorator
    (presumably defined elsewhere in this module -- it appears to wrap the
    call in a session subtransaction; confirm).
    """

    def __init__(self, engine):
        self.engine = engine
        self.session = Session(bind=self.engine)

    @subtransaction
    def add_item(self, code, description, long_description=None):
        """Create a single Item and return the (pending) model object."""
        citem = models.Item(code=code,
                            description=description,
                            long_description=long_description)
        self.session.add(citem)
        return citem

    @subtransaction
    def add_items(self, items):
        """Bulk-create items from dicts, linking each to its categories.

        Each dict needs 'code', 'description' and 'categories' keys;
        'long description' is optional. The first listed category becomes
        the primary one.
        """
        for item in items:
            # KeyErrors not caught if missing required field

            try:
                long_desc = item['long description']
            except KeyError:
                long_desc = None

            citem = models.Item(code=item['code'],
                                description=item['description'],
                                description_lower=item['description'].lower(),
                                long_description=long_desc)

            # Only the first category of the list is flagged as primary.
            primary=True
            for cat in item['categories']:
                q = self.session.query(models.Category).\
                  filter(models.Category.name == cat)
                cater = q.first()

                icater = models.ItemCategory(item=citem, category=cater,
                                             primary=primary)
                citem.categories.append(icater)
                primary=False
            self.session.add(citem)

    def add_category(self, name):
        """Create a single category (delegates to add_categories)."""
        self.add_categories([name])

    @subtransaction
    def add_categories(self, names):
        """Create one Category row per name."""
        for name in names:
            cater = models.Category(name=name)
            self.session.add(cater)

    # def remove_category: should make sure that zero use
    # def rename_category

    @subtransaction
    def add_item_category(self, item_id, category_id, primary=False):
        """Link an item to a category by ids; return the link object."""
        catitem = models.ItemCategory(item_id=item_id, category_id=category_id,
                                      primary=primary)
        self.session.add(catitem)
        return catitem

    # def remove_item_category

    def get_item(self, code, as_object=False):
        """Fetch an item by code.

        Returns the model object when as_object is True, otherwise an
        (id, code, description, long_description) tuple; None if absent.
        """
        q = self.session.query(models.Item).filter(models.Item.code == code)
        item = q.first()
        if item is None:
            return None
        if as_object is True:
            return item
        return (item.id, item.code, item.description, item.long_description)

    def remove_item(self, code):
        """Delete the item with the given code inside a subtransaction."""
        self.session.begin(subtransactions=True)
        # NOTE(review): `q` receives the deleted-row count and is unused.
        q = self.session.query(models.Item).filter(models.Item.code == code).\
          delete()
        self.session.commit()

    @subtransaction
    def update_item(self, code, description=None, long_description=None,
                    new_code=None):
        """Update the non-None fields of the item identified by code.

        NOTE(review): raises AttributeError (item is None) when the code
        does not exist -- confirm whether that is intended.
        """
        q = self.session.query(models.Item).filter(models.Item.code == code)
        item = q.first()

        if new_code is not None:
            item.code = new_code

        if description is not None:
            item.description = description
            item.description_lower = description.lower()

        if long_description is not None:
            item.long_description = long_description

    def get_stock(self, code, as_object=False):
        """Fetch the stock record for an item code.

        Returns the StockItem object when as_object is True, otherwise an
        (id, price, count) tuple; None if no stock row exists.
        """
        q = self.session.query(models.Item, models.StockItem).\
          with_entities(models.StockItem).\
          filter(models.Item.code == code).\
          filter(models.Item.id == models.StockItem.item_id)
        res = q.first()
        if res is None:
            return None

        if as_object is True:
            return res
        return (res.id, res.price, res.count)

    @subtransaction
    def add_stock(self, items):
        """Create a StockItem per dict ('code', 'count', 'price' keys)."""

        for item in items:
            q = self.session.query(models.Item).\
              filter(models.Item.code == item['code'])

            citem = q.first()
            stock = models.StockItem(item=citem, count=item['count'],
                                    price=item['price'])
            self.session.add(stock)

    @subtransaction
    def update_stock(self, code, count, price):
        """Add `count` to an item's stock and set its price.

        Creates the StockItem if missing; raises KeyError for an unknown
        item code.
        """
        q = self.session.query(models.Item, models.StockItem).\
              with_entities(models.StockItem).\
              join(models.StockItem.item).\
              filter(models.Item.code == code)

        stock = q.first()
        if stock is None:
            # No stock row yet: create one if the item itself exists.
            q = self.session.query(models.Item).\
              filter(models.Item.code == code)
            item = q.first()
            if item is not None:
                stock = models.StockItem(item=item, count=count, price=price)
                self.session.add(stock)
            else:
                raise KeyError('Unknown item code')
        else:
            # Existing stock: count is incremented, price overwritten.
            stock.count += count
            stock.price = price

    @subtransaction
    def add_items_with_stock(self, items):
        """Create categories, items and stock rows from one list of dicts."""
        categories = set()
        for item in items:
            for x in item['categories']:
                categories.add(x)
        self.add_categories(categories)

        self.add_items(items)
        self.add_stock(items)

    def list_items(self, sort_key='description',
                   ascending=True, page=1, page_size=10):
        """Return a paginated, sorted page of items as JSON-able dicts.

        Joins each item's primary category and stock row, plus the summed
        basket reservations (outer join; NULL when none).
        """
        sq = self.session.query(models.Reservation.stock_item_id,
                                func.sum(models.Reservation.count).\
                                label('reserved')).\
                                group_by(models.Reservation.stock_item_id).\
                                subquery()
        q = self.session.query(models.Item, models.Category,
                               models.ItemCategory, models.StockItem,
                               sq.c.reserved).\
          with_entities(models.Item.code, models.Item.description,
                        models.Category.name, models.StockItem.price,
                        models.StockItem.count, sq.c.reserved).\
          join(models.StockItem).\
          join(models.ItemCategory,
               models.Item.id == models.ItemCategory.item_id).\
          join(models.Category,
               models.Category.id == models.ItemCategory.category_id).\
          filter(models.ItemCategory.primary == True).\
          outerjoin(sq, models.StockItem.id == sq.c.stock_item_id)

        q = ordering(q, ascending, sort_key)
        q = pagination(q, page, page_size)

        res = [item_to_json(*x) for x in q]
        return res

    def search_items(self, prefix, price_range, sort_key='description',
                     ascending=True, page=1, page_size=10):
        """Like list_items, filtered by case-insensitive description prefix
        and an inclusive (low, high) price range."""
        sq = self.session.query(models.Reservation.stock_item_id,
                                func.sum(models.Reservation.count).\
                                label('reserved')).\
                                group_by(models.Reservation.stock_item_id).\
                                subquery()

        q = self.session.query(models.Item, models.Category,
                               models.ItemCategory, models.StockItem,
                               sq.c.reserved).\
          with_entities(models.Item.code, models.Item.description,
                        models.Category.name, models.StockItem.price,
                        models.StockItem.count,
                        sq.c.reserved).\
          join(models.StockItem).\
          join(models.ItemCategory,
               models.Item.id == models.ItemCategory.item_id).\
          join(models.Category,
               models.Category.id == models.ItemCategory.category_id).\
          filter(models.StockItem.price.between(*price_range),
                 models.Item.description_lower.like('{:s}%'.format(prefix.lower())),
                 models.ItemCategory.primary == True).\
          outerjoin(sq, models.StockItem.id == sq.c.stock_item_id)

        q = ordering(q, ascending, sort_key)
        q = pagination(q, page, page_size)

        res = [item_to_json(*x) for x in q]
        return res

    def list_items_by_prices(self, prices, sort_key='price', prefix=None,
                             ascending=True, page=1, page_size=10):
        """List items bucketed into price groups.

        `prices` is turned into CASE branches by pg_cases (defined
        elsewhere); rows falling outside every bucket (price_group == -1)
        are excluded. Each result dict gains a 'price_group' key.
        """
        pgs = pg_cases(prices)
        pg_case = sqla.case(pgs, else_ = -1).label('price_group')

        sq = self.session.query(models.Reservation.stock_item_id,
                                func.sum(models.Reservation.count).\
                                label('reserved')).\
                                group_by(models.Reservation.stock_item_id).\
                                subquery()

        q = self.session.query(models.Item, models.Category,
                               models.ItemCategory, models.StockItem,
                               sq.c.reserved).\
                               with_entities(pg_case, models.Item.code,
                                             models.Item.description,
                                             models.Category.name,
                                             models.StockItem.price,
                                             models.StockItem.count,
                                             sq.c.reserved)
        if prefix is not None:
            q = q.filter(models.Item.description_lower.like('{:s}%'.format(prefix.lower())))

        q = q.join(models.StockItem.item).\
          outerjoin(sq, models.StockItem.id == sq.c.stock_item_id).\
          filter(models.ItemCategory.item_id == models.Item.id,
                 models.ItemCategory.category_id == models.Category.id,
                 models.ItemCategory.primary == True,
                 pg_case >= 0)

        q = pg_ordering(q, ascending)
        q = pagination(q, page, page_size)
        def to_dict(x):
            # First column is the price group; the rest feed item_to_json.
            tmp = item_to_json(*x[1:])
            tmp.update({'price_group': x[0]})
            return tmp
        res = [to_dict(x) for x in q]
        return res

    def _get_reservations(self, stock_id):
        """Total reserved quantity for a stock item (0 when none)."""
        q = self.session.query(func.sum(models.Reservation.count)).\
          filter(models.StockItem.id == stock_id).\
          filter(models.StockItem.id == models.Reservation.stock_item_id)

        res = q.first()
        if res is None or res[0] is None:
            return 0

        return res[0]

    def _get_reservation(self, basket_item_id):
        """The Reservation linked to a basket item, or None."""
        q = self.session.query(models.Basket, models.BasketItem,
                               models.Reservation).\
          with_entities(models.Reservation).\
          join(models.Reservation.basket_item).\
          filter(models.BasketItem.id == basket_item_id)
        res = q.first()
        return res

    @subtransaction
    def _update_reservation(self, stock, basket_item):
        """Create or resize the reservation backing a basket item.

        The reserved quantity is capped by what is still available:
        stock.count minus all other reservations on that stock item.
        """
        reservations = self._get_reservations(basket_item.stock_item_id)
        # can reserve (scount - reservations)
        reservation = self._get_reservation(basket_item.id)

        if reservation is not None:
            # Resizing: this reservation's own count is given back first.
            rcount = min(basket_item.count,
                         stock.count - reservations + reservation.count)
            reservation.count = rcount
        else:
            rcount = min(basket_item.count, stock.count - reservations)
            reservation = models.Reservation(stock_item=stock,
                                             basket_item=basket_item,
                                             count=rcount)
            self.session.add(reservation)
Example #12
0
def reindex(clear=False):
  """
  Reindex all content; optionally clear index before. All is done in a
  single transaction.

  :param clear: when True, wipe the whole index before reindexing.
  """
  svc = current_app.services['indexing']
  adapted = svc.adapted
  index = svc.app_state.indexes['default']
  # Dedicated autocommit session bound to the app's engine, so indexing
  # transactions are demarcated explicitly with session.begin() below.
  session = Session(bind=current_app.db.session.get_bind(None, None),
                    autocommit=True)

  setattr(session, '_model_changes', {}) # please flask-sqlalchemy <= 1.0

  indexed = set()  # object_keys already written, to skip duplicates
  cleared = set()  # object_types already purged from the index

  with AsyncWriter(index) as writer:
    if clear:
      print "*" * 80
      print "WILL CLEAR INDEX BEFORE REINDEXING"
      print "*" * 80
      writer.writer.mergetype = CLEAR

    for cls in sorted(svc.app_state.indexed_classes, key=lambda c: c.__name__):
      current_object_type = cls._object_type()

      # When not doing a full clear, purge this object type's documents
      # once before re-adding them, so stale entries don't linger.
      if not clear and  current_object_type not in cleared:
        writer.delete_by_term('object_type', current_object_type)
        cleared.add(current_object_type)

      adapter = adapted.get(current_object_type)

      if not adapter or not adapter.indexable:
        continue

      name = cls.__name__

      # One read transaction per indexed class.
      with session.begin():
        q = session.query(cls).options(sa.orm.lazyload('*'))
        count = q.count()

        if count == 0:
          print "{}: 0".format(name)
          continue

        widgets = [name,
                   ': ', pb.Counter(), '/{}'.format(count),
                   ' ', pb.Timer(),
                   ' ',pb.Percentage(),
                   ' ', pb.Bar(),
                   ' ', pb.ETA(),
                   ]
        progress = pb.ProgressBar(widgets=widgets, maxval=count)
        progress.start()
        count_current = 0

        # Stream rows to keep memory bounded on large tables.
        for obj in q.yield_per(1000):
          if obj.object_type != current_object_type:
            # may happen if obj is a subclass and its parent class is also
            # indexable
            continue

          object_key = obj.object_key

          if object_key in indexed:
            continue
          document = svc.get_document(obj, adapter)
          writer.add_document(**document)
          indexed.add(object_key)
          count_current += 1
          try:
            # maxval may undercount when subclasses share a table; ignore
            # the overflow rather than abort indexing.
            progress.update(count_current)
          except ValueError:
            pass

        progress.finish()
# Exemple #13
def reindex(clear=False, progressive=False, batch_size=None):
    """Reindex all content; optionally clear index before.

    All is done in a single transaction by default.

    :param clear: clear index content.
    :param progressive: don't run in a single transaction.
    :param batch_size: number of documents to process before writing to the
                     index. Unused in single transaction mode. If `None` then
                     all documents of same content type are written at once.
    """
    index_service = get_service("indexing")
    adapted = index_service.adapted
    index = index_service.app_state.indexes["default"]
    # Dedicated autocommit session: transactions are demarcated explicitly
    # with session.begin() below, one per indexed class.
    session = Session(bind=db.session.get_bind(None, None), autocommit=True)

    session._model_changes = {}  # please flask-sqlalchemy <= 1.0
    indexed = set()  # object_keys already written, to skip duplicates
    cleared = set()  # object_types already purged from the index
    if batch_size is not None:
        batch_size = int(batch_size)

    # The strategy generator owns the index writer; documents and
    # COMMIT/STOP control tokens are fed to it through .send().
    strategy = progressive_mode if progressive else single_transaction
    strategy = strategy(index,
                        clear=clear,
                        progressive=progressive,
                        batch_size=batch_size)
    next(strategy)  # starts generator

    for cls in sorted(index_service.app_state.indexed_classes,
                      key=lambda c: c.__name__):
        current_object_type = cls._object_type()

        # When not doing a full clear, purge this object type's documents
        # once before re-adding them, so stale entries don't linger.
        if not clear and current_object_type not in cleared:
            strategy.send(current_object_type)
            cleared.add(current_object_type)

        adapter = adapted.get(current_object_type)

        if not adapter or not adapter.indexable:
            continue

        name = cls.__name__

        with session.begin():
            query = session.query(cls).options(sa.orm.lazyload("*"))
            try:
                count = query.count()
            except Exception as e:
                current_app.logger.error(
                    "Indexing error on class {}: {}".format(name, repr(e)))
                continue

            # FIX: the class header was printed up to three times (and twice
            # when count == 0); print it exactly once.
            print("*" * 79)
            print("{}".format(name))
            if count == 0:
                continue

            count_current = 0
            with tqdm(total=count) as bar:
                # Stream rows to keep memory bounded on large tables.
                for obj in query.yield_per(1000):
                    if obj.object_type != current_object_type:
                        # may happen if obj is a subclass and its parent class
                        # is also indexable
                        bar.update()
                        continue

                    object_key = obj.object_key

                    if object_key in indexed:
                        bar.update()
                        continue
                    document = index_service.get_document(obj, adapter)
                    strategy.send(document)
                    indexed.add(object_key)
                    # FIX: count_current was never incremented, so in batch
                    # mode the modulo test below was true for every document
                    # and a COMMIT was sent per document instead of per batch.
                    count_current += 1

                    if batch_size is not None and (count_current %
                                                   batch_size) == 0:
                        strategy.send(COMMIT)

                    # FIX: exactly one progress tick per object (the batch
                    # branch previously issued an extra bar.update()).
                    bar.update()

            if batch_size is None:
                strategy.send(COMMIT)

        strategy.send(COMMIT)

    try:
        strategy.send(STOP)
    except StopIteration:
        pass

    try:
        strategy.close()
    except StopIteration:
        pass
# Exemple #14
# rollback ###################################################
#     Rolls back the current transaction, discarding changes
# made since the last commit (including any failed statement
# being rolled back).
session.rollback()

# commit #####################################################
#      Commit the transaction and expires the state of all
# instances present after the commit is complete..
session.commit()

# close ######################################################
#     Issues a expunge_all(), and releases any
# transactional/connection resources.
session.close()

# transaction demarcation in auto commit mode
session = Session(autocommit=True)
session.begin()
try:
    item1 = session.query(User).get(1)
    item2 = session.query(User).get(2)
    item1.foo = 'bar'
    item2.bar = 'foo'
    # commit ends the transaction started with session.begin()
    session.commit()
except:
    # on any error, undo the partial changes and re-raise
    session.rollback()
    raise

with session.begin():
    item1 = session.query(User).get(1)
    item2 = session.query(User).get(2)
    item1.foo = 'bar'
    item2.bar = 'foo'