def _query_server_time(store):
    """Return the database server's current timestamp (tzinfo stripped).

    Be careful: this opens a new connection, queries the server and
    closes the connection again — that takes ~150ms.
    """
    result = store.execute(Select([StatementTimestamp()]))
    server_time = result.get_one()[0]
    # Storm removes tzinfo on its datetime columns. Do the same here
    # or the comparison on testTimestamp will fail.
    return server_time.replace(tzinfo=None)
def close(self):
    """Closes the inventory process

    :raises: :exc:`AssertionError` if the inventory is already closed
    """
    if not self.is_open():
        # FIXME: We should be raising a better error here.
        raise AssertionError("You can not close an inventory which is "
                             "already closed!")

    for inventory_item in self.inventory_items:
        counted = inventory_item.actual_quantity
        if counted is None or inventory_item.recorded_quantity == counted:
            # Item was never counted, or count matched the records:
            # nothing to adjust for it.
            continue
        # FIXME: We set this flag here because, when generating a
        # sintegra file, even an item that wasn't really adjusted
        # (e.g. adjustment qty below is 0) needs to be specified, and
        # not setting it would make self.get_cost return 0. Maybe we
        # should resolve this in another way.
        # We don't call item.adjust since it needs an invoice number
        inventory_item.is_adjusted = True

    self.close_date = StatementTimestamp()
    self.status = Inventory.STATUS_CLOSED
def _on_object_added(self, obj_info):
    """Storm hook: attach a fresh transaction entry when this object
    is added to a store (user/station intentionally left unset)."""
    store = obj_info.get("store")
    self.te = TransactionEntry(
        store=store,
        te_time=StatementTimestamp(),
        user_id=None,
        station_id=None)
def _update_te(self):
    """Mark this object's transaction entry dirty and stamp it with the
    current server time, user and station."""
    current_user = get_current_user(self.store)
    current_station = get_current_station(self.store)

    te = self.te
    te.dirty = True
    te.te_time = StatementTimestamp()
    # user/station may be None during e.g. bootstrap; store None then.
    te.user_id = current_user and current_user.id
    te.station_id = current_station and current_station.id
def _process_pending_objs(self):
    """Run the on_delete/on_create/on_update hooks for all pending
    objects, looping until the hooks themselves stop queueing new
    pending objects (a fixed-point loop).

    NOTE(review): statement order here is load-bearing — the set
    subtractions must happen before ``_reset_pending_objs`` and the
    hooks run in delete → create → modify order.
    """
    # Fields to update the transaction entry for modified objs.
    # Captured once, outside the loop, so every object processed in
    # this call gets the same timestamp/user/station.
    user = get_current_user(self)
    station = get_current_station(self)
    station_id = station and station.id
    te_time = StatementTimestamp()
    user_id = user and user.id

    created_objs = set()
    modified_objs = set()
    deleted_objs = set()
    # Objects whose hook already ran in a previous iteration; used to
    # avoid running a hook twice when a hook re-queues an object.
    processed_objs = set()

    while self._need_process_pending():
        # Merge every pending set into the local working sets.
        created_objs.update(*self._created_object_sets)
        modified_objs.update(*self._modified_object_sets)
        deleted_objs.update(*self._deleted_object_sets)

        # Remove already processed objs (can happen when an obj is
        # added here again when processing the hooks below).
        # Precedence: deletion beats creation beats modification.
        modified_objs -= processed_objs | created_objs | deleted_objs
        created_objs -= processed_objs | deleted_objs
        deleted_objs -= processed_objs

        # Make sure while will be False on next iteration. Unless any
        # object is added when processing the hooks below.
        self._reset_pending_objs()

        for deleted_obj in deleted_objs:
            deleted_obj.on_delete()
            processed_objs.add(deleted_obj)

        for created_obj in created_objs:
            created_obj.on_create()
            processed_objs.add(created_obj)

        for modified_obj in modified_objs:
            # This is to support migration from domainv1
            if hasattr(modified_obj, 'te_modified'):
                modified_obj.te_modified.te_time = te_time
                modified_obj.te_modified.station_id = station_id
                modified_obj.te_modified.user_id = user_id
            # And also from domainv2
            else:
                modified_obj.te.dirty = True
                modified_obj.te.te_time = te_time
                modified_obj.te.station_id = station_id
                modified_obj.te.user_id = user_id

            modified_obj.on_update()
            processed_objs.add(modified_obj)

            # Invalidate the modified objects in other possible related
            # transactions
            autoreload_object(modified_obj)
def _on_object_added(self, obj_info):
    """Storm hook: create this object's transaction entry stamped with
    the current user/station and register it as a created object."""
    store = obj_info.get("store")

    # Suspend implicit flushes while looking up the current user and
    # station, then re-enable them.
    store.block_implicit_flushes()
    user = get_current_user(store)
    station = get_current_station(store)
    store.unblock_implicit_flushes()

    self.te = TransactionEntry(
        store=store,
        te_time=StatementTimestamp(),
        user_id=user and user.id,
        station_id=station and station.id)

    store.add_created_object(self)