async def execute_command(self, connection: 'aiomongo.Connection',
                          generator: Iterable[_Run],
                          write_concern: WriteConcern) -> dict:
    """Execute the queued runs using MongoDB write commands.

    Each run's reply is merged into a single aggregate result document.
    Raises BulkWriteError if any write error or write-concern error was
    reported; otherwise returns the merged result dict.
    """
    # nModified exists only in write-command replies, never in legacy ops.
    full_result = {
        'writeErrors': [],
        'writeConcernErrors': [],
        'nInserted': 0,
        'nUpserted': 0,
        'nMatched': 0,
        'nModified': 0,
        'nRemoved': 0,
        'upserted': [],
    }

    for run in generator:
        # Command name must come first on the wire; SON keeps key order.
        cmd = SON([(_COMMANDS[run.op_type], self.collection.name)])
        cmd['ordered'] = self.ordered
        if write_concern.document:
            cmd['writeConcern'] = write_concern.document
        if self.bypass_doc_val and connection.max_wire_version >= 4:
            cmd['bypassDocumentValidation'] = True

        reply = await self._do_batched_write_command(
            self.namespace, run.op_type, cmd, run.ops, True,
            self.collection.codec_options, connection)
        _merge_command(run, full_result, reply)

        # An ordered bulk stops at the first write error; errors at the
        # write-concern level alone (e.g. wtimeout) do not stop execution.
        if self.ordered and full_result['writeErrors']:
            break

    write_errors = full_result['writeErrors']
    if write_errors or full_result['writeConcernErrors']:
        # Report errors in submission order (sorting [] is a no-op).
        write_errors.sort(key=lambda error: error['index'])
        raise BulkWriteError(full_result)
    return full_result
def _execute_bulk(self, bulk):
    """Run every operation queued on *bulk* and merge the batch replies.

    Raises InvalidOperation when there is nothing to execute or the bulk
    was already run; raises BulkWriteError on write or write-concern
    errors when the write concern is acknowledged. Fires with a
    BulkWriteResult otherwise.
    """
    # Guard against empty or re-used bulk objects.
    if not bulk.ops:
        raise InvalidOperation("No operations to execute")
    if bulk.executed:
        raise InvalidOperation("Bulk operations can only be executed once")
    bulk.executed = True

    # Ordered bulks preserve submission order; unordered ones may be
    # regrouped by operation type.
    runs = bulk.gen_ordered() if bulk.ordered else bulk.gen_unordered()

    full_result = {
        "writeErrors": [],
        "writeConcernErrors": [],
        "nInserted": 0,
        "nUpserted": 0,
        "nMatched": 0,
        "nModified": 0,
        "nRemoved": 0,
        "upserted": [],
    }

    for run in runs:
        reply = yield self._execute_batch_command(run.op_type, run.ops,
                                                  bulk.ordered)
        _merge_command(run, full_result, reply)
        # Ordered execution halts at the first write error.
        if bulk.ordered and full_result["writeErrors"]:
            break

    if self.write_concern.acknowledged:
        write_errors = full_result["writeErrors"]
        if write_errors or full_result["writeConcernErrors"]:
            # Present errors in submission order (sorting [] is a no-op).
            write_errors.sort(key=lambda err: err["index"])
            raise BulkWriteError(full_result)

    defer.returnValue(
        BulkWriteResult(full_result, self.write_concern.acknowledged))
def _execute_bulk(self, bulk):
    """Execute all queued bulk operations and collect a merged result.

    NOTE(review): this definition appears to duplicate the preceding
    _execute_bulk almost verbatim — confirm whether one copy can be
    removed.
    """
    # Reject empty or already-executed bulks up front.
    if not bulk.ops:
        raise InvalidOperation("No operations to execute")
    if bulk.executed:
        raise InvalidOperation("Bulk operations can only be executed once")
    bulk.executed = True

    if bulk.ordered:
        run_iter = bulk.gen_ordered()
    else:
        run_iter = bulk.gen_unordered()

    merged = dict(
        writeErrors=[],
        writeConcernErrors=[],
        nInserted=0,
        nUpserted=0,
        nMatched=0,
        nModified=0,
        nRemoved=0,
        upserted=[],
    )

    for batch in run_iter:
        batch_reply = yield self._execute_batch_command(
            batch.op_type, batch.ops, bulk.ordered)
        _merge_command(batch, merged, batch_reply)
        if bulk.ordered and merged["writeErrors"]:
            # An ordered bulk stops on the first write error.
            break

    acknowledged = self.write_concern.acknowledged
    if acknowledged and (merged["writeErrors"] or merged["writeConcernErrors"]):
        # Sort errors back into submission order before raising.
        merged["writeErrors"].sort(key=lambda e: e["index"])
        raise BulkWriteError(merged)

    defer.returnValue(BulkWriteResult(merged, acknowledged))