def insert(collection_name, docs, check_keys, safe, last_error_args, continue_on_error, uuid_subtype):
    """Get an **insert** (OP_INSERT, opcode 2002) message.

    Returns a ``(request_id, message, max_bson_size)`` tuple, where
    ``max_bson_size`` is the length of the largest encoded document.
    With ``safe`` a getLastError query is appended so the server
    acknowledges the insert (its request_id replaces the insert's).

    :raises InvalidOperation: if ``docs`` is empty.

    .. note:: As of PyMongo 2.6, this function is no longer used. It
       is being kept (with tests) for backwards compatibility with 3rd
       party libraries that may currently be using it, but will likely
       be removed in a future release.
    """
    options = 0
    if continue_on_error:
        options += 1  # bit 0 of the flags field is ContinueOnError
    # Body layout: int32 flags, cstring full collection name, then the
    # concatenated BSON documents.
    data = struct.pack("<i", options)
    data += bson._make_c_string(collection_name)
    encoded = [bson.BSON.encode(doc, check_keys, uuid_subtype) for doc in docs]
    if not encoded:
        raise InvalidOperation("cannot do an empty bulk insert")
    # max() consumes the map iterator directly; no need to build a list.
    max_bson_size = max(map(len, encoded))
    data += EMPTY.join(encoded)
    if safe:
        (_, insert_message) = __pack_message(2002, data)
        (request_id, error_message, _) = __last_error(collection_name, last_error_args)
        return (request_id, insert_message + error_message, max_bson_size)
    else:
        (request_id, insert_message) = __pack_message(2002, data)
        return (request_id, insert_message, max_bson_size)
def get_more(collection_name, num_to_return, cursor_id):
    """Get a **getMore** (OP_GET_MORE, opcode 2005) message.

    Returns the ``(request_id, message)`` pair produced by
    ``__pack_message``.
    """
    # Body layout: int32 zero (reserved), cstring full collection name,
    # int32 numberToReturn, int64 cursorID.
    parts = [
        __ZERO,
        bson._make_c_string(collection_name),
        struct.pack("<i", num_to_return),
        struct.pack("<q", cursor_id),
    ]
    return __pack_message(2005, b"".join(parts))
def send_message(self, operation, collection, message, query_opts=_ZERO):
    """Frame *message* with the 16-byte MongoDB wire header and write it
    to this connection's transport.

    :param operation: wire-protocol opcode (e.g. 2004 for OP_QUERY).
    :param collection: full collection name, or a falsy value when the
        operation carries no collection name field.
    :param message: operation-specific payload bytes.
    :param query_opts: flags field prepended to the body.
    """
    # `x and a or b` is the fragile pre-2.5 ternary idiom; use the real
    # conditional expression instead.
    fullname = bson._make_c_string(collection) if collection else b""
    message = query_opts + fullname + message
    # Header: messageLength (including the 16-byte header itself),
    # requestID, responseTo (0 — we are not replying), opCode.
    header = struct.pack("<iiii", 16 + len(message), self.__id, 0, operation)
    self.transport.write(header + message)
    self.__id += 1
def delete(collection_name, spec, safe, last_error_args, uuid_subtype):
    """Get a **delete** (OP_DELETE, opcode 2006) message.

    Returns ``(request_id, message, len(encoded_spec))``.  With ``safe``
    a getLastError query is appended so the server acknowledges the
    delete; its request_id replaces the delete's.
    """
    encoded = bson.BSON.encode(spec, False, uuid_subtype)
    # Body layout: int32 zero (reserved), cstring full collection name,
    # int32 flags (zero), BSON selector document.
    body = __ZERO + bson._make_c_string(collection_name) + __ZERO + encoded
    request_id, remove_message = __pack_message(2006, body)
    if not safe:
        return (request_id, remove_message, len(encoded))
    request_id, error_message, _ = __last_error(collection_name, last_error_args)
    return (request_id, remove_message + error_message, len(encoded))
def query(options, collection_name, num_to_skip, num_to_return, query, field_selector=None, uuid_subtype=OLD_UUID_SUBTYPE):
    """Get a **query** (OP_QUERY, opcode 2004) message.

    Returns ``(request_id, message, max_bson_size)``, where
    ``max_bson_size`` is the larger of the encoded query document and
    the (optional) encoded field selector.
    """
    encoded_query = bson.BSON.encode(query, False, uuid_subtype)
    # Body layout: uint32 flags, cstring full collection name,
    # int32 numberToSkip, int32 numberToReturn, BSON query document,
    # optional BSON field-selector document.
    pieces = [
        struct.pack("<I", options),
        bson._make_c_string(collection_name),
        struct.pack("<i", num_to_skip),
        struct.pack("<i", num_to_return),
        encoded_query,
    ]
    max_bson_size = len(encoded_query)
    if field_selector is not None:
        encoded_fields = bson.BSON.encode(field_selector, False, uuid_subtype)
        pieces.append(encoded_fields)
        max_bson_size = max(len(encoded_fields), max_bson_size)
    request_id, query_message = __pack_message(2004, b"".join(pieces))
    return (request_id, query_message, max_bson_size)
def update(collection_name, upsert, multi, spec, doc, safe, last_error_args, check_keys, uuid_subtype):
    """Get an **update** (OP_UPDATE, opcode 2001) message.

    Returns ``(request_id, message, len(encoded_doc))``.  With ``safe``
    a getLastError query is appended so the server acknowledges the
    update; its request_id replaces the update's.
    """
    # Flags field: bit 0 = Upsert, bit 1 = MultiUpdate.
    flags = (1 if upsert else 0) | (2 if multi else 0)
    encoded_spec = bson.BSON.encode(spec, False, uuid_subtype)
    encoded_doc = bson.BSON.encode(doc, check_keys, uuid_subtype)
    # Body layout: int32 zero (reserved), cstring full collection name,
    # int32 flags, BSON selector document, BSON update document.
    body = b"".join([
        __ZERO,
        bson._make_c_string(collection_name),
        struct.pack("<i", flags),
        encoded_spec,
        encoded_doc,
    ])
    request_id, update_message = __pack_message(2001, body)
    if not safe:
        return (request_id, update_message, len(encoded_doc))
    request_id, error_message, _ = __last_error(collection_name, last_error_args)
    return (request_id, update_message + error_message, len(encoded_doc))
def _do_batched_insert(collection_name, docs, check_keys, safe, last_error_args, continue_on_error, uuid_subtype, client):
    """Insert `docs` using multiple batches.

    Encodes each document and sends OP_INSERT (opcode 2002) messages
    kept under ``client.max_message_size``, via ``client._send_message``.

    :raises InvalidOperation: if ``docs`` is empty.
    :raises InvalidDocument: if any single encoded document exceeds
        ``client.max_bson_size``.
    :raises OperationFailure: re-raised per the safe/continue_on_error
        rules described inline below.
    """
    def _insert_message(insert_message, send_safe):
        """Build the insert message with header and GLE.
        """
        request_id, final_message = __pack_message(2002, insert_message)
        if send_safe:
            # Append a getLastError query; its request_id supersedes the
            # insert's so the caller can match the acknowledgement.
            request_id, error_message, _ = __last_error(
                collection_name, last_error_args)
            final_message += error_message
        return request_id, final_message

    if not docs:
        raise InvalidOperation("cannot do an empty bulk insert")

    # Holds an OperationFailure seen mid-stream when continue_on_error is
    # set, re-raised only after all batches have been attempted.
    last_error = None
    # Prefix shared by every batch: int32 flags (bit 0 = ContinueOnError)
    # followed by the cstring collection name.
    begin = struct.pack("<i", int(continue_on_error))
    begin += bson._make_c_string(collection_name)
    message_length = len(begin)
    data = [begin]
    for doc in docs:
        encoded = bson.BSON.encode(doc, check_keys, uuid_subtype)
        encoded_length = len(encoded)
        if encoded_length > client.max_bson_size:
            raise InvalidDocument("BSON document too large (%d bytes)"
                                  " - the connected server supports"
                                  " BSON document sizes up to %d"
                                  " bytes." % (encoded_length, client.max_bson_size))
        message_length += encoded_length
        if message_length < client.max_message_size:
            data.append(encoded)
            continue

        # We have enough data, send this message.
        # Note: `encoded` (the doc that pushed us over the limit) is NOT
        # in `data`; it starts the next batch below.
        # Intermediate batches are acknowledged unless the caller asked
        # for unacknowledged writes *with* continue_on_error — that is
        # the only combination where a mid-stream failure may be ignored.
        send_safe = safe or not continue_on_error
        try:
            client._send_message(_insert_message(EMPTY.join(data), send_safe), send_safe)
        # Exception type could be OperationFailure or a subtype
        # (e.g. DuplicateKeyError)
        except OperationFailure as exc:
            # Like it says, continue on error...
            if continue_on_error:
                # Store exception details to re-raise after the final batch.
                last_error = exc
            # With unacknowledged writes just return at the first error.
            elif not safe:
                return
            # With acknowledged writes raise immediately.
            else:
                raise
        # Start the next batch with the shared prefix plus the document
        # that did not fit in the batch just sent.
        message_length = len(begin) + encoded_length
        data = [begin, encoded]

    # Send the final (possibly only) batch with the caller's own
    # acknowledgement setting.
    client._send_message(_insert_message(EMPTY.join(data), safe), safe)
    # Re-raise any exception stored due to continue_on_error
    if last_error is not None:
        raise last_error
# NOTE(review): this is a byte-for-byte duplicate of an earlier
# `_do_batched_insert` definition in this file; at import time this later
# definition silently shadows the earlier one. One of the two should be
# removed — confirm which copy is canonical before deleting.
def _do_batched_insert(collection_name, docs, check_keys, safe, last_error_args, continue_on_error, uuid_subtype, client):
    """Insert `docs` using multiple batches.

    Encodes each document and sends OP_INSERT (opcode 2002) messages
    kept under ``client.max_message_size``, via ``client._send_message``.

    :raises InvalidOperation: if ``docs`` is empty.
    :raises InvalidDocument: if any single encoded document exceeds
        ``client.max_bson_size``.
    :raises OperationFailure: re-raised per the safe/continue_on_error
        rules described inline below.
    """
    def _insert_message(insert_message, send_safe):
        """Build the insert message with header and GLE.
        """
        request_id, final_message = __pack_message(2002, insert_message)
        if send_safe:
            # Append a getLastError query; its request_id supersedes the
            # insert's so the caller can match the acknowledgement.
            request_id, error_message, _ = __last_error(collection_name, last_error_args)
            final_message += error_message
        return request_id, final_message

    if not docs:
        raise InvalidOperation("cannot do an empty bulk insert")

    # Holds an OperationFailure seen mid-stream when continue_on_error is
    # set, re-raised only after all batches have been attempted.
    last_error = None
    # Prefix shared by every batch: int32 flags (bit 0 = ContinueOnError)
    # followed by the cstring collection name.
    begin = struct.pack("<i", int(continue_on_error))
    begin += bson._make_c_string(collection_name)
    message_length = len(begin)
    data = [begin]
    for doc in docs:
        encoded = bson.BSON.encode(doc, check_keys, uuid_subtype)
        encoded_length = len(encoded)
        if encoded_length > client.max_bson_size:
            raise InvalidDocument("BSON document too large (%d bytes)"
                                  " - the connected server supports"
                                  " BSON document sizes up to %d"
                                  " bytes." % (encoded_length, client.max_bson_size))
        message_length += encoded_length
        if message_length < client.max_message_size:
            data.append(encoded)
            continue

        # We have enough data, send this message.
        # Note: `encoded` (the doc that pushed us over the limit) is NOT
        # in `data`; it starts the next batch below.
        # Intermediate batches are acknowledged unless the caller asked
        # for unacknowledged writes *with* continue_on_error — that is
        # the only combination where a mid-stream failure may be ignored.
        send_safe = safe or not continue_on_error
        try:
            client._send_message(_insert_message(EMPTY.join(data), send_safe), send_safe)
        # Exception type could be OperationFailure or a subtype
        # (e.g. DuplicateKeyError)
        except OperationFailure as exc:
            # Like it says, continue on error...
            if continue_on_error:
                # Store exception details to re-raise after the final batch.
                last_error = exc
            # With unacknowledged writes just return at the first error.
            elif not safe:
                return
            # With acknowledged writes raise immediately.
            else:
                raise
        # Start the next batch with the shared prefix plus the document
        # that did not fit in the batch just sent.
        message_length = len(begin) + encoded_length
        data = [begin, encoded]

    # Send the final (possibly only) batch with the caller's own
    # acknowledgement setting.
    client._send_message(_insert_message(EMPTY.join(data), safe), safe)
    # Re-raise any exception stored due to continue_on_error
    if last_error is not None:
        raise last_error