From 476a692d667f24057e8193b07392c359aec2016f Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Fri, 1 Mar 2024 11:48:22 -0500 Subject: [PATCH 01/39] convert find --- src/action.rs | 17 +++--- src/action/find.rs | 105 +++++++++++++++++++++++++++++++++++++ src/operation/find.rs | 27 ++-------- src/operation/find/test.rs | 80 ---------------------------- 4 files changed, 120 insertions(+), 109 deletions(-) create mode 100644 src/action/find.rs delete mode 100644 src/operation/find/test.rs diff --git a/src/action.rs b/src/action.rs index 81fa9866e..99452e605 100644 --- a/src/action.rs +++ b/src/action.rs @@ -10,6 +10,7 @@ mod delete; mod distinct; mod drop; mod drop_index; +mod find; mod list_collections; mod list_databases; mod list_indexes; @@ -147,7 +148,7 @@ macro_rules! action_impl { ) => { impl$(<$lt $(, $($at),+)?>)? std::future::IntoFuture for $action { type Output = $out; - type IntoFuture = $f_ty$(<$lt>)?; + type IntoFuture = $f_ty$(<$lt $(, $($at)+)?>)?; fn into_future($($args)+) -> Self::IntoFuture { $f_ty(Box::pin(async move { @@ -160,9 +161,9 @@ macro_rules! action_impl { type Output = $out; } - crate::action::action_impl_future_wrapper!($($lt)?, $f_ty, $out); + crate::action::action_impl_future_wrapper!($f_ty, $out $(, $lt)? $($(, $($at),+)?)?); - impl$(<$lt>)? std::future::Future for $f_ty$(<$lt>)? { + impl$(<$lt $(, $($at),+)?>)? std::future::Future for $f_ty$(<$lt $(, $($at),+)?>)? { type Output = $out; fn poll(mut self: std::pin::Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> std::task::Poll { @@ -175,7 +176,7 @@ macro_rules! action_impl { /// Synchronously execute this action. pub fn run(self) -> $sync_out { let $($wrap_args)+ = crate::sync::TOKIO_RUNTIME.block_on(std::future::IntoFuture::into_future(self)); - return $wrap_code + $wrap_code } } } @@ -183,14 +184,18 @@ macro_rules! action_impl { pub(crate) use action_impl; macro_rules! action_impl_future_wrapper { - (, $f_ty:ident, $out:ty) => { + ($f_ty:ident, $out:ty) => { /// Opaque future type for action execution. pub struct $f_ty(crate::BoxFuture<'static, $out>); }; - ($lt:lifetime, $f_ty:ident, $out:ty) => { + ($f_ty:ident, $out:ty, $lt:lifetime) => { /// Opaque future type for action execution. pub struct $f_ty<$lt>(crate::BoxFuture<$lt, $out>); }; + ($f_ty:ident, $out:ty, $lt:lifetime, $($at:ident),+) => { + /// Opaque future type for action execution. + pub struct $f_ty<$lt, $($at),+>(crate::BoxFuture<$lt, $out>); + }; } pub(crate) use action_impl_future_wrapper; diff --git a/src/action/find.rs b/src/action/find.rs new file mode 100644 index 000000000..56d472cfb --- /dev/null +++ b/src/action/find.rs @@ -0,0 +1,105 @@ +use std::time::Duration; + +use bson::{Bson, Document}; + +use crate::{ + coll::options::{CursorType, FindOptions, Hint}, + collation::Collation, + error::Result, + operation::Find as Op, + options::ReadConcern, + selection_criteria::SelectionCriteria, + ClientSession, + Collection, + Cursor, +}; + +use super::{action_impl, option_setters, ExplicitSession, ImplicitSession}; + +impl Collection { + /// Finds the documents in the collection matching `filter`. + pub fn find_2(&self, filter: Document) -> Find<'_, T> { + Find { + coll: self, + filter, + options: None, + session: ImplicitSession, + } + } +} + +#[cfg(feature = "sync")] +impl crate::sync::Collection { + /// Finds the documents in the collection matching `filter`. 
+ pub fn find_2(&self, filter: Document) -> Find<'_, T> { + self.async_collection.find_2(filter) + } +} + +#[must_use] +pub struct Find<'a, T, Session = ImplicitSession> { + coll: &'a Collection, + filter: Document, + options: Option, + session: Session, +} + +impl<'a, T, Session> Find<'a, T, Session> { + option_setters!(options: FindOptions; + allow_disk_use: bool, + allow_partial_results: bool, + batch_size: u32, + comment: String, + comment_bson: Bson, + cursor_type: CursorType, + hint: Hint, + limit: i64, + max: Document, + max_await_time: Duration, + max_scan: u64, + max_time: Duration, + min: Document, + no_cursor_timeout: bool, + projection: Document, + read_concern: ReadConcern, + return_key: bool, + selection_criteria: SelectionCriteria, + show_record_id: bool, + skip: u64, + sort: Document, + collation: Collation, + let_vars: Document, + ); +} + +impl<'a, T> Find<'a, T, ImplicitSession> { + /// Runs the query using the provided session. + pub fn session<'s>( + self, + value: impl Into<&'s mut ClientSession>, + ) -> Find<'a, T, ExplicitSession<'s>> { + Find { + coll: self.coll, + filter: self.filter, + options: self.options, + session: ExplicitSession(value.into()), + } + } +} + +action_impl! { + impl<'a, T> Action for Find<'a, T, ImplicitSession> { + type Future = FindFuture; + + async fn execute(mut self) -> Result> { + resolve_options!(self.coll, self.options, [read_concern, selection_criteria]); + + let find = Op::new(self.coll.namespace(), self.filter, self.options); + self.coll.client().execute_cursor_operation(find).await + } + + fn sync_wrap(out) -> Result> { + out.map(crate::sync::Cursor::new) + } + } +} diff --git a/src/operation/find.rs b/src/operation/find.rs index 947fc23cc..fdd08a49f 100644 --- a/src/operation/find.rs +++ b/src/operation/find.rs @@ -1,6 +1,3 @@ -#[cfg(test)] -mod test; - use crate::{ bson::{doc, Document}, cmap::{Command, RawCommandResponse, StreamDescription}, @@ -20,28 +17,12 @@ use crate::{ #[derive(Debug)] pub(crate) struct Find { ns: Namespace, - filter: Option, + filter: Document, options: Option>, } impl Find { - #[cfg(test)] - fn empty() -> Self { - Self::new( - Namespace { - db: String::new(), - coll: String::new(), - }, - None, - None, - ) - } - - pub(crate) fn new( - ns: Namespace, - filter: Option, - mut options: Option, - ) -> Self { + pub(crate) fn new(ns: Namespace, filter: Document, mut options: Option) -> Self { if let Some(ref mut options) = options { if let Some(ref comment) = options.comment { if options.comment_bson.is_none() { @@ -99,8 +80,8 @@ impl OperationWithDefaults for Find { append_options(&mut body, self.options.as_ref())?; - if let Some(ref filter) = self.filter { - body.insert("filter", filter.clone()); + if !self.filter.is_empty() { + body.insert("filter", self.filter.clone()); } Ok(Command::new_read( diff --git a/src/operation/find/test.rs b/src/operation/find/test.rs deleted file mode 100644 index 57bb6dd28..000000000 --- a/src/operation/find/test.rs +++ /dev/null @@ -1,80 +0,0 @@ -use std::time::Duration; - -use crate::{ - bson::doc, - operation::{ - test::{self, handle_response_test}, - Find, - }, - options::{CursorType, FindOptions}, - Namespace, -}; - -#[test] -fn op_selection_criteria() { - test::op_selection_criteria(|selection_criteria| { - let options = FindOptions { - selection_criteria, - ..Default::default() - }; - Find::new(Namespace::empty(), None, Some(options)) - }); -} - -fn verify_max_await_time(max_await_time: Option, cursor_type: Option) { - let ns = Namespace::empty(); - let find = Find::new( - 
ns, - None, - Some(FindOptions { - cursor_type, - max_await_time, - ..Default::default() - }), - ); - - let spec = handle_response_test( - &find, - doc! { - "cursor": { - "id": 123, - "ns": "a.b", - "firstBatch": [], - }, - "ok": 1 - }, - ) - .unwrap(); - assert_eq!(spec.max_time(), max_await_time); -} - -#[test] -fn handle_max_await_time() { - verify_max_await_time(None, None); - verify_max_await_time(Some(Duration::from_millis(5)), None); - verify_max_await_time( - Some(Duration::from_millis(5)), - Some(CursorType::NonTailable), - ); - verify_max_await_time(Some(Duration::from_millis(5)), Some(CursorType::Tailable)); - verify_max_await_time( - Some(Duration::from_millis(5)), - Some(CursorType::TailableAwait), - ); -} - -#[test] -fn handle_invalid_response() { - let find = Find::empty(); - - let garbled = doc! { "asdfasf": "ASdfasdf" }; - handle_response_test(&find, garbled).unwrap_err(); - - let missing_cursor_field = doc! { - "cursor": { - "ns": "test.test", - "firstBatch": [], - } - }; - handle_response_test(&find, missing_cursor_field).unwrap_err(); -} From e98a589625c82f762decb5c9fb32325f73fe4a09 Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Fri, 1 Mar 2024 13:23:10 -0500 Subject: [PATCH 02/39] update find callers --- src/action.rs | 1 + src/action/find.rs | 26 +- src/client/csfle/client_encryption.rs | 2 +- src/client/csfle/state_machine.rs | 9 +- src/client/session/test.rs | 13 +- src/client/session/test/causal_consistency.rs | 8 +- src/coll.rs | 45 +- src/gridfs.rs | 2 +- src/gridfs/download.rs | 12 +- .../server_selection/test/in_window.rs | 7 +- src/sync/coll.rs | 30 +- src/sync/test.rs | 18 +- src/test/client.rs | 11 +- src/test/coll.rs | 30 +- src/test/csfle.rs | 22 +- src/test/cursor.rs | 41 +- src/test/documentation_examples.rs | 461 +++++++----------- src/test/spec/connection_stepdown.rs | 7 +- src/test/spec/crud_v1.rs | 5 +- src/test/spec/crud_v1/find.rs | 3 +- src/test/spec/retryable_reads.rs | 4 +- src/test/spec/retryable_writes.rs | 8 +- src/test/spec/sdam.rs | 2 +- src/test/spec/sessions.rs | 4 +- src/test/spec/trace.rs | 7 +- src/test/spec/unified_runner/operation.rs | 7 +- src/test/spec/unified_runner/test_runner.rs | 5 +- src/test/spec/v2_runner/operation.rs | 11 +- src/test/spec/v2_runner/test_file.rs | 8 +- tests/readme_examples.rs | 7 +- 30 files changed, 316 insertions(+), 500 deletions(-) diff --git a/src/action.rs b/src/action.rs index 99452e605..d3d930db1 100644 --- a/src/action.rs +++ b/src/action.rs @@ -32,6 +32,7 @@ pub use delete::Delete; pub use distinct::Distinct; pub use drop::{DropCollection, DropDatabase}; pub use drop_index::DropIndex; +pub use find::Find; pub use list_collections::ListCollections; pub use list_databases::ListDatabases; pub use list_indexes::ListIndexes; diff --git a/src/action/find.rs b/src/action/find.rs index 56d472cfb..edb498542 100644 --- a/src/action/find.rs +++ b/src/action/find.rs @@ -12,13 +12,14 @@ use crate::{ ClientSession, Collection, Cursor, + SessionCursor, }; use super::{action_impl, option_setters, ExplicitSession, ImplicitSession}; impl Collection { /// Finds the documents in the collection matching `filter`. - pub fn find_2(&self, filter: Document) -> Find<'_, T> { + pub fn find(&self, filter: Document) -> Find<'_, T> { Find { coll: self, filter, @@ -31,11 +32,12 @@ impl Collection { #[cfg(feature = "sync")] impl crate::sync::Collection { /// Finds the documents in the collection matching `filter`. 
- pub fn find_2(&self, filter: Document) -> Find<'_, T> { - self.async_collection.find_2(filter) + pub fn find(&self, filter: Document) -> Find<'_, T> { + self.async_collection.find(filter) } } +/// Finds the documents in a collection matching a filter. Construct with [`Collection::find`]. #[must_use] pub struct Find<'a, T, Session = ImplicitSession> { coll: &'a Collection, @@ -103,3 +105,21 @@ action_impl! { } } } + +action_impl! { + impl<'a, T> Action for Find<'a, T, ExplicitSession<'a>> { + type Future = FindSessionFuture; + + async fn execute(mut self) -> Result> { + resolve_read_concern_with_session!(self.coll, self.options, Some(&mut *self.session.0))?; + resolve_selection_criteria_with_session!(self.coll, self.options, Some(&mut *self.session.0))?; + + let find = Op::new(self.coll.namespace(), self.filter, self.options); + self.coll.client().execute_session_cursor_operation(find, self.session.0).await + } + + fn sync_wrap(out) -> Result> { + out.map(crate::sync::SessionCursor::new) + } + } +} diff --git a/src/client/csfle/client_encryption.rs b/src/client/csfle/client_encryption.rs index 5b6c0c045..cbbd33128 100644 --- a/src/client/csfle/client_encryption.rs +++ b/src/client/csfle/client_encryption.rs @@ -105,7 +105,7 @@ impl ClientEncryption { /// Finds all documents in the key vault collection. /// Returns the result of the internal find() operation on the key vault collection. pub async fn get_keys(&self) -> Result> { - self.key_vault.find(doc! {}, None).await + self.key_vault.find(doc! {}).await } /// Adds a keyAltName to the keyAltNames array of the key document in the key vault collection diff --git a/src/client/csfle/state_machine.rs b/src/client/csfle/state_machine.rs index 13f75cbd9..bc7aa5f05 100644 --- a/src/client/csfle/state_machine.rs +++ b/src/client/csfle/state_machine.rs @@ -15,7 +15,6 @@ use tokio::{ use crate::{ client::{options::ServerAddress, WeakClient}, - coll::options::FindOptions, error::{Error, Result}, operation::{run_command::RunCommand, RawOutput}, options::ReadConcern, @@ -164,12 +163,8 @@ impl CryptExecutor { .database(&kv_ns.db) .collection::(&kv_ns.coll); let mut cursor = kv_coll - .find( - filter, - FindOptions::builder() - .read_concern(ReadConcern::majority()) - .build(), - ) + .find(filter) + .read_concern(ReadConcern::majority()) .await?; while cursor.advance().await? { ctx.mongo_feed(cursor.current())?; diff --git a/src/client/session/test.rs b/src/client/session/test.rs index 62fa47ad1..358b5b66e 100644 --- a/src/client/session/test.rs +++ b/src/client/session/test.rs @@ -145,7 +145,7 @@ macro_rules! for_each_op { .await; $test_func( "find", - collection_op!($test_name, coll, coll.find(doc! { "x": 1 }, None)), + collection_op!($test_name, coll, coll.find(doc! { "x": 1 })), ) .await; $test_func( @@ -362,7 +362,7 @@ async fn cluster_time_in_commands() { client .database(function_name!()) .collection::(function_name!()) - .find(doc! {}, None) + .find(doc! {}) .await }) .await; @@ -424,7 +424,7 @@ async fn implicit_session_returned_after_immediate_exhaust() { tokio::time::sleep(Duration::from_millis(250)).await; client.clear_session_pool().await; - let mut cursor = coll.find(doc! {}, None).await.expect("find should succeed"); + let mut cursor = coll.find(doc! 
{}).await.expect("find should succeed"); assert!(matches!(cursor.next().await, Some(Ok(_)))); let (find_started, _) = client.get_successful_command_execution("find"); @@ -466,9 +466,9 @@ async fn implicit_session_returned_after_exhaust_by_get_more() { tokio::time::sleep(Duration::from_millis(250)).await; client.clear_session_pool().await; - let options = FindOptions::builder().batch_size(3).build(); let mut cursor = coll - .find(doc! {}, options) + .find(doc! {}) + .batch_size(3) .await .expect("find should succeed"); @@ -545,7 +545,8 @@ async fn find_and_getmore_share_session() { let mut cursor; loop { cursor = coll - .find(doc! {}, options.clone()) + .find(doc! {}) + .with_options(options.clone()) .await .expect("find should succeed"); if cursor.has_next() { diff --git a/src/client/session/test/causal_consistency.rs b/src/client/session/test/causal_consistency.rs index 2b51f12f6..89e9b28cf 100644 --- a/src/client/session/test/causal_consistency.rs +++ b/src/client/session/test/causal_consistency.rs @@ -55,11 +55,9 @@ fn all_session_ops() -> impl Iterator { ops.push(op!("find", true, |coll, session| coll .find_one_with_session(doc! { "x": 1 }, None, session))); - ops.push(op!("find", true, |coll, session| coll.find_with_session( - doc! { "x": 1 }, - None, - session - ))); + ops.push(op!("find", true, |coll, session| coll + .find(doc! { "x": 1 }) + .session(session))); ops.push(op!("update", false, |coll, s| coll .update_one(doc! { "x": 1 }, doc! { "$inc": { "x": 1 } },) diff --git a/src/coll.rs b/src/coll.rs index 634ffa5c9..7b6926de8 100644 --- a/src/coll.rs +++ b/src/coll.rs @@ -18,14 +18,12 @@ use crate::{ cmap::conn::PinnedConnectionHandle, concern::{ReadConcern, WriteConcern}, error::{convert_bulk_errors, BulkWriteError, BulkWriteFailure, Error, ErrorKind, Result}, - operation::{Find, FindAndModify, Insert, Update}, + operation::{FindAndModify, Insert, Update}, results::{InsertManyResult, InsertOneResult, UpdateResult}, selection_criteria::SelectionCriteria, Client, ClientSession, - Cursor, Database, - SessionCursor, }; /// `Collection` is the client-side abstraction of a MongoDB Collection. It can be used to @@ -225,38 +223,6 @@ where Ok(()) } - /// Finds the documents in the collection matching `filter`. - pub async fn find( - &self, - filter: impl Into>, - options: impl Into>, - ) -> Result> { - let mut options = options.into(); - resolve_options!(self, options, [read_concern, selection_criteria]); - - let find = Find::new(self.namespace(), filter.into(), options); - let client = self.client(); - - client.execute_cursor_operation(find).await - } - - /// Finds the documents in the collection matching `filter` using the provided `ClientSession`. 
- pub async fn find_with_session( - &self, - filter: impl Into>, - options: impl Into>, - session: &mut ClientSession, - ) -> Result> { - let mut options = options.into(); - resolve_read_concern_with_session!(self, options, Some(&mut *session))?; - resolve_selection_criteria_with_session!(self, options, Some(&mut *session))?; - - let find = Find::new(self.namespace(), filter.into(), options); - let client = self.client(); - - client.execute_session_cursor_operation(find, session).await - } - pub(crate) fn human_readable_serialization(&self) -> bool { self.inner.human_readable_serialization } @@ -276,7 +242,10 @@ where resolve_options!(self, options, [read_concern, selection_criteria]); let options: FindOptions = options.map(Into::into).unwrap_or_else(Default::default); - let mut cursor = self.find(filter, Some(options)).await?; + let mut cursor = self + .find(filter.into().unwrap_or_default()) + .with_options(options) + .await?; cursor.next().await.transpose() } @@ -294,7 +263,9 @@ where let options: FindOptions = options.map(Into::into).unwrap_or_else(Default::default); let mut cursor = self - .find_with_session(filter, Some(options), session) + .find(filter.into().unwrap_or_default()) + .with_options(options) + .session(&mut *session) .await?; let mut cursor = cursor.stream(session); cursor.next().await.transpose() diff --git a/src/gridfs.rs b/src/gridfs.rs index 9c45d886f..139e05bbe 100644 --- a/src/gridfs.rs +++ b/src/gridfs.rs @@ -234,7 +234,7 @@ impl GridFsBucket { options: impl Into>, ) -> Result> { let find_options = options.into().map(FindOptions::from); - self.files().find(filter, find_options).await + self.files().find(filter).with_options(find_options).await } /// Finds and returns a single [`FilesCollectionDocument`] within this bucket that matches the diff --git a/src/gridfs/download.rs b/src/gridfs/download.rs index ef23228f6..0aa46f689 100644 --- a/src/gridfs/download.rs +++ b/src/gridfs/download.rs @@ -14,7 +14,7 @@ use super::{options::GridFsDownloadByNameOptions, Chunk, FilesCollectionDocument use crate::{ bson::{doc, Bson}, error::{ErrorKind, GridFsErrorKind, GridFsFileIdentifier, Result}, - options::{FindOneOptions, FindOptions}, + options::FindOneOptions, Collection, Cursor, }; @@ -161,10 +161,10 @@ impl GridFsBucket { return Ok(()); } - let options = FindOptions::builder().sort(doc! { "n": 1 }).build(); let mut cursor = self .chunks() - .find(doc! { "files_id": &file.id }, options) + .find(doc! { "files_id": &file.id }) + .sort(doc! { "n": 1 }) .await?; let mut n = 0; @@ -272,8 +272,10 @@ impl GridFsDownloadStream { let initial_state = if file.length == 0 { State::Done } else { - let options = FindOptions::builder().sort(doc! { "n": 1 }).build(); - let cursor = chunks.find(doc! { "files_id": &file.id }, options).await?; + let cursor = chunks + .find(doc! { "files_id": &file.id }) + .sort(doc! 
{ "n": 1 }) + .await?; State::Idle(Some(Idle { buffer: Vec::new(), cursor: Box::new(cursor), diff --git a/src/sdam/description/topology/server_selection/test/in_window.rs b/src/sdam/description/topology/server_selection/test/in_window.rs index b62d604cb..de025d8a8 100644 --- a/src/sdam/description/topology/server_selection/test/in_window.rs +++ b/src/sdam/description/topology/server_selection/test/in_window.rs @@ -7,7 +7,6 @@ use serde::Deserialize; use crate::{ cmap::DEFAULT_MAX_POOL_SIZE, - coll::options::FindOptions, error::Result, event::cmap::CmapEvent, options::ServerAddress, @@ -231,13 +230,11 @@ async fn load_balancing_test() { let client = client.clone(); let selector = selector.clone(); runtime::spawn(async move { - let options = FindOptions::builder() - .selection_criteria(SelectionCriteria::Predicate(selector)) - .build(); client .database("load_balancing_test") .collection::("load_balancing_test") - .find(doc! { "$where": "sleep(500) && true" }, options) + .find(doc! { "$where": "sleep(500) && true" }) + .selection_criteria(SelectionCriteria::Predicate(selector)) .await .unwrap(); }); diff --git a/src/sync/coll.rs b/src/sync/coll.rs index 6ca866f3f..f803d2e9a 100644 --- a/src/sync/coll.rs +++ b/src/sync/coll.rs @@ -2,7 +2,7 @@ use std::{borrow::Borrow, fmt::Debug}; use serde::{de::DeserializeOwned, Serialize}; -use super::{ClientSession, Cursor, SessionCursor}; +use super::ClientSession; use crate::{ bson::Document, error::Result, @@ -11,7 +11,6 @@ use crate::{ FindOneAndReplaceOptions, FindOneAndUpdateOptions, FindOneOptions, - FindOptions, InsertManyOptions, InsertOneOptions, ReadConcern, @@ -111,33 +110,6 @@ where pub fn write_concern(&self) -> Option<&WriteConcern> { self.async_collection.write_concern() } - - /// Finds the documents in the collection matching `filter`. - pub fn find( - &self, - filter: impl Into>, - options: impl Into>, - ) -> Result> { - crate::sync::TOKIO_RUNTIME - .block_on(self.async_collection.find(filter.into(), options.into())) - .map(Cursor::new) - } - - /// Finds the documents in the collection matching `filter` using the provided `ClientSession`. - pub fn find_with_session( - &self, - filter: impl Into>, - options: impl Into>, - session: &mut ClientSession, - ) -> Result> { - crate::sync::TOKIO_RUNTIME - .block_on(self.async_collection.find_with_session( - filter.into(), - options.into(), - &mut session.async_client_session, - )) - .map(SessionCursor::new) - } } impl Collection diff --git a/src/sync/test.rs b/src/sync/test.rs index 129345592..da202a57a 100644 --- a/src/sync/test.rs +++ b/src/sync/test.rs @@ -162,9 +162,10 @@ fn collection() { coll.insert_one(doc! { "x": 1 }, None) .expect("insert should succeed"); - let find_options = FindOptions::builder().projection(doc! { "_id": 0 }).build(); let cursor = coll - .find(doc! { "x": 1 }, find_options) + .find(doc! { "x": 1 }) + .projection(doc! { "_id": 0 }) + .run() .expect("find should succeed"); let results = cursor .collect::>>() @@ -383,7 +384,11 @@ fn borrowed_deserialization() { .sort(doc! { "_id": 1 }) .build(); - let mut cursor = coll.find(None, options.clone()).unwrap(); + let mut cursor = coll + .find(doc! {}) + .with_options(options.clone()) + .run() + .unwrap(); let mut i = 0; while cursor.advance().unwrap() { @@ -393,7 +398,12 @@ fn borrowed_deserialization() { } let mut session = client.start_session().run().unwrap(); - let mut cursor = coll.find_with_session(None, options, &mut session).unwrap(); + let mut cursor = coll + .find(doc! 
{}) + .with_options(options) + .session(&mut session) + .run() + .unwrap(); let mut i = 0; while cursor.advance(&mut session).unwrap() { diff --git a/src/test/client.rs b/src/test/client.rs index 4ffb1e54a..dbf634170 100644 --- a/src/test/client.rs +++ b/src/test/client.rs @@ -5,7 +5,6 @@ use serde::{Deserialize, Serialize}; use crate::{ bson::{doc, Bson}, - coll::options::FindOptions, error::{CommandError, Error, ErrorKind}, event::{cmap::CmapEvent, sdam::SdamEvent}, hello::LEGACY_HELLO_COMMAND_NAME, @@ -816,10 +815,7 @@ async fn manual_shutdown_with_resources() { // Scope to force drop of resources { // Exhausted cursors don't need cleanup, so make sure there's more than one batch to fetch - let _cursor = coll - .find(None, FindOptions::builder().batch_size(1).build()) - .await - .unwrap(); + let _cursor = coll.find(doc! {}).batch_size(1).await.unwrap(); // Similarly, sessions need an in-progress transaction to have cleanup. let mut session = client.start_session().await.unwrap(); if session.start_transaction(None).await.is_err() { @@ -880,10 +876,7 @@ async fn manual_shutdown_immediate_with_resources() { // Resources are scoped to past the `shutdown_immediate`. // Exhausted cursors don't need cleanup, so make sure there's more than one batch to fetch - let _cursor = coll - .find(None, FindOptions::builder().batch_size(1).build()) - .await - .unwrap(); + let _cursor = coll.find(doc! {}).batch_size(1).await.unwrap(); // Similarly, sessions need an in-progress transaction to have cleanup. let mut session = client.start_session().await.unwrap(); session.start_transaction(None).await.unwrap(); diff --git a/src/test/coll.rs b/src/test/coll.rs index 9aeb871af..63a757dd5 100644 --- a/src/test/coll.rs +++ b/src/test/coll.rs @@ -131,7 +131,7 @@ async fn find() { .unwrap(); assert_eq!(result.inserted_ids.len(), 5); - let mut cursor = coll.find(None, None).await.unwrap().enumerate(); + let mut cursor = coll.find(doc! {}).await.unwrap().enumerate(); while let Some((i, result)) = cursor.next().await { let doc = result.unwrap(); @@ -271,10 +271,7 @@ async fn kill_cursors_on_drop() { .database(function_name!()) .collection::(function_name!()); - let cursor = coll - .find(None, FindOptions::builder().batch_size(1).build()) - .await - .unwrap(); + let cursor = coll.find(doc! {}).batch_size(1).await.unwrap(); assert!(!kill_cursors_sent(&event_client)); @@ -306,10 +303,7 @@ async fn no_kill_cursors_on_exhausted() { .database(function_name!()) .collection::(function_name!()); - let cursor = coll - .find(None, FindOptions::builder().build()) - .await - .unwrap(); + let cursor = coll.find(doc! {}).await.unwrap(); assert!(!kill_cursors_sent(&event_client)); @@ -544,7 +538,7 @@ async fn allow_disk_use_test(options: FindOptions, expected_value: Option) let coll = event_client .database(function_name!()) .collection::(function_name!()); - coll.find(None, options).await.unwrap(); + coll.find(doc! {}).with_options(options).await.unwrap(); let events = event_client.get_command_started_events(&["find"]); assert_eq!(events.len(), 1); @@ -780,9 +774,9 @@ async fn typed_insert_many() { ]; coll.insert_many(insert_data.clone(), None).await.unwrap(); - let options = FindOptions::builder().sort(doc! { "x": 1 }).build(); let actual: Vec = coll - .find(doc! { "x": 2 }, options) + .find(doc! { "x": 2 }) + .sort(doc! 
{ "x": 1 }) .await .unwrap() .try_collect() @@ -920,7 +914,7 @@ async fn collection_options_inherited() { .database(function_name!()) .collection_with_options::(function_name!(), options); - coll.find(None, None).await.unwrap(); + coll.find(doc! {}).await.unwrap(); assert_options_inherited(&client, "find").await; coll.find_one(None, None).await.unwrap(); @@ -988,7 +982,7 @@ async fn cursor_batch_size() { coll.insert_many(vec![&doc; 10], None).await.unwrap(); let opts = FindOptions::builder().batch_size(3).build(); - let cursor_no_session = coll.find(doc! {}, opts.clone()).await.unwrap(); + let cursor_no_session = coll.find(doc! {}).with_options(opts.clone()).await.unwrap(); let docs: Vec<_> = cursor_no_session.try_collect().await.unwrap(); assert_eq!(docs.len(), 10); @@ -999,7 +993,9 @@ async fn cursor_batch_size() { } let mut session = client.start_session().await.unwrap(); let mut cursor = coll - .find_with_session(doc! {}, opts.clone(), &mut session) + .find(doc! {}) + .with_options(opts.clone()) + .session(&mut session) .await .unwrap(); let mut docs = Vec::new(); @@ -1009,7 +1005,9 @@ async fn cursor_batch_size() { assert_eq!(docs.len(), 10); let mut cursor = coll - .find_with_session(doc! {}, opts, &mut session) + .find(doc! {}) + .with_options(opts) + .session(&mut session) .await .unwrap(); let docs: Vec<_> = cursor.stream(&mut session).try_collect().await.unwrap(); diff --git a/src/test/csfle.rs b/src/test/csfle.rs index b37c25d27..8bc4bf710 100644 --- a/src/test/csfle.rs +++ b/src/test/csfle.rs @@ -331,7 +331,7 @@ async fn data_key_double_encryption() -> Result<()> { let docs: Vec<_> = client .database("keyvault") .collection::("datakeys") - .find(doc! { "_id": datakey_id.clone() }, None) + .find(doc! { "_id": datakey_id.clone() }) .await? .try_collect() .await?; @@ -2019,7 +2019,7 @@ async fn explicit_encryption_case_1() -> Result<()> { .contention_factor(0) .await?; let found: Vec<_> = enc_coll - .find(doc! { "encryptedIndexed": find_payload }, None) + .find(doc! { "encryptedIndexed": find_payload }) .await? .try_collect() .await?; @@ -2078,7 +2078,7 @@ async fn explicit_encryption_case_2() -> Result<()> { .contention_factor(0) .await?; let found: Vec<_> = enc_coll - .find(doc! { "encryptedIndexed": find_payload }, None) + .find(doc! { "encryptedIndexed": find_payload }) .await? .try_collect() .await?; @@ -2098,7 +2098,7 @@ async fn explicit_encryption_case_2() -> Result<()> { .contention_factor(10) .await?; let found: Vec<_> = enc_coll - .find(doc! { "encryptedIndexed": find_payload2 }, None) + .find(doc! { "encryptedIndexed": find_payload2 }) .await? .try_collect() .await?; @@ -2145,7 +2145,7 @@ async fn explicit_encryption_case_3() -> Result<()> { .await?; let found: Vec<_> = enc_coll - .find(doc! { "_id": 1 }, None) + .find(doc! { "_id": 1 }) .await? .try_collect() .await?; @@ -3206,7 +3206,8 @@ async fn range_explicit_encryption_test( .await?; let docs: Vec = explicit_encryption_collection - .find(find_payload, find_options.clone()) + .find(find_payload) + .with_options(find_options.clone()) .await? .try_collect() .await?; @@ -3228,7 +3229,8 @@ async fn range_explicit_encryption_test( let docs: Vec = encrypted_client .database("db") .collection("explicit_encryption") - .find(find_payload, find_options.clone()) + .find(find_payload) + .with_options(find_options.clone()) .await? 
.try_collect() .await?; @@ -3249,7 +3251,8 @@ async fn range_explicit_encryption_test( let docs: Vec = encrypted_client .database("db") .collection("explicit_encryption") - .find(find_payload, find_options.clone()) + .find(find_payload) + .with_options(find_options.clone()) .await? .try_collect() .await?; @@ -3266,7 +3269,8 @@ async fn range_explicit_encryption_test( let docs: Vec = encrypted_client .database("db") .collection("explicit_encryption") - .find(doc! { "$expr": find_payload }, find_options.clone()) + .find(doc! { "$expr": find_payload }) + .with_options(find_options.clone()) .await? .try_collect() .await?; diff --git a/src/test/cursor.rs b/src/test/cursor.rs index 4eda10693..a7140157b 100644 --- a/src/test/cursor.rs +++ b/src/test/cursor.rs @@ -40,13 +40,9 @@ async fn tailable_cursor() { let await_time = Duration::from_millis(500); let mut cursor = coll - .find( - None, - FindOptions::builder() - .cursor_type(CursorType::TailableAwait) - .max_await_time(await_time) - .build(), - ) + .find(doc! {}) + .cursor_type(CursorType::TailableAwait) + .max_await_time(await_time) .await .unwrap(); @@ -97,9 +93,10 @@ async fn session_cursor_next() { .await .unwrap(); - let opts = FindOptions::builder().batch_size(1).build(); let mut cursor = coll - .find_with_session(None, opts, &mut session) + .find(doc! {}) + .batch_size(1) + .session(&mut session) .await .unwrap(); @@ -137,10 +134,7 @@ async fn batch_exhaustion() { .unwrap(); // Start a find where batch size will line up with limit. - let cursor = coll - .find(None, FindOptions::builder().batch_size(2).limit(4).build()) - .await - .unwrap(); + let cursor = coll.find(doc! {}).batch_size(2).limit(4).await.unwrap(); let v: Vec<_> = cursor.try_collect().await.unwrap(); assert_eq!(4, v.len()); @@ -202,7 +196,11 @@ async fn borrowed_deserialization() { .sort(doc! { "_id": 1 }) .build(); - let mut cursor = coll.find(None, options.clone()).await.unwrap(); + let mut cursor = coll + .find(doc! {}) + .with_options(options.clone()) + .await + .unwrap(); let mut i = 0; while cursor.advance().await.unwrap() { @@ -213,7 +211,9 @@ async fn borrowed_deserialization() { let mut session = client.start_session().await.unwrap(); let mut cursor = coll - .find_with_session(None, options.clone(), &mut session) + .find(doc! {}) + .with_options(options.clone()) + .session(&mut session) .await .unwrap(); @@ -241,10 +241,8 @@ async fn session_cursor_with_type() { .await .unwrap(); - let mut cursor: crate::SessionCursor = coll - .find_with_session(doc! {}, None, &mut session) - .await - .unwrap(); + let mut cursor: crate::SessionCursor = + coll.find(doc! {}).session(&mut session).await.unwrap(); let _ = cursor.next(&mut session).await.unwrap().unwrap(); @@ -272,10 +270,7 @@ async fn cursor_final_batch() { .await .unwrap(); - let mut cursor = coll - .find(None, FindOptions::builder().batch_size(3).build()) - .await - .unwrap(); + let mut cursor = coll.find(doc! 
{}).batch_size(3).await.unwrap(); let mut found = 0; while cursor.advance().await.unwrap() { found += 1; diff --git a/src/test/documentation_examples.rs b/src/test/documentation_examples.rs index 07a4e3bd2..08ca2ec99 100644 --- a/src/test/documentation_examples.rs +++ b/src/test/documentation_examples.rs @@ -7,7 +7,7 @@ use semver::Version; use crate::{ bson::{doc, Bson}, error::Result, - options::{ClientOptions, FindOptions, ServerApi, ServerApiVersion}, + options::{ClientOptions, ServerApi, ServerApiVersion}, test::{log_uncaptured, TestClient, DEFAULT_URI}, Client, Collection, @@ -58,7 +58,7 @@ async fn insert_examples(collection: &Collection) -> Result<()> { assert_coll_count!(collection, 1); // Start Example 2 - let cursor = collection.find(doc! { "item": "canvas" }, None).await?; + let cursor = collection.find(doc! { "item": "canvas" }).await?; // End Example 2 assert_cursor_count!(cursor, 1); @@ -168,27 +168,24 @@ async fn query_top_level_fields_examples(collection: &Collection) -> R assert_coll_count!(collection, 5); // Start Example 7 - let cursor = collection.find(None, None).await?; + let cursor = collection.find(doc! {}).await?; // End Example 7 assert_cursor_count!(cursor, 5); // Start Example 9 - let cursor = collection.find(doc! { "status": "D" }, None).await?; + let cursor = collection.find(doc! { "status": "D" }).await?; // End Example 9 assert_cursor_count!(cursor, 2); // Start Example 10 let cursor = collection - .find( - doc! { - "status": { - "$in": ["A", "D"], - } - }, - None, - ) + .find(doc! { + "status": { + "$in": ["A", "D"], + } + }) .await?; // End Example 10 @@ -196,13 +193,10 @@ async fn query_top_level_fields_examples(collection: &Collection) -> R // Start Example 11 let cursor = collection - .find( - doc! { - "status": "A", - "qty": { "$lt": 30 }, - }, - None, - ) + .find(doc! { + "status": "A", + "qty": { "$lt": 30 }, + }) .await?; // End Example 11 @@ -210,17 +204,14 @@ async fn query_top_level_fields_examples(collection: &Collection) -> R // Start Example 12 let cursor = collection - .find( - doc! { - "$or": [ - { "status": "A" }, - { - "qty": { "$lt": 30 }, - } - ], - }, - None, - ) + .find(doc! { + "$or": [ + { "status": "A" }, + { + "qty": { "$lt": 30 }, + } + ], + }) .await?; // End Example 12 @@ -228,20 +219,17 @@ async fn query_top_level_fields_examples(collection: &Collection) -> R // Start Example 13 let cursor = collection - .find( - doc! { - "status": "A", - "$or": [ - { - "qty": { "$lt": 30 }, - }, - { - "item": { "$regex": "^p" }, - }, - ], - }, - None, - ) + .find(doc! { + "status": "A", + "$or": [ + { + "qty": { "$lt": 30 }, + }, + { + "item": { "$regex": "^p" }, + }, + ], + }) .await?; // End Example 13 @@ -314,16 +302,13 @@ async fn query_embedded_documents_examples(collection: &Collection) -> // Start Example 15 let cursor = collection - .find( - doc! { - "size": { - "h": 14, - "w": 21, - "uom": "cm", - }, + .find(doc! { + "size": { + "h": 14, + "w": 21, + "uom": "cm", }, - None, - ) + }) .await?; // End Example 15 @@ -331,35 +316,29 @@ async fn query_embedded_documents_examples(collection: &Collection) -> // Start Example 16 let cursor = collection - .find( - doc! { - "size": { - "w": 21, - "h": 14, - "uom": "cm", - }, + .find(doc! { + "size": { + "w": 21, + "h": 14, + "uom": "cm", }, - None, - ) + }) .await?; // End Example 16 assert_cursor_count!(cursor, 0); // Start Example 17 - let cursor = collection.find(doc! { "size.uom": "in" }, None).await?; + let cursor = collection.find(doc! 
{ "size.uom": "in" }).await?; // End Example 17 assert_cursor_count!(cursor, 2); // Start Example 18 let cursor = collection - .find( - doc! { - "size.h": { "$lt": 15 }, - }, - None, - ) + .find(doc! { + "size.h": { "$lt": 15 }, + }) .await?; // End Example 18 @@ -367,14 +346,11 @@ async fn query_embedded_documents_examples(collection: &Collection) -> // Start Example 19 let cursor = collection - .find( - doc! { - "size.h": { "$lt": 15 }, - "size.uom": "in", - "status": "D", - }, - None, - ) + .find(doc! { + "size.h": { "$lt": 15 }, + "size.uom": "in", + "status": "D", + }) .await?; // End Example 19 @@ -427,12 +403,9 @@ async fn query_arrays_examples(collection: &Collection) -> Result<()> // Start Example 21 let cursor = collection - .find( - doc! { - "tags": ["red", "blank"], - }, - None, - ) + .find(doc! { + "tags": ["red", "blank"], + }) .await?; // End Example 21 @@ -440,14 +413,11 @@ async fn query_arrays_examples(collection: &Collection) -> Result<()> // Start Example 22 let cursor = collection - .find( - doc! { - "tags": { - "$all": ["red", "blank"], - } - }, - None, - ) + .find(doc! { + "tags": { + "$all": ["red", "blank"], + } + }) .await?; // End Example 22 @@ -455,12 +425,9 @@ async fn query_arrays_examples(collection: &Collection) -> Result<()> // Start Example 23 let cursor = collection - .find( - doc! { - "tags": "red", - }, - None, - ) + .find(doc! { + "tags": "red", + }) .await?; // End Example 23 @@ -468,12 +435,9 @@ async fn query_arrays_examples(collection: &Collection) -> Result<()> // Start Example 24 let cursor = collection - .find( - doc! { - "dim_cm": { "$gt": 25 }, - }, - None, - ) + .find(doc! { + "dim_cm": { "$gt": 25 }, + }) .await?; // End Example 24 @@ -481,15 +445,12 @@ async fn query_arrays_examples(collection: &Collection) -> Result<()> // Start Example 25 let cursor = collection - .find( - doc! { - "dim_cm": { - "$gt": 15, - "$lt": 20, - }, + .find(doc! { + "dim_cm": { + "$gt": 15, + "$lt": 20, }, - None, - ) + }) .await?; // End Example 25 @@ -497,17 +458,14 @@ async fn query_arrays_examples(collection: &Collection) -> Result<()> // Start Example 26 let cursor = collection - .find( - doc! { - "dim_cm": { - "$elemMatch": { - "$gt": 22, - "$lt": 30, - } - }, + .find(doc! { + "dim_cm": { + "$elemMatch": { + "$gt": 22, + "$lt": 30, + } }, - None, - ) + }) .await?; // End Example 26 @@ -515,12 +473,9 @@ async fn query_arrays_examples(collection: &Collection) -> Result<()> // Start Example 27 let cursor = collection - .find( - doc! { - "dim_cm.1": { "$gt": 25 }, - }, - None, - ) + .find(doc! { + "dim_cm.1": { "$gt": 25 }, + }) .await?; // End Example 27 @@ -528,12 +483,9 @@ async fn query_arrays_examples(collection: &Collection) -> Result<()> // Start Example 28 let cursor = collection - .find( - doc! { - "tags": { "$size": 3 }, - }, - None, - ) + .find(doc! { + "tags": { "$size": 3 }, + }) .await?; // End Example 28 @@ -588,15 +540,12 @@ async fn query_array_embedded_documents_examples(collection: &Collection // Start Example 39 let cursor = collection - .find( - doc! { - "item": Bson::Null, - }, - None, - ) + .find(doc! { + "item": Bson::Null, + }) .await?; // End Example 39 @@ -749,12 +674,9 @@ async fn query_null_or_missing_fields_examples(collection: &Collection // Start Example 40 let cursor = collection - .find( - doc! { - "item": { "$type": 10 }, - }, - None, - ) + .find(doc! 
{ + "item": { "$type": 10 }, + }) .await?; // End Example 40 @@ -762,12 +684,9 @@ async fn query_null_or_missing_fields_examples(collection: &Collection // Start Example 41 let cursor = collection - .find( - doc! { - "item": { "$exists": false }, - }, - None, - ) + .find(doc! { + "item": { "$exists": false }, + }) .await?; // End Example 41 @@ -870,32 +789,23 @@ async fn projection_examples(collection: &Collection) -> Result<()> { // Start Example 43 let cursor = collection - .find( - doc! { - "status": "A", - }, - None, - ) + .find(doc! { + "status": "A", + }) .await?; // End Example 43 assert_cursor_count!(cursor, 3); // Start Example 44 - let options = FindOptions::builder() + let cursor = collection + .find(doc! { + "status": "A", + }) .projection(doc! { "item": 1, "status": 1, }) - .build(); - - let cursor = collection - .find( - doc! { - "status": "A", - }, - options, - ) .await?; // End Example 44 @@ -908,21 +818,15 @@ async fn projection_examples(collection: &Collection) -> Result<()> { }); // Start Example 45 - let options = FindOptions::builder() + let cursor = collection + .find(doc! { + "status": "A", + }) .projection(doc! { "item": 1, "status": 1, "_id": 0, }) - .build(); - - let cursor = collection - .find( - doc! { - "status": "A", - }, - options, - ) .await?; // End Example 45 @@ -935,20 +839,14 @@ async fn projection_examples(collection: &Collection) -> Result<()> { }); // Start Example 46 - let options = FindOptions::builder() + let cursor = collection + .find(doc! { + "status": "A", + }) .projection(doc! { "status": 0, "instock": 0, }) - .build(); - - let cursor = collection - .find( - doc! { - "status": "A", - }, - options, - ) .await?; // End Example 46 @@ -961,21 +859,15 @@ async fn projection_examples(collection: &Collection) -> Result<()> { }); // Start Example 47 - let options = FindOptions::builder() + let cursor = collection + .find(doc! { + "status": "A", + }) .projection(doc! { "item": 1, "status": 1, "size.uom": 1, }) - .build(); - - let cursor = collection - .find( - doc! { - "status": "A", - }, - options, - ) .await?; // End Example 47 @@ -994,19 +886,13 @@ async fn projection_examples(collection: &Collection) -> Result<()> { }); // Start Example 48 - let options = FindOptions::builder() + let cursor = collection + .find(doc! { + "status": "A", + }) .projection(doc! { "size.uom": 0, }) - .build(); - - let cursor = collection - .find( - doc! { - "status": "A", - }, - options, - ) .await?; // End Example 48 @@ -1025,21 +911,15 @@ async fn projection_examples(collection: &Collection) -> Result<()> { }); // Start Example 50 - let options = FindOptions::builder() + let cursor = collection + .find(doc! { + "status": "A", + }) .projection(doc! { "item": 1, "status": 1, "instock": { "$slice": -1 }, }) - .build(); - - let cursor = collection - .find( - doc! { - "status": "A", - }, - options, - ) .await?; // End Example 50 @@ -1186,10 +1066,7 @@ async fn update_examples(collection: &Collection) -> Result<()> { // End Example 52 run_on_each_doc!( - collection - .find(doc! { "item": "paper" }, None) - .await - .unwrap(), + collection.find(doc! { "item": "paper" }).await.unwrap(), doc, { let uom = doc.get_document("size").unwrap().get_str("uom").unwrap(); @@ -1221,12 +1098,9 @@ async fn update_examples(collection: &Collection) -> Result<()> { run_on_each_doc!( collection - .find( - doc! { - "qty": { "$lt": 50 }, - }, - None, - ) + .find(doc! 
{ + "qty": { "$lt": 50 }, + }) .await .unwrap(), doc, @@ -1264,10 +1138,7 @@ async fn update_examples(collection: &Collection) -> Result<()> { // End Example 54 run_on_each_doc!( - collection - .find(doc! { "item": "paper" }, None,) - .await - .unwrap(), + collection.find(doc! { "item": "paper" }).await.unwrap(), doc, { assert_eq!(doc.len(), 3); diff --git a/src/test/spec/connection_stepdown.rs b/src/test/spec/connection_stepdown.rs index 2e9249e2a..16c47f5f0 100644 --- a/src/test/spec/connection_stepdown.rs +++ b/src/test/spec/connection_stepdown.rs @@ -5,7 +5,7 @@ use futures::stream::StreamExt; use crate::{ bson::{doc, Document}, error::{CommandError, ErrorKind}, - options::{Acknowledgment, ClientOptions, FindOptions, InsertManyOptions, WriteConcern}, + options::{Acknowledgment, ClientOptions, InsertManyOptions, WriteConcern}, selection_criteria::SelectionCriteria, test::{get_client_options, log_uncaptured, util::EventClient}, Collection, @@ -68,10 +68,7 @@ async fn get_more() { .await .unwrap(); - let mut cursor = coll - .find(None, Some(FindOptions::builder().batch_size(2).build())) - .await - .unwrap(); + let mut cursor = coll.find(doc! {}).batch_size(2).await.unwrap(); let db = client.database("admin"); diff --git a/src/test/spec/crud_v1.rs b/src/test/spec/crud_v1.rs index 1b955d50d..e817b361b 100644 --- a/src/test/spec/crud_v1.rs +++ b/src/test/spec/crud_v1.rs @@ -20,7 +20,6 @@ use serde::Deserialize; use crate::{ bson::{doc, Document}, - coll::options::FindOptions, test::log_uncaptured, Collection, }; @@ -63,8 +62,8 @@ pub struct CollectionOutcome { } pub async fn find_all(coll: &Collection) -> Vec { - let options = FindOptions::builder().sort(doc! { "_id": 1 }).build(); - coll.find(None, options) + coll.find(doc! {}) + .sort(doc! { "_id": 1 }) .await .unwrap() .try_collect() diff --git a/src/test/spec/crud_v1/find.rs b/src/test/spec/crud_v1/find.rs index dcff05e5b..eaeafd6f5 100644 --- a/src/test/spec/crud_v1/find.rs +++ b/src/test/spec/crud_v1/find.rs @@ -57,7 +57,8 @@ async fn run_find_test(test_file: TestFile) { }; let cursor = coll - .find(arguments.filter, options) + .find(arguments.filter) + .with_options(options) .await .expect(&test_case.description); assert_eq!( diff --git a/src/test/spec/retryable_reads.rs b/src/test/spec/retryable_reads.rs index c680c2b42..ce8b60d92 100644 --- a/src/test/spec/retryable_reads.rs +++ b/src/test/spec/retryable_reads.rs @@ -197,7 +197,7 @@ async fn retry_read_different_mongos() { let result = client .database("test") .collection::("retry_read_different_mongos") - .find(doc! {}, None) + .find(doc! {}) .await; assert!(result.is_err()); let events = client.get_command_events(&["find"]); @@ -254,7 +254,7 @@ async fn retry_read_same_mongos() { let result = client .database("test") .collection::("retry_read_same_mongos") - .find(doc! {}, None) + .find(doc! 
{}) .await; assert!(result.is_ok(), "{:?}", result); let events = client.get_command_events(&["find"]); diff --git a/src/test/spec/retryable_writes.rs b/src/test/spec/retryable_writes.rs index 085ea4ab5..a54b10352 100644 --- a/src/test/spec/retryable_writes.rs +++ b/src/test/spec/retryable_writes.rs @@ -16,7 +16,7 @@ use crate::{ cmap::{CmapEvent, ConnectionCheckoutFailedReason}, command::CommandEvent, }, - options::{ClientOptions, FindOptions, InsertManyOptions}, + options::{ClientOptions, InsertManyOptions}, runtime, runtime::{spawn, AcknowledgedMessage, AsyncJoinHandle}, sdam::MIN_HEARTBEAT_FREQUENCY, @@ -163,9 +163,9 @@ async fn run_legacy() { }; let coll = client.get_coll(&db_name, &coll_name); - let options = FindOptions::builder().sort(doc! { "_id": 1 }).build(); let actual_data: Vec = coll - .find(None, options) + .find(doc! {}) + .sort(doc! { "_id": 1 }) .await .unwrap() .try_collect() @@ -373,7 +373,7 @@ async fn label_not_added(retry_reads: bool) { .await .unwrap(); - let err = coll.find(doc! {}, None).await.unwrap_err(); + let err = coll.find(doc! {}).await.unwrap_err(); assert!(!err.contains_label("RetryableWriteError")); } diff --git a/src/test/spec/sdam.rs b/src/test/spec/sdam.rs index e72fd3377..2a47f43ac 100644 --- a/src/test/spec/sdam.rs +++ b/src/test/spec/sdam.rs @@ -183,7 +183,7 @@ async fn rtt_is_updated() { client .database("foo") .collection::("bar") - .find(None, None) + .find(doc! {}) .await .unwrap(); diff --git a/src/test/spec/sessions.rs b/src/test/spec/sessions.rs index 27994a0f6..20bac8ec2 100644 --- a/src/test/spec/sessions.rs +++ b/src/test/spec/sessions.rs @@ -154,7 +154,7 @@ async fn implicit_session_after_connection() { ); ops.push( async { - let cursor = coll.find(doc! {}, None).await.unwrap(); + let cursor = coll.find(doc! {}).await.unwrap(); let r: Result> = cursor.try_collect().await; r.map(|_| ()) } @@ -241,7 +241,7 @@ async fn sessions_not_supported_implicit_session_ignored() { let mut subscriber = client.handler.subscribe(); let coll = client.database(name).collection(name); - let _ = coll.find(doc! {}, None).await; + let _ = coll.find(doc! {}).await; let event = subscriber .filter_map_event(Duration::from_millis(500), |event| match event { Event::Command(CommandEvent::Started(command_started_event)) diff --git a/src/test/spec/trace.rs b/src/test/spec/trace.rs index 625fa2aa1..745756311 100644 --- a/src/test/spec/trace.rs +++ b/src/test/spec/trace.rs @@ -3,7 +3,6 @@ use std::{collections::HashMap, iter, sync::Arc, time::Duration}; use crate::{ bson::{doc, Document}, client::options::ServerAddress, - coll::options::FindOptions, error::{ BulkWriteError, BulkWriteFailure, @@ -105,7 +104,7 @@ async fn command_logging_truncation_default_limit() { let reply = succeeded.get_value_as_string("reply"); assert!(reply.len() <= DEFAULT_MAX_DOCUMENT_LENGTH_BYTES + 3); // +3 for trailing "..." - coll.find(None, None).await.expect("find should succeed"); + coll.find(doc! {}).await.expect("find should succeed"); let succeeded = tracing_subscriber .wait_for_event(Duration::from_millis(500), |e| { e.get_value_as_string("message") == "Command succeeded" @@ -196,10 +195,8 @@ async fn command_logging_truncation_mid_codepoint() { // trailing "..." assert_eq!(command.len(), 221); - let find_options = FindOptions::builder() + coll.find(doc! {}) .projection(doc! 
{ "_id": 0, "🤔": 1 }) - .build(); - coll.find(None, find_options) .await .expect("find should succeed"); let succeeded = tracing_subscriber diff --git a/src/test/spec/unified_runner/operation.rs b/src/test/spec/unified_runner/operation.rs index 143a7b225..c1919fda6 100644 --- a/src/test/spec/unified_runner/operation.rs +++ b/src/test/spec/unified_runner/operation.rs @@ -585,12 +585,11 @@ impl Find { selection_criteria: None, let_vars: self.let_vars.clone(), }; + let act = collection.find(self.filter.clone()).with_options(options); match &self.session { Some(session_id) => { let cursor = with_mut_session!(test_runner, session_id, |session| async { - collection - .find_with_session(self.filter.clone(), options, session) - .await + act.session(session.deref_mut()).await }) .await?; Ok(TestCursor::Session { @@ -599,7 +598,7 @@ impl Find { }) } None => { - let cursor = collection.find(self.filter.clone(), options).await?; + let cursor = act.await?; Ok(TestCursor::Normal(Mutex::new(cursor))) } } diff --git a/src/test/spec/unified_runner/test_runner.rs b/src/test/spec/unified_runner/test_runner.rs index 2c6db3b5e..45dc44671 100644 --- a/src/test/spec/unified_runner/test_runner.rs +++ b/src/test/spec/unified_runner/test_runner.rs @@ -12,7 +12,6 @@ use crate::{ options::{ CollectionOptions, CreateCollectionOptions, - FindOptions, ReadConcern, ReadPreference, SelectionCriteria, @@ -357,9 +356,9 @@ impl TestRunner { .internal_client .get_coll_with_options(db_name, coll_name, options); - let options = FindOptions::builder().sort(doc! { "_id": 1 }).build(); let actual_data: Vec = collection - .find(doc! {}, options) + .find(doc! {}) + .sort(doc! { "_id": 1 }) .await .unwrap() .try_collect() diff --git a/src/test/spec/v2_runner/operation.rs b/src/test/spec/v2_runner/operation.rs index 644c0794f..06fd16b65 100644 --- a/src/test/spec/v2_runner/operation.rs +++ b/src/test/spec/v2_runner/operation.rs @@ -386,20 +386,19 @@ impl TestOperation for Find { session: Option<&'a mut ClientSession>, ) -> BoxFuture<'a, Result>> { async move { + let act = collection + .find(self.filter.clone().unwrap_or_default()) + .with_options(self.options.clone()); let result = match session { Some(session) => { - let mut cursor = collection - .find_with_session(self.filter.clone(), self.options.clone(), session) - .await?; + let mut cursor = act.session(&mut *session).await?; cursor .stream(session) .try_collect::>() .await? } None => { - let cursor = collection - .find(self.filter.clone(), self.options.clone()) - .await?; + let cursor = act.await?; cursor.try_collect::>().await? } }; diff --git a/src/test/spec/v2_runner/test_file.rs b/src/test/spec/v2_runner/test_file.rs index 033279a16..b622162ab 100644 --- a/src/test/spec/v2_runner/test_file.rs +++ b/src/test/spec/v2_runner/test_file.rs @@ -7,7 +7,7 @@ use serde::{Deserialize, Deserializer}; use crate::{ bson::Document, - options::{FindOptions, ReadPreference, SelectionCriteria, SessionOptions}, + options::{ReadPreference, SelectionCriteria, SessionOptions}, test::{ log_uncaptured, spec::merge_uri_options, @@ -177,12 +177,10 @@ impl Outcome { .database(db_name) .collection_with_options(coll_name, coll_opts); let selection_criteria = SelectionCriteria::ReadPreference(ReadPreference::Primary); - let options = FindOptions::builder() + let actual_data: Vec = coll + .find(doc! {}) .sort(doc! 
{ "_id": 1 }) .selection_criteria(selection_criteria) - .build(); - let actual_data: Vec = coll - .find(None, options) .await .unwrap() .try_collect() diff --git a/tests/readme_examples.rs b/tests/readme_examples.rs index 2d3eab2e6..2310e40ca 100644 --- a/tests/readme_examples.rs +++ b/tests/readme_examples.rs @@ -94,12 +94,11 @@ async fn _finding_documents_into_a_collection( ) -> Result<()> { // This trait is required to use `try_next()` on the cursor use futures::stream::TryStreamExt; - use mongodb::{bson::doc, options::FindOptions}; + use mongodb::bson::doc; // Query the books in the collection with a filter and an option. let filter = doc! { "author": "George Orwell" }; - let find_options = FindOptions::builder().sort(doc! { "title": 1 }).build(); - let mut cursor = typed_collection.find(filter, find_options).await?; + let mut cursor = typed_collection.find(filter).sort(doc! { "title": 1 }).await?; // Iterate over the results of the cursor. while let Some(book) = cursor.try_next().await? { @@ -135,7 +134,7 @@ async fn _using_the_sync_api() -> Result<()> { // Insert some books into the "mydb.books" collection. collection.insert_many(docs, None)?; - let cursor = collection.find(doc! { "author": "George Orwell" }, None)?; + let cursor = collection.find(doc! { "author": "George Orwell" }).run()?; for result in cursor { println!("title: {}", result?.title); } From e52520699bed450c79e7b4f9bfd0f172293daaee Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Fri, 1 Mar 2024 14:10:03 -0500 Subject: [PATCH 03/39] wip find_one conversion --- src/action.rs | 18 +++++++++- src/action/find.rs | 83 ++++++++++++++++++++++++++++++++++++++-------- 2 files changed, 86 insertions(+), 15 deletions(-) diff --git a/src/action.rs b/src/action.rs index d3d930db1..f561dcd2e 100644 --- a/src/action.rs +++ b/src/action.rs @@ -59,6 +59,7 @@ pub struct Single; pub struct Multiple; macro_rules! option_setters { + // Include options aggregate accessors. ( $opt_field:ident: $opt_field_ty:ty; $( @@ -72,10 +73,25 @@ macro_rules! option_setters { /// Set all options. Note that this will replace all previous values set. pub fn with_options(mut self, value: impl Into>) -> Self { - self.options = value.into(); + self.$opt_field = value.into(); self } + crate::action::option_setters!($opt_field_ty; + $( + $(#[$($attrss)*])* + $opt_name: $opt_ty, + )* + ); + }; + // Just generate field setters. + ( + $opt_field_ty:ty; + $( + $(#[$($attrss:tt)*])* + $opt_name:ident: $opt_ty:ty, + )* + ) => { $( #[doc = concat!("Set the [`", stringify!($opt_field_ty), "::", stringify!($opt_name), "`] option.")] $(#[$($attrss)*])* diff --git a/src/action/find.rs b/src/action/find.rs index edb498542..f2109d73f 100644 --- a/src/action/find.rs +++ b/src/action/find.rs @@ -1,6 +1,7 @@ -use std::time::Duration; +use std::{marker::PhantomData, time::Duration}; use bson::{Bson, Document}; +use serde::de::DeserializeOwned; use crate::{ coll::options::{CursorType, FindOptions, Hint}, @@ -15,16 +16,34 @@ use crate::{ SessionCursor, }; -use super::{action_impl, option_setters, ExplicitSession, ImplicitSession}; +use super::{action_impl, option_setters, ExplicitSession, ImplicitSession, Single, Multiple}; impl Collection { /// Finds the documents in the collection matching `filter`. + /// + /// `await` will return `Result>` (or `Result>` if a session is provided). 
pub fn find(&self, filter: Document) -> Find<'_, T> { Find { coll: self, filter, options: None, session: ImplicitSession, + _mode: PhantomData + } + } +} + +impl Collection { + /// Finds a single document in the collection matching `filter`. + /// + /// `await` will return `Result>`. + pub fn find_one_2(&self, filter: Document) -> Find<'_, T, Single> { + Find { + coll: self, + filter, + options: None, + session: ImplicitSession, + _mode: PhantomData } } } @@ -32,36 +51,43 @@ impl Collection { #[cfg(feature = "sync")] impl crate::sync::Collection { /// Finds the documents in the collection matching `filter`. + /// + /// [`run`](Find::run) will return `Result>` (or `Result>` if a session is provided). pub fn find(&self, filter: Document) -> Find<'_, T> { self.async_collection.find(filter) } } +#[cfg(feature = "sync")] +impl crate::sync::Collection where T: DeserializeOwned { + /// Finds a single document in the collection matching `filter`. + /// + /// [`run`](Find::run) will return `Result>`. + pub fn find_one_2(&self, filter: Document) -> Find<'_, T, Single> { + self.async_collection.find_one_2(filter) + } +} + /// Finds the documents in a collection matching a filter. Construct with [`Collection::find`]. #[must_use] -pub struct Find<'a, T, Session = ImplicitSession> { +pub struct Find<'a, T, Mode = Multiple, Session = ImplicitSession> { coll: &'a Collection, filter: Document, options: Option, session: Session, + _mode: PhantomData, } -impl<'a, T, Session> Find<'a, T, Session> { +impl<'a, T, Mode, Session> Find<'a, T, Mode, Session> { option_setters!(options: FindOptions; - allow_disk_use: bool, allow_partial_results: bool, - batch_size: u32, comment: String, comment_bson: Bson, - cursor_type: CursorType, hint: Hint, - limit: i64, max: Document, - max_await_time: Duration, max_scan: u64, max_time: Duration, min: Document, - no_cursor_timeout: bool, projection: Document, read_concern: ReadConcern, return_key: bool, @@ -74,23 +100,36 @@ impl<'a, T, Session> Find<'a, T, Session> { ); } -impl<'a, T> Find<'a, T, ImplicitSession> { +// Some options don't make sense for `find_one`. +impl<'a, T, Session> Find<'a, T, Multiple, Session> { + option_setters!(FindOptions; + allow_disk_use: bool, + batch_size: u32, + cursor_type: CursorType, + limit: i64, + max_await_time: Duration, + no_cursor_timeout: bool, + ); +} + +impl<'a, T, Mode> Find<'a, T, Mode, ImplicitSession> { /// Runs the query using the provided session. pub fn session<'s>( self, value: impl Into<&'s mut ClientSession>, - ) -> Find<'a, T, ExplicitSession<'s>> { + ) -> Find<'a, T, Mode, ExplicitSession<'s>> { Find { coll: self.coll, filter: self.filter, options: self.options, session: ExplicitSession(value.into()), + _mode: PhantomData, } } } action_impl! { - impl<'a, T> Action for Find<'a, T, ImplicitSession> { + impl<'a, T> Action for Find<'a, T, Multiple, ImplicitSession> { type Future = FindFuture; async fn execute(mut self) -> Result> { @@ -107,7 +146,7 @@ action_impl! { } action_impl! { - impl<'a, T> Action for Find<'a, T, ExplicitSession<'a>> { + impl<'a, T> Action for Find<'a, T, Multiple, ExplicitSession<'a>> { type Future = FindSessionFuture; async fn execute(mut self) -> Result> { @@ -123,3 +162,19 @@ action_impl! { } } } + +/* +action_impl! 
{ + impl<'a, T> Action for Find<'a, T, Single, ImplicitSession> { + type Future = FindOneFuture; + + async fn execute(mut self) -> Result> { + use futures_util::stream::StreamExt; + let mut options = self.options.unwrap_or_default(); + options.limit = Some(-1); + let mut cursor = self.coll.find(self.filter).with_options(options).await?; + cursor.next().await.transpose() + } + } +} +*/ From b6fef3c5c91a1d12eec97213b9835675f1a04ca6 Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Fri, 1 Mar 2024 14:40:41 -0500 Subject: [PATCH 04/39] allow where clause --- src/action.rs | 28 +++++++++++++++++++++------- src/action/find.rs | 8 ++++---- 2 files changed, 25 insertions(+), 11 deletions(-) diff --git a/src/action.rs b/src/action.rs index f561dcd2e..e22e8a720 100644 --- a/src/action.rs +++ b/src/action.rs @@ -142,13 +142,17 @@ pub trait Action { macro_rules! action_impl { // Generate with no sync type conversion ( - impl$(<$lt:lifetime $(, $($at:ident),+)?>)? Action for $action:ty { + impl$(<$lt:lifetime $(, $($at:ident),+)?>)? Action for $action:ty + $(where $wt:ident: $wb:path)? + { type Future = $f_ty:ident; async fn execute($($args:ident)+) -> $out:ty $code:block } ) => { crate::action::action_impl! { - impl$(<$lt $(, $($at),+)?>)? Action for $action { + impl$(<$lt $(, $($at),+)?>)? Action for $action + $(where $wt: $wb)? + { type Future = $f_ty; async fn execute($($args)+) -> $out $code fn sync_wrap(out) -> $out { out } @@ -157,13 +161,17 @@ macro_rules! action_impl { }; // Generate with a sync type conversion ( - impl$(<$lt:lifetime $(, $($at:ident),+)?>)? Action for $action:ty { + impl$(<$lt:lifetime $(, $($at:ident),+)?>)? Action for $action:ty + $(where $wt:ident: $wb:path)? + { type Future = $f_ty:ident; async fn execute($($args:ident)+) -> $out:ty $code:block fn sync_wrap($($wrap_args:ident)+) -> $sync_out:ty $wrap_code:block } ) => { - impl$(<$lt $(, $($at),+)?>)? std::future::IntoFuture for $action { + impl$(<$lt $(, $($at),+)?>)? std::future::IntoFuture for $action + $(where $wt: $wb)? + { type Output = $out; type IntoFuture = $f_ty$(<$lt $(, $($at)+)?>)?; @@ -174,13 +182,17 @@ macro_rules! action_impl { } } - impl$(<$lt $(, $($at),+)?>)? crate::action::Action for $action { + impl$(<$lt $(, $($at),+)?>)? crate::action::Action for $action + $(where $wt: $wb)? + { type Output = $out; } crate::action::action_impl_future_wrapper!($f_ty, $out $(, $lt)? $($(, $($at),+)?)?); - impl$(<$lt $(, $($at),+)?>)? std::future::Future for $f_ty$(<$lt $(, $($at),+)?>)? { + impl$(<$lt $(, $($at),+)?>)? std::future::Future for $f_ty$(<$lt $(, $($at),+)?>)? + $(where $wt: $wb)? + { type Output = $out; fn poll(mut self: std::pin::Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> std::task::Poll { @@ -189,7 +201,9 @@ macro_rules! action_impl { } #[cfg(feature = "sync")] - impl$(<$lt $(, $($at),+)?>)? $action { + impl$(<$lt $(, $($at),+)?>)? $action + $(where $wt: $wb)? + { /// Synchronously execute this action. pub fn run(self) -> $sync_out { let $($wrap_args)+ = crate::sync::TOKIO_RUNTIME.block_on(std::future::IntoFuture::into_future(self)); diff --git a/src/action/find.rs b/src/action/find.rs index f2109d73f..a733cb066 100644 --- a/src/action/find.rs +++ b/src/action/find.rs @@ -163,12 +163,13 @@ action_impl! { } } -/* action_impl! 
{ - impl<'a, T> Action for Find<'a, T, Single, ImplicitSession> { + impl<'a, T> Action for Find<'a, T, Single, ImplicitSession> + where T: DeserializeOwned + { type Future = FindOneFuture; - async fn execute(mut self) -> Result> { + async fn execute(self) -> Result> { use futures_util::stream::StreamExt; let mut options = self.options.unwrap_or_default(); options.limit = Some(-1); @@ -177,4 +178,3 @@ action_impl! { } } } -*/ From cade4397ca1d13e5db92e4ab7566277e20c5bd24 Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Fri, 1 Mar 2024 15:11:28 -0500 Subject: [PATCH 05/39] more find_one wip --- src/action/find.rs | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/src/action/find.rs b/src/action/find.rs index a733cb066..25996aff9 100644 --- a/src/action/find.rs +++ b/src/action/find.rs @@ -178,3 +178,26 @@ action_impl! { } } } + +/* +action_impl! { + impl<'a, T> Action for Find<'a, T, Single, ExplicitSession<'a>> + where T: DeserializeOwned + Send + { + type Future = FindOneSessionFuture; + + async fn execute(self) -> Result> { + use futures_util::stream::StreamExt; + let mut options = self.options.unwrap_or_default(); + options.limit = Some(-1); + let mut cursor = self.coll + .find(self.filter) + .with_options(options) + .session(&mut *self.session.0) + .await?; + let mut stream = cursor.stream(self.session.0); + stream.next().await.transpose() + } + } +} +*/ \ No newline at end of file From 62545063a86386fcc62392a17f41a608ec506b24 Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Fri, 1 Mar 2024 16:27:22 -0500 Subject: [PATCH 06/39] wip proc_macro --- Cargo.toml | 1 + action_macro/.gitignore | 1 + action_macro/Cargo.toml | 14 ++++++++++++++ action_macro/src/lib.rs | 35 +++++++++++++++++++++++++++++++++++ src/action/find.rs | 7 +++---- 5 files changed, 54 insertions(+), 4 deletions(-) create mode 100644 action_macro/.gitignore create mode 100644 action_macro/Cargo.toml create mode 100644 action_macro/src/lib.rs diff --git a/Cargo.toml b/Cargo.toml index 2d538a846..87eeccdb5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -69,6 +69,7 @@ in-use-encryption-unstable = ["mongocrypt", "rayon", "num_cpus"] tracing-unstable = ["tracing", "log"] [dependencies] +action_macro = { path = "action_macro" } async-trait = "0.1.42" base64 = "0.13.0" bitflags = "1.1.0" diff --git a/action_macro/.gitignore b/action_macro/.gitignore new file mode 100644 index 000000000..b83d22266 --- /dev/null +++ b/action_macro/.gitignore @@ -0,0 +1 @@ +/target/ diff --git a/action_macro/Cargo.toml b/action_macro/Cargo.toml new file mode 100644 index 000000000..f846e27ef --- /dev/null +++ b/action_macro/Cargo.toml @@ -0,0 +1,14 @@ +[package] +name = "action_macro" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +proc-macro2 = "1.0.78" +quote = "1.0.35" +syn = { version = "2.0.52", features = ["full", "parsing", "proc-macro"] } + +[lib] +proc-macro = true diff --git a/action_macro/src/lib.rs b/action_macro/src/lib.rs new file mode 100644 index 000000000..3ab692858 --- /dev/null +++ b/action_macro/src/lib.rs @@ -0,0 +1,35 @@ +extern crate proc_macro; + +use proc_macro::TokenStream; +use quote::quote; +use syn::{parse_macro_input, spanned::Spanned, Error, ItemImpl}; + +#[proc_macro] +pub fn action_impl_2(input: TokenStream) -> TokenStream { + let impl_ = parse_macro_input!(input as ItemImpl); + fallible(impl_).unwrap_or_else(Error::into_compile_error).into() +} + +fn fallible(input: ItemImpl) -> Result 
{ + // Validate that it's `impl Action for ...` + match input.trait_ { + None => return Err(Error::new(input.span(), "A trait must be implemented")), + Some((not, path, _)) => { + if let Some(not) = not { + return Err(Error::new(not.span(), "Must not be a negative impl")); + } + if !path.is_ident("Action") { + return Err(Error::new(path.span(), "Trait must be `Action`")); + } + } + } + + let generics = input.generics; + let action = input.self_ty; + + Ok(quote! { + impl #generics std::future::IntoFuture for #action { + + } + }) +} \ No newline at end of file diff --git a/src/action/find.rs b/src/action/find.rs index 25996aff9..337f29c78 100644 --- a/src/action/find.rs +++ b/src/action/find.rs @@ -1,5 +1,6 @@ use std::{marker::PhantomData, time::Duration}; +use action_macro::action_impl_2; use bson::{Bson, Document}; use serde::de::DeserializeOwned; @@ -179,8 +180,7 @@ action_impl! { } } -/* -action_impl! { +action_impl_2! { impl<'a, T> Action for Find<'a, T, Single, ExplicitSession<'a>> where T: DeserializeOwned + Send { @@ -199,5 +199,4 @@ action_impl! { stream.next().await.transpose() } } -} -*/ \ No newline at end of file +} \ No newline at end of file From acf434f8e39e952697a5e54c94381278416dbcf8 Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Fri, 1 Mar 2024 16:57:04 -0500 Subject: [PATCH 07/39] proc_macro progress --- action_macro/src/lib.rs | 37 +++++++++++++++++++++++++++++++++++-- 1 file changed, 35 insertions(+), 2 deletions(-) diff --git a/action_macro/src/lib.rs b/action_macro/src/lib.rs index 3ab692858..52ff8a313 100644 --- a/action_macro/src/lib.rs +++ b/action_macro/src/lib.rs @@ -1,8 +1,9 @@ extern crate proc_macro; use proc_macro::TokenStream; +use proc_macro2::Span; use quote::quote; -use syn::{parse_macro_input, spanned::Spanned, Error, ItemImpl}; +use syn::{parse_macro_input, spanned::Spanned, Error, ImplItem, ImplItemType, ItemImpl}; #[proc_macro] pub fn action_impl_2(input: TokenStream) -> TokenStream { @@ -26,10 +27,42 @@ fn fallible(input: ItemImpl) -> Result { let generics = input.generics; let action = input.self_ty; + let future_ty = find_item(&input.items, "Future type", |item| { + let item_ty = impl_item_type(item)?; + if item_ty.ident.to_string() != "Future" { + return None; + } + Some(&item_ty.ty) + })?; Ok(quote! { impl #generics std::future::IntoFuture for #action { - + type IntoFuture = #future_ty; } }) +} + +/// Finds the single item that matches the predicate. 
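// For example, the call in `fallible` above uses this to pull out the `Future`
// associated type; if that item is missing or appears more than once, the returned
// error is surfaced as a compile error by the caller.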
+fn find_item<'a, T>(items: &'a [ImplItem], name: &str, pred: impl Fn(&'a ImplItem) -> Option) -> Result { + let mut found = None; + for item in items { + if let Some(v) = pred(item) { + if found.is_none() { + found = Some(v); + } else { + return Err(Error::new(item.span(), format!("Duplicate {} found", name))); + } + } + } + match found { + Some(v) => Ok(v), + None => Err(Error::new(Span::call_site(), format!("No {} found", name))), + } +} + +fn impl_item_type(item: &ImplItem) -> Option<&ImplItemType> { + match item { + ImplItem::Type(t) => Some(t), + _ => None, + } } \ No newline at end of file From 5f77e83681cbde98773e3eb66ceded0241a07f0e Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Sat, 2 Mar 2024 09:53:15 -0500 Subject: [PATCH 08/39] full proc macro --- action_macro/src/lib.rs | 223 ++++++++++++++++++++++++++++++++-------- src/action/find.rs | 4 +- 2 files changed, 181 insertions(+), 46 deletions(-) diff --git a/action_macro/src/lib.rs b/action_macro/src/lib.rs index 52ff8a313..23b893f00 100644 --- a/action_macro/src/lib.rs +++ b/action_macro/src/lib.rs @@ -3,66 +3,203 @@ extern crate proc_macro; use proc_macro::TokenStream; use proc_macro2::Span; use quote::quote; -use syn::{parse_macro_input, spanned::Spanned, Error, ImplItem, ImplItemType, ItemImpl}; +use syn::{braced, parenthesized, parse::{Parse, ParseStream}, parse_macro_input, parse_quote, punctuated::Punctuated, spanned::Spanned, Block, Generics, Ident, Lifetime, LifetimeParam, Token, Type}; #[proc_macro] pub fn action_impl_2(input: TokenStream) -> TokenStream { - let impl_ = parse_macro_input!(input as ItemImpl); - fallible(impl_).unwrap_or_else(Error::into_compile_error).into() -} + let ActionImpl { + generics, + lifetime, + action, + future_name, + exec_self_mut, + exec_output, + exec_body, + sync_wrap, + } = parse_macro_input!(input as ActionImpl); -fn fallible(input: ItemImpl) -> Result { - // Validate that it's `impl Action for ...` - match input.trait_ { - None => return Err(Error::new(input.span(), "A trait must be implemented")), - Some((not, path, _)) => { - if let Some(not) = not { - return Err(Error::new(not.span(), "Must not be a negative impl")); - } - if !path.is_ident("Action") { - return Err(Error::new(path.span(), "Trait must be `Action`")); - } - } + let mut unbounded_generics = generics.clone(); + for lt in unbounded_generics.lifetimes_mut() { + lt.bounds.clear(); + } + for ty in unbounded_generics.type_params_mut() { + ty.bounds.clear(); } - let generics = input.generics; - let action = input.self_ty; - let future_ty = find_item(&input.items, "Future type", |item| { - let item_ty = impl_item_type(item)?; - if item_ty.ident.to_string() != "Future" { - return None; + let SyncWrap { + arg_mut: sync_arg_mut, + arg: sync_arg, + sync_output, + sync_body, + } = sync_wrap.unwrap_or_else(|| { + SyncWrap { + arg_mut: None, + arg: Ident::new("out", Span::call_site()), + sync_output: exec_output.clone(), + sync_body: parse_quote!({ out }), + } + }); + + quote! { + impl #generics crate::action::Action for #action { + type Output = #exec_output; } - Some(&item_ty.ty) - })?; - Ok(quote! 
{ impl #generics std::future::IntoFuture for #action { - type IntoFuture = #future_ty; + type Output = #exec_output; + type IntoFuture = #future_name #unbounded_generics; + + fn into_future(#exec_self_mut self) -> Self::IntoFuture { + #future_name (Box::pin(async move { + #exec_body + })) + } + } + + pub struct #future_name #generics (crate::BoxFuture<#lifetime, #exec_output>); + + impl #generics std::future::Future for #future_name #unbounded_generics { + type Output = #exec_output; + + fn poll(mut self: std::pin::Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> std::task::Poll { + self.0.as_mut().poll(cx) + } + } + + #[cfg(feature = "sync")] + impl #generics #action { + /// Synchronously execute this action. + pub fn run(self) -> #sync_output { + let #sync_arg_mut #sync_arg = crate::sync::TOKIO_RUNTIME.block_on(std::future::IntoFuture::into_future(self)); + #sync_body + } } - }) + }.into() } -/// Finds the single item that matches the predicate. -fn find_item<'a, T>(items: &'a [ImplItem], name: &str, pred: impl Fn(&'a ImplItem) -> Option) -> Result { - let mut found = None; - for item in items { - if let Some(v) = pred(item) { - if found.is_none() { - found = Some(v); - } else { - return Err(Error::new(item.span(), format!("Duplicate {} found", name))); +/* +impl Action for ActionType { + type Future = FutureName; + async fn execute([mut] self) -> OutType { } + [SyncWrap] +} +*/ +struct ActionImpl { + generics: Generics, + lifetime: Lifetime, + action: Type, + future_name: Ident, + exec_self_mut: Option, + exec_output: Type, + exec_body: Block, + sync_wrap: Option, +} + +impl Parse for ActionImpl { + fn parse(input: ParseStream) -> syn::Result { + // impl Action for ActionType + input.parse::()?; + let generics: Generics = input.parse()?; + let mut lifetime = None; + for lt in generics.lifetimes() { + if lifetime.is_some() { + return Err(input.error("only one lifetime argument permitted")); } + lifetime = Some(lt); } + let lifetime = match lifetime { + Some(lt) => lt.lifetime.clone(), + None => Lifetime::new("'static'", generics.span()), + }; + parse_name(input, "Action")?; + input.parse::()?; + let action = input.parse()?; + + let impl_body; + braced!(impl_body in input); + + // type Future = FutureName; + impl_body.parse::()?; + parse_name(&impl_body, "Future")?; + impl_body.parse::()?; + let future_name = impl_body.parse()?; + impl_body.parse::()?; + + // async fn execute([mut] self) -> OutType { } + impl_body.parse::()?; + impl_body.parse::()?; + parse_name(&impl_body, "execute")?; + let exec_args; + parenthesized!(exec_args in impl_body); + let exec_self_mut = exec_args.parse()?; + exec_args.parse::()?; + if !exec_args.is_empty() { + return Err(exec_args.error("unexpected token")); + } + impl_body.parse::]>()?; + let exec_output = impl_body.parse()?; + let exec_body = impl_body.parse()?; + + // Optional SyncWrap. + let sync_wrap = if impl_body.peek(Token![fn]) { + Some(impl_body.parse()?) 
+ } else { + None + }; + + if !impl_body.is_empty() { + return Err(exec_args.error("unexpected token")); + } + + Ok(ActionImpl { + generics, + lifetime, + action, + future_name, + exec_self_mut, + exec_output, + exec_body, + sync_wrap, + }) } - match found { - Some(v) => Ok(v), - None => Err(Error::new(Span::call_site(), format!("No {} found", name))), +} + +// fn sync_wrap([mut] out) -> OutType { } +struct SyncWrap { + arg_mut: Option, + arg: Ident, + sync_output: Type, + sync_body: Block, +} + +impl Parse for SyncWrap { + fn parse(input: ParseStream) -> syn::Result { + input.parse::()?; + parse_name(input, "sync_wrap")?; + let args_input; + parenthesized!(args_input in input); + let arg_mut = args_input.parse()?; + let arg = args_input.parse()?; + if !args_input.is_empty() { + return Err(args_input.error("unexpected token")); + } + input.parse::]>()?; + let sync_output = input.parse()?; + let sync_body = input.parse()?; + + Ok(SyncWrap { + arg_mut, + arg, + sync_output, + sync_body, + }) } } -fn impl_item_type(item: &ImplItem) -> Option<&ImplItemType> { - match item { - ImplItem::Type(t) => Some(t), - _ => None, +/// Parse an identifier with a specific expected value. +fn parse_name(input: ParseStream, name: &str) -> syn::Result<()> { + if input.parse::()?.to_string() != name { + return Err(input.error(format!("expected '{}'", name))); } + Ok(()) } \ No newline at end of file diff --git a/src/action/find.rs b/src/action/find.rs index 337f29c78..468ab2d4e 100644 --- a/src/action/find.rs +++ b/src/action/find.rs @@ -181,9 +181,7 @@ action_impl! { } action_impl_2! { - impl<'a, T> Action for Find<'a, T, Single, ExplicitSession<'a>> - where T: DeserializeOwned + Send - { + impl<'a, T: DeserializeOwned + Send> Action for Find<'a, T, Single, ExplicitSession<'a>> { type Future = FindOneSessionFuture; async fn execute(self) -> Result> { From ae05bdb5d2e7baa7c0345e94094ddf889c659913 Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Sat, 2 Mar 2024 10:28:53 -0500 Subject: [PATCH 09/39] swap to new macro impl --- action_macro/src/lib.rs | 77 ++++++++++++++++++++------------- src/action.rs | 96 +---------------------------------------- src/action/find.rs | 22 +++++----- 3 files changed, 58 insertions(+), 137 deletions(-) diff --git a/action_macro/src/lib.rs b/action_macro/src/lib.rs index 23b893f00..c0c677cc6 100644 --- a/action_macro/src/lib.rs +++ b/action_macro/src/lib.rs @@ -1,12 +1,30 @@ extern crate proc_macro; -use proc_macro::TokenStream; -use proc_macro2::Span; use quote::quote; -use syn::{braced, parenthesized, parse::{Parse, ParseStream}, parse_macro_input, parse_quote, punctuated::Punctuated, spanned::Spanned, Block, Generics, Ident, Lifetime, LifetimeParam, Token, Type}; - +use syn::{ + braced, + parenthesized, + parse::{Parse, ParseStream}, + parse_macro_input, + parse_quote, + parse_quote_spanned, + spanned::Spanned, + Block, + Error, + Generics, + Ident, + Lifetime, + Token, + Type, +}; + +/// Generates: +/// * an `IntoFuture` executing the given method body +/// * an opaque wrapper type for the future in case we want to do something more fancy than +/// BoxFuture. 
+/// * a `run` method for sync execution, optionally with a wrapper function #[proc_macro] -pub fn action_impl_2(input: TokenStream) -> TokenStream { +pub fn action_impl(input: proc_macro::TokenStream) -> proc_macro::TokenStream { let ActionImpl { generics, lifetime, @@ -27,17 +45,12 @@ pub fn action_impl_2(input: TokenStream) -> TokenStream { } let SyncWrap { - arg_mut: sync_arg_mut, - arg: sync_arg, + sync_arg_mut, + sync_arg, sync_output, sync_body, - } = sync_wrap.unwrap_or_else(|| { - SyncWrap { - arg_mut: None, - arg: Ident::new("out", Span::call_site()), - sync_output: exec_output.clone(), - sync_body: parse_quote!({ out }), - } + } = sync_wrap.unwrap_or_else(|| { + parse_quote! { fn sync_wrap(out) -> #exec_output { out } } }); quote! { @@ -77,13 +90,11 @@ pub fn action_impl_2(input: TokenStream) -> TokenStream { }.into() } -/* -impl Action for ActionType { - type Future = FutureName; - async fn execute([mut] self) -> OutType { } - [SyncWrap] -} -*/ +// impl Action for ActionType { +// type Future = FutureName; +// async fn execute([mut] self) -> OutType { } +// [SyncWrap] +// } struct ActionImpl { generics: Generics, lifetime: Lifetime, @@ -109,7 +120,7 @@ impl Parse for ActionImpl { } let lifetime = match lifetime { Some(lt) => lt.lifetime.clone(), - None => Lifetime::new("'static'", generics.span()), + None => parse_quote_spanned! { generics.span() => 'static }, }; parse_name(input, "Action")?; input.parse::()?; @@ -166,8 +177,8 @@ impl Parse for ActionImpl { // fn sync_wrap([mut] out) -> OutType { } struct SyncWrap { - arg_mut: Option, - arg: Ident, + sync_arg_mut: Option, + sync_arg: Ident, sync_output: Type, sync_body: Block, } @@ -178,8 +189,8 @@ impl Parse for SyncWrap { parse_name(input, "sync_wrap")?; let args_input; parenthesized!(args_input in input); - let arg_mut = args_input.parse()?; - let arg = args_input.parse()?; + let sync_arg_mut = args_input.parse()?; + let sync_arg = args_input.parse()?; if !args_input.is_empty() { return Err(args_input.error("unexpected token")); } @@ -188,8 +199,8 @@ impl Parse for SyncWrap { let sync_body = input.parse()?; Ok(SyncWrap { - arg_mut, - arg, + sync_arg_mut, + sync_arg, sync_output, sync_body, }) @@ -198,8 +209,12 @@ impl Parse for SyncWrap { /// Parse an identifier with a specific expected value. fn parse_name(input: ParseStream, name: &str) -> syn::Result<()> { - if input.parse::()?.to_string() != name { - return Err(input.error(format!("expected '{}'", name))); + let ident = input.parse::()?; + if ident.to_string() != name { + return Err(Error::new( + ident.span(), + format!("expected '{}', got '{}'", name, ident), + )); } Ok(()) -} \ No newline at end of file +} diff --git a/src/action.rs b/src/action.rs index e22e8a720..20f67c980 100644 --- a/src/action.rs +++ b/src/action.rs @@ -134,101 +134,7 @@ pub trait Action { } } -/// Generates: -/// * an `IntoFuture` executing the given method body -/// * an opaque wrapper type for the future in case we want to do something more fancy than -/// BoxFuture. -/// * a `run` method for sync execution, optionally with a wrapper function -macro_rules! action_impl { - // Generate with no sync type conversion - ( - impl$(<$lt:lifetime $(, $($at:ident),+)?>)? Action for $action:ty - $(where $wt:ident: $wb:path)? - { - type Future = $f_ty:ident; - async fn execute($($args:ident)+) -> $out:ty $code:block - } - ) => { - crate::action::action_impl! { - impl$(<$lt $(, $($at),+)?>)? Action for $action - $(where $wt: $wb)? 
- { - type Future = $f_ty; - async fn execute($($args)+) -> $out $code - fn sync_wrap(out) -> $out { out } - } - } - }; - // Generate with a sync type conversion - ( - impl$(<$lt:lifetime $(, $($at:ident),+)?>)? Action for $action:ty - $(where $wt:ident: $wb:path)? - { - type Future = $f_ty:ident; - async fn execute($($args:ident)+) -> $out:ty $code:block - fn sync_wrap($($wrap_args:ident)+) -> $sync_out:ty $wrap_code:block - } - ) => { - impl$(<$lt $(, $($at),+)?>)? std::future::IntoFuture for $action - $(where $wt: $wb)? - { - type Output = $out; - type IntoFuture = $f_ty$(<$lt $(, $($at)+)?>)?; - - fn into_future($($args)+) -> Self::IntoFuture { - $f_ty(Box::pin(async move { - $code - })) - } - } - - impl$(<$lt $(, $($at),+)?>)? crate::action::Action for $action - $(where $wt: $wb)? - { - type Output = $out; - } - - crate::action::action_impl_future_wrapper!($f_ty, $out $(, $lt)? $($(, $($at),+)?)?); - - impl$(<$lt $(, $($at),+)?>)? std::future::Future for $f_ty$(<$lt $(, $($at),+)?>)? - $(where $wt: $wb)? - { - type Output = $out; - - fn poll(mut self: std::pin::Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> std::task::Poll { - self.0.as_mut().poll(cx) - } - } - - #[cfg(feature = "sync")] - impl$(<$lt $(, $($at),+)?>)? $action - $(where $wt: $wb)? - { - /// Synchronously execute this action. - pub fn run(self) -> $sync_out { - let $($wrap_args)+ = crate::sync::TOKIO_RUNTIME.block_on(std::future::IntoFuture::into_future(self)); - $wrap_code - } - } - } -} -pub(crate) use action_impl; - -macro_rules! action_impl_future_wrapper { - ($f_ty:ident, $out:ty) => { - /// Opaque future type for action execution. - pub struct $f_ty(crate::BoxFuture<'static, $out>); - }; - ($f_ty:ident, $out:ty, $lt:lifetime) => { - /// Opaque future type for action execution. - pub struct $f_ty<$lt>(crate::BoxFuture<$lt, $out>); - }; - ($f_ty:ident, $out:ty, $lt:lifetime, $($at:ident),+) => { - /// Opaque future type for action execution. - pub struct $f_ty<$lt, $($at),+>(crate::BoxFuture<$lt, $out>); - }; -} -pub(crate) use action_impl_future_wrapper; +pub(crate) use action_macro::action_impl; use crate::Collection; diff --git a/src/action/find.rs b/src/action/find.rs index 468ab2d4e..9cc3caeb4 100644 --- a/src/action/find.rs +++ b/src/action/find.rs @@ -1,6 +1,5 @@ use std::{marker::PhantomData, time::Duration}; -use action_macro::action_impl_2; use bson::{Bson, Document}; use serde::de::DeserializeOwned; @@ -17,19 +16,20 @@ use crate::{ SessionCursor, }; -use super::{action_impl, option_setters, ExplicitSession, ImplicitSession, Single, Multiple}; +use super::{action_impl, option_setters, ExplicitSession, ImplicitSession, Multiple, Single}; impl Collection { /// Finds the documents in the collection matching `filter`. /// - /// `await` will return `Result>` (or `Result>` if a session is provided). + /// `await` will return `Result>` (or `Result>` if a session is + /// provided). pub fn find(&self, filter: Document) -> Find<'_, T> { Find { coll: self, filter, options: None, session: ImplicitSession, - _mode: PhantomData + _mode: PhantomData, } } } @@ -44,7 +44,7 @@ impl Collection { filter, options: None, session: ImplicitSession, - _mode: PhantomData + _mode: PhantomData, } } } @@ -53,14 +53,15 @@ impl Collection { impl crate::sync::Collection { /// Finds the documents in the collection matching `filter`. /// - /// [`run`](Find::run) will return `Result>` (or `Result>` if a session is provided). + /// [`run`](Find::run) will return `Result>` (or `Result>` if a + /// session is provided). 
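    // A minimal sketch of the synchronous form (requires the "sync" feature; assumes a
    // sync `Collection<Document>` named `coll`; values are illustrative only):
    //
    //     let cursor = coll
    //         .find(doc! { "author": "George Orwell" })
    //         .sort(doc! { "title": 1 })
    //         .run()?;
    //     for result in cursor {
    //         println!("{:?}", result?);
    //     }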
pub fn find(&self, filter: Document) -> Find<'_, T> { self.async_collection.find(filter) } } #[cfg(feature = "sync")] -impl crate::sync::Collection where T: DeserializeOwned { +impl crate::sync::Collection { /// Finds a single document in the collection matching `filter`. /// /// [`run`](Find::run) will return `Result>`. @@ -165,8 +166,7 @@ action_impl! { } action_impl! { - impl<'a, T> Action for Find<'a, T, Single, ImplicitSession> - where T: DeserializeOwned + impl<'a, T: DeserializeOwned> Action for Find<'a, T, Single, ImplicitSession> { type Future = FindOneFuture; @@ -180,7 +180,7 @@ action_impl! { } } -action_impl_2! { +action_impl! { impl<'a, T: DeserializeOwned + Send> Action for Find<'a, T, Single, ExplicitSession<'a>> { type Future = FindOneSessionFuture; @@ -197,4 +197,4 @@ action_impl_2! { stream.next().await.transpose() } } -} \ No newline at end of file +} From 10d39c131df4d2362fb1b43ae9731b3c718ba09d Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Mon, 4 Mar 2024 11:50:40 -0500 Subject: [PATCH 10/39] update find_one callers --- src/action/find.rs | 10 ++--- src/client/csfle/client_encryption.rs | 4 +- src/client/session/test.rs | 2 +- src/client/session/test/causal_consistency.rs | 23 ++++------ src/coll.rs | 45 ------------------- src/coll/options.rs | 2 +- src/concern/test.rs | 9 ++-- src/gridfs.rs | 10 ++++- src/gridfs/download.rs | 11 ++--- src/gridfs/upload.rs | 10 ++--- .../server_selection/test/in_window.rs | 2 +- src/sync/coll.rs | 33 -------------- src/sync/test.rs | 4 +- src/test/atlas_connectivity.rs | 2 +- src/test/auth_aws.rs | 22 ++++----- src/test/client.rs | 6 +-- src/test/coll.rs | 30 ++++++------- src/test/csfle.rs | 24 ++++------ src/test/spec/gridfs.rs | 2 +- src/test/spec/oidc.rs | 2 +- src/test/spec/retryable_reads.rs | 15 ++++--- src/test/spec/sessions.rs | 3 +- src/test/spec/transactions.rs | 12 ++--- src/test/spec/unified_runner/operation.rs | 4 +- src/test/spec/v2_runner/operation.rs | 15 +++---- 25 files changed, 104 insertions(+), 198 deletions(-) diff --git a/src/action/find.rs b/src/action/find.rs index 9cc3caeb4..4a4f6c322 100644 --- a/src/action/find.rs +++ b/src/action/find.rs @@ -34,11 +34,11 @@ impl Collection { } } -impl Collection { +impl Collection { /// Finds a single document in the collection matching `filter`. /// /// `await` will return `Result>`. - pub fn find_one_2(&self, filter: Document) -> Find<'_, T, Single> { + pub fn find_one(&self, filter: Document) -> Find<'_, T, Single> { Find { coll: self, filter, @@ -61,12 +61,12 @@ impl crate::sync::Collection { } #[cfg(feature = "sync")] -impl crate::sync::Collection { +impl crate::sync::Collection { /// Finds a single document in the collection matching `filter`. /// /// [`run`](Find::run) will return `Result>`. - pub fn find_one_2(&self, filter: Document) -> Find<'_, T, Single> { - self.async_collection.find_one_2(filter) + pub fn find_one(&self, filter: Document) -> Find<'_, T, Single> { + self.async_collection.find_one(filter) } } diff --git a/src/client/csfle/client_encryption.rs b/src/client/csfle/client_encryption.rs index cbbd33128..d36e38585 100644 --- a/src/client/csfle/client_encryption.rs +++ b/src/client/csfle/client_encryption.rs @@ -99,7 +99,7 @@ impl ClientEncryption { /// Finds a single key document with the given UUID (BSON binary subtype 0x04). /// Returns the result of the internal find() operation on the key vault collection. pub async fn get_key(&self, id: &Binary) -> Result> { - self.key_vault.find_one(doc! 
{ "_id": id }, None).await + self.key_vault.find_one(doc! { "_id": id }).await } /// Finds all documents in the key vault collection. @@ -160,7 +160,7 @@ impl ClientEncryption { key_alt_name: impl AsRef, ) -> Result> { self.key_vault - .find_one(doc! { "keyAltNames": key_alt_name.as_ref() }, None) + .find_one(doc! { "keyAltNames": key_alt_name.as_ref() }) .await } diff --git a/src/client/session/test.rs b/src/client/session/test.rs index 358b5b66e..7ba0b234d 100644 --- a/src/client/session/test.rs +++ b/src/client/session/test.rs @@ -150,7 +150,7 @@ macro_rules! for_each_op { .await; $test_func( "find", - collection_op!($test_name, coll, coll.find_one(doc! { "x": 1 }, None)), + collection_op!($test_name, coll, coll.find_one(doc! { "x": 1 })), ) .await; $test_func( diff --git a/src/client/session/test/causal_consistency.rs b/src/client/session/test/causal_consistency.rs index 89e9b28cf..48d7e901b 100644 --- a/src/client/session/test/causal_consistency.rs +++ b/src/client/session/test/causal_consistency.rs @@ -53,7 +53,8 @@ fn all_session_ops() -> impl Iterator { })); ops.push(op!("find", true, |coll, session| coll - .find_one_with_session(doc! { "x": 1 }, None, session))); + .find_one(doc! { "x": 1 }) + .session(session))); ops.push(op!("find", true, |coll, session| coll .find(doc! { "x": 1 }) @@ -240,9 +241,7 @@ async fn read_includes_after_cluster_time() { for op in all_session_ops().filter(|o| o.is_read) { let command_name = op.name; let mut session = client.start_session().await.unwrap(); - coll.find_one_with_session(None, None, &mut session) - .await - .unwrap(); + coll.find_one(doc! {}).session(&mut session).await.unwrap(); let op_time = session.operation_time().unwrap(); op.execute(coll.clone(), &mut session).await.unwrap(); @@ -288,9 +287,7 @@ async fn find_after_write_includes_after_cluster_time() { .unwrap(); op.execute(coll.clone(), &mut session).await.unwrap(); let op_time = session.operation_time().unwrap(); - coll.find_one_with_session(None, None, &mut session) - .await - .unwrap(); + coll.find_one(doc! {}).session(&mut session).await.unwrap(); let command_started = client.get_command_started_events(&["find"]).pop().unwrap(); assert_eq!( @@ -402,9 +399,7 @@ async fn omit_default_read_concern_level() { .causal_consistency(true) .await .unwrap(); - coll.find_one_with_session(None, None, &mut session) - .await - .unwrap(); + coll.find_one(doc! {}).session(&mut session).await.unwrap(); let op_time = session.operation_time().unwrap(); op.execute(coll.clone(), &mut session).await.unwrap(); @@ -449,9 +444,7 @@ async fn test_causal_consistency_read_concern_merge() { for op in all_session_ops().filter(|o| o.is_read) { let command_name = op.name; - coll.find_one_with_session(None, None, &mut session) - .await - .unwrap(); + coll.find_one(doc! {}).session(&mut session).await.unwrap(); let op_time = session.operation_time().unwrap(); op.execute(coll.clone(), &mut session).await.unwrap(); @@ -482,7 +475,7 @@ async fn omit_cluster_time_standalone() { .database("causal_consistency_11") .collection::("causal_consistency_11"); - coll.find_one(None, None).await.unwrap(); + coll.find_one(doc! {}).await.unwrap(); let (started, _) = client.get_successful_command_execution("find"); started.command.get_document("$clusterTime").unwrap_err(); @@ -501,7 +494,7 @@ async fn cluster_time_sent_in_commands() { .database("causal_consistency_12") .collection::("causal_consistency_12"); - coll.find_one(None, None).await.unwrap(); + coll.find_one(doc! 
{}).await.unwrap(); let (started, _) = client.get_successful_command_execution("find"); started.command.get_document("$clusterTime").unwrap(); diff --git a/src/coll.rs b/src/coll.rs index 7b6926de8..eac63cf61 100644 --- a/src/coll.rs +++ b/src/coll.rs @@ -3,7 +3,6 @@ pub mod options; use std::{borrow::Borrow, collections::HashSet, fmt, fmt::Debug, str::FromStr, sync::Arc}; -use futures_util::stream::StreamExt; use serde::{ de::{DeserializeOwned, Error as DeError}, Deserialize, @@ -228,50 +227,6 @@ where } } -impl Collection -where - T: DeserializeOwned + Unpin + Send + Sync, -{ - /// Finds a single document in the collection matching `filter`. - pub async fn find_one( - &self, - filter: impl Into>, - options: impl Into>, - ) -> Result> { - let mut options = options.into(); - resolve_options!(self, options, [read_concern, selection_criteria]); - - let options: FindOptions = options.map(Into::into).unwrap_or_else(Default::default); - let mut cursor = self - .find(filter.into().unwrap_or_default()) - .with_options(options) - .await?; - cursor.next().await.transpose() - } - - /// Finds a single document in the collection matching `filter` using the provided - /// `ClientSession`. - pub async fn find_one_with_session( - &self, - filter: impl Into>, - options: impl Into>, - session: &mut ClientSession, - ) -> Result> { - let mut options = options.into(); - resolve_read_concern_with_session!(self, options, Some(&mut *session))?; - resolve_selection_criteria_with_session!(self, options, Some(&mut *session))?; - - let options: FindOptions = options.map(Into::into).unwrap_or_else(Default::default); - let mut cursor = self - .find(filter.into().unwrap_or_default()) - .with_options(options) - .session(&mut *session) - .await?; - let mut cursor = cursor.stream(session); - cursor.next().await.transpose() - } -} - impl Collection where T: DeserializeOwned + Send + Sync, diff --git a/src/coll/options.rs b/src/coll/options.rs index 9ec535dda..4d2a75458 100644 --- a/src/coll/options.rs +++ b/src/coll/options.rs @@ -930,7 +930,7 @@ where #[serde(rename_all = "camelCase")] #[builder(field_defaults(default, setter(into)))] #[non_exhaustive] -pub struct FindOneOptions { +pub(crate) struct FindOneOptions { /// If true, partial results will be returned from a mongos rather than an error being /// returned if one or more shards is down. pub allow_partial_results: Option, diff --git a/src/concern/test.rs b/src/concern/test.rs index bbad87f9e..b8adc7033 100644 --- a/src/concern/test.rs +++ b/src/concern/test.rs @@ -8,7 +8,6 @@ use crate::{ FindOneAndDeleteOptions, FindOneAndReplaceOptions, FindOneAndUpdateOptions, - FindOneOptions, InsertManyOptions, InsertOneOptions, ReadConcern, @@ -158,17 +157,15 @@ async fn snapshot_read_concern() { .read_concern(ReadConcern::snapshot()) .build(); session.start_transaction(options).await.unwrap(); - let result = coll.find_one_with_session(None, None, &mut session).await; + let result = coll.find_one(doc! {}).session(&mut session).await; assert!(result.is_ok()); assert_event_contains_read_concern(&client).await; } if client.server_version_lt(4, 9) { - let options = FindOneOptions::builder() - .read_concern(ReadConcern::snapshot()) - .build(); let error = coll - .find_one(None, options) + .find_one(doc! 
{}) + .read_concern(ReadConcern::snapshot()) .await .expect_err("non-transaction find one with snapshot read concern should fail"); // ensure that an error from the server is returned diff --git a/src/gridfs.rs b/src/gridfs.rs index 139e05bbe..40a4191a7 100644 --- a/src/gridfs.rs +++ b/src/gridfs.rs @@ -244,8 +244,14 @@ impl GridFsBucket { filter: Document, options: impl Into>, ) -> Result> { - let find_options = options.into().map(FindOneOptions::from); - self.files().find_one(filter, find_options).await + let find_options = options + .into() + .map(FindOneOptions::from) + .map(FindOptions::from); + self.files() + .find_one(filter) + .with_options(find_options) + .await } /// Renames the file with the given 'id' to the provided `new_filename`. This method returns an diff --git a/src/gridfs/download.rs b/src/gridfs/download.rs index 0aa46f689..2cbfd2c2a 100644 --- a/src/gridfs/download.rs +++ b/src/gridfs/download.rs @@ -14,7 +14,6 @@ use super::{options::GridFsDownloadByNameOptions, Chunk, FilesCollectionDocument use crate::{ bson::{doc, Bson}, error::{ErrorKind, GridFsErrorKind, GridFsFileIdentifier, Result}, - options::FindOneOptions, Collection, Cursor, }; @@ -42,21 +41,19 @@ impl GridFsBucket { } else { (-1, -revision - 1) }; - let options = FindOneOptions::builder() - .sort(doc! { "uploadDate": sort }) - .skip(skip as u64) - .build(); match self .files() - .find_one(doc! { "filename": filename }, options) + .find_one(doc! { "filename": filename }) + .sort(doc! { "uploadDate": sort }) + .skip(skip as u64) .await? { Some(fcd) => Ok(fcd), None => { if self .files() - .find_one(doc! { "filename": filename }, None) + .find_one(doc! { "filename": filename }) .await? .is_some() { diff --git a/src/gridfs/upload.rs b/src/gridfs/upload.rs index 8cceaec8c..2adfe2cda 100644 --- a/src/gridfs/upload.rs +++ b/src/gridfs/upload.rs @@ -19,7 +19,7 @@ use crate::{ client::AsyncDropToken, error::{Error, ErrorKind, GridFsErrorKind, Result}, index::IndexModel, - options::{FindOneOptions, ReadPreference, SelectionCriteria}, + options::{ReadPreference, SelectionCriteria}, Collection, }; @@ -153,14 +153,12 @@ impl GridFsBucket { async fn create_indexes(&self) -> Result<()> { if !self.inner.created_indexes.load(Ordering::SeqCst) { - let find_options = FindOneOptions::builder() - .selection_criteria(SelectionCriteria::ReadPreference(ReadPreference::Primary)) - .projection(doc! { "_id": 1 }) - .build(); if self .files() .clone_with_type::() - .find_one(None, find_options) + .find_one(doc! {}) + .selection_criteria(SelectionCriteria::ReadPreference(ReadPreference::Primary)) + .projection(doc! { "_id": 1 }) .await? .is_none() { diff --git a/src/sdam/description/topology/server_selection/test/in_window.rs b/src/sdam/description/topology/server_selection/test/in_window.rs index de025d8a8..c278b45be 100644 --- a/src/sdam/description/topology/server_selection/test/in_window.rs +++ b/src/sdam/description/topology/server_selection/test/in_window.rs @@ -177,7 +177,7 @@ async fn load_balancing_test() { .collection::("load_balancing_test"); handles.push(runtime::spawn(async move { for _ in 0..iterations { - collection.find_one(None, None).await?; + collection.find_one(doc! 
{}).await?; } Ok(()) })) diff --git a/src/sync/coll.rs b/src/sync/coll.rs index f803d2e9a..c580781c5 100644 --- a/src/sync/coll.rs +++ b/src/sync/coll.rs @@ -10,7 +10,6 @@ use crate::{ FindOneAndDeleteOptions, FindOneAndReplaceOptions, FindOneAndUpdateOptions, - FindOneOptions, InsertManyOptions, InsertOneOptions, ReadConcern, @@ -112,38 +111,6 @@ where } } -impl Collection -where - T: DeserializeOwned + Unpin + Send + Sync, -{ - /// Finds a single document in the collection matching `filter`. - pub fn find_one( - &self, - filter: impl Into>, - options: impl Into>, - ) -> Result> { - crate::sync::TOKIO_RUNTIME.block_on( - self.async_collection - .find_one(filter.into(), options.into()), - ) - } - - /// Finds a single document in the collection matching `filter` using the provided - /// `ClientSession`. - pub fn find_one_with_session( - &self, - filter: impl Into>, - options: impl Into>, - session: &mut ClientSession, - ) -> Result> { - crate::sync::TOKIO_RUNTIME.block_on(self.async_collection.find_one_with_session( - filter.into(), - options.into(), - &mut session.async_client_session, - )) - } -} - impl Collection where T: DeserializeOwned + Send + Sync, diff --git a/src/sync/test.rs b/src/sync/test.rs index da202a57a..88a75887a 100644 --- a/src/sync/test.rs +++ b/src/sync/test.rs @@ -325,7 +325,7 @@ fn collection_generic_bounds() { let coll: Collection = client .database(function_name!()) .collection(function_name!()); - let _result: Result> = coll.find_one(None, None); + let _result: Result> = coll.find_one(doc! {}).run(); #[derive(Serialize)] struct Bar; @@ -430,7 +430,7 @@ fn mixed_sync_and_async() -> Result<()> { async_client .database(DB_NAME) .collection::(COLL_NAME) - .find_one(doc! {}, None) + .find_one(doc! {}) .await })? .unwrap(); diff --git a/src/test/atlas_connectivity.rs b/src/test/atlas_connectivity.rs index 01cbca813..6da943ffa 100644 --- a/src/test/atlas_connectivity.rs +++ b/src/test/atlas_connectivity.rs @@ -34,7 +34,7 @@ async fn run_test(uri_env_var: &str, resolver_config: Option) { .expect("hello should succeed"); let coll = db.collection::("test"); - coll.find_one(None, None) + coll.find_one(doc! {}) .await .expect("findOne should succeed"); } diff --git a/src/test/auth_aws.rs b/src/test/auth_aws.rs index 5363718a8..0bd75b82e 100644 --- a/src/test/auth_aws.rs +++ b/src/test/auth_aws.rs @@ -1,5 +1,7 @@ use std::env::{remove_var, set_var, var}; +use bson::doc; + use crate::{bson::Document, client::auth::aws::test_utils::*, test::DEFAULT_URI, Client}; use super::TestClient; @@ -9,7 +11,7 @@ async fn auth_aws() { let client = TestClient::new().await; let coll = client.database("aws").collection::("somecoll"); - coll.find_one(None, None).await.unwrap(); + coll.find_one(doc! {}).await.unwrap(); } // The TestClient performs operations upon creation that trigger authentication, so the credential @@ -29,7 +31,7 @@ async fn credential_caching() { let client = get_client().await; let coll = client.database("aws").collection::("somecoll"); - coll.find_one(None, None).await.unwrap(); + coll.find_one(doc! {}).await.unwrap(); assert!(cached_credential().await.is_some()); let now = bson::DateTime::now(); @@ -37,7 +39,7 @@ async fn credential_caching() { let client = get_client().await; let coll = client.database("aws").collection::("somecoll"); - coll.find_one(None, None).await.unwrap(); + coll.find_one(doc! 
{}).await.unwrap(); assert!(cached_credential().await.is_some()); assert!(cached_expiration().await > now); @@ -45,7 +47,7 @@ async fn credential_caching() { let client = get_client().await; let coll = client.database("aws").collection::("somecoll"); - match coll.find_one(None, None).await { + match coll.find_one(doc! {}).await { Ok(_) => panic!( "find one should have failed with authentication error due to poisoned cached \ credential" @@ -54,7 +56,7 @@ async fn credential_caching() { } assert!(cached_credential().await.is_none()); - coll.find_one(None, None).await.unwrap(); + coll.find_one(doc! {}).await.unwrap(); assert!(cached_credential().await.is_some()); } @@ -69,7 +71,7 @@ async fn credential_caching_environment_vars() { let client = get_client().await; let coll = client.database("aws").collection::("somecoll"); - coll.find_one(None, None).await.unwrap(); + coll.find_one(doc! {}).await.unwrap(); assert!(cached_credential().await.is_some()); set_var("AWS_ACCESS_KEY_ID", cached_access_key_id().await); @@ -81,7 +83,7 @@ async fn credential_caching_environment_vars() { let client = get_client().await; let coll = client.database("aws").collection::("somecoll"); - coll.find_one(None, None).await.unwrap(); + coll.find_one(doc! {}).await.unwrap(); assert!(cached_credential().await.is_none()); set_var("AWS_ACCESS_KEY_ID", "bad"); @@ -90,7 +92,7 @@ async fn credential_caching_environment_vars() { let client = get_client().await; let coll = client.database("aws").collection::("somecoll"); - match coll.find_one(None, None).await { + match coll.find_one(doc! {}).await { Ok(_) => panic!( "find one should have failed with authentication error due to poisoned environment \ variables" @@ -105,7 +107,7 @@ async fn credential_caching_environment_vars() { let client = get_client().await; let coll = client.database("aws").collection::("somecoll"); - coll.find_one(None, None).await.unwrap(); + coll.find_one(doc! {}).await.unwrap(); assert!(cached_credential().await.is_some()); set_var("AWS_ACCESS_KEY_ID", "bad"); @@ -114,7 +116,7 @@ async fn credential_caching_environment_vars() { let client = get_client().await; let coll = client.database("aws").collection::("somecoll"); - coll.find_one(None, None).await.unwrap(); + coll.find_one(doc! {}).await.unwrap(); remove_var("AWS_ACCESS_KEY_ID"); remove_var("AWS_SECRET_ACCESS_KEY"); diff --git a/src/test/client.rs b/src/test/client.rs index dbf634170..6a863e4c7 100644 --- a/src/test/client.rs +++ b/src/test/client.rs @@ -603,7 +603,7 @@ async fn x509_auth() { client .database(function_name!()) .collection::(function_name!()) - .find_one(None, None) + .find_one(doc! {}) .await .unwrap(); } @@ -632,7 +632,7 @@ async fn plain_auth() { let client = Client::with_options(options).unwrap(); let coll = client.database("ldap").collection("test"); - let doc = coll.find_one(None, None).await.unwrap().unwrap(); + let doc = coll.find_one(doc! {}).await.unwrap().unwrap(); #[derive(Debug, Deserialize, PartialEq)] struct TestDocument { @@ -915,7 +915,7 @@ async fn find_one_and_delete_serde_consistency() { }; coll.insert_one(&doc, None).await.unwrap(); - let rec: Foo = coll.find_one(doc! {}, None).await.unwrap().unwrap(); + let rec: Foo = coll.find_one(doc! {}).await.unwrap().unwrap(); assert_eq!(doc.problematic, rec.problematic); let rec: Foo = coll .find_one_and_delete(doc! 
{}, None) diff --git a/src/test/coll.rs b/src/test/coll.rs index 63a757dd5..4f86dd35a 100644 --- a/src/test/coll.rs +++ b/src/test/coll.rs @@ -21,7 +21,6 @@ use crate::{ DeleteOptions, DropCollectionOptions, FindOneAndDeleteOptions, - FindOneOptions, FindOptions, Hint, IndexOptions, @@ -683,18 +682,15 @@ async fn no_read_preference_to_standalone() { return; } - let options = FindOneOptions::builder() + client + .database(function_name!()) + .collection::(function_name!()) + .find_one(doc! {}) .selection_criteria(SelectionCriteria::ReadPreference( ReadPreference::SecondaryPreferred { options: Default::default(), }, )) - .build(); - - client - .database(function_name!()) - .collection::(function_name!()) - .find_one(None, options) .await .unwrap(); @@ -743,7 +739,7 @@ where { coll.insert_one(insert_data.clone(), None).await.unwrap(); let result = coll - .find_one(to_document(&insert_data).unwrap(), None) + .find_one(to_document(&insert_data).unwrap()) .await .unwrap(); match result { @@ -810,7 +806,7 @@ async fn typed_find_one_and_replace() { .unwrap(); assert_eq!(result, insert_data); - let result = coll.find_one(doc! { "x": 2 }, None).await.unwrap().unwrap(); + let result = coll.find_one(doc! { "x": 2 }).await.unwrap().unwrap(); assert_eq!(result, replacement); } @@ -835,7 +831,7 @@ async fn typed_replace_one() { .await .unwrap(); - let result = coll.find_one(doc! { "x": 2 }, None).await.unwrap().unwrap(); + let result = coll.find_one(doc! { "x": 2 }).await.unwrap().unwrap(); assert_eq!(result, replacement); } @@ -917,7 +913,7 @@ async fn collection_options_inherited() { coll.find(doc! {}).await.unwrap(); assert_options_inherited(&client, "find").await; - coll.find_one(None, None).await.unwrap(); + coll.find_one(doc! {}).await.unwrap(); assert_options_inherited(&client, "find").await; coll.count_documents(doc! {}).await.unwrap(); @@ -957,7 +953,7 @@ async fn collection_generic_bounds() { let coll: Collection = client .database(function_name!()) .collection(function_name!()); - let _result: Result> = coll.find_one(None, None).await; + let _result: Result> = coll.find_one(doc! {}).await; #[derive(Serialize)] struct Bar; @@ -1174,7 +1170,7 @@ async fn configure_human_readable_serialization() { // instead. let document_collection = non_human_readable_collection.clone_with_type::(); let doc = document_collection - .find_one(doc! { "id": 0 }, None) + .find_one(doc! { "id": 0 }) .await .unwrap() .unwrap(); @@ -1193,7 +1189,7 @@ async fn configure_human_readable_serialization() { .unwrap(); let doc = document_collection - .find_one(doc! { "id": 1 }, None) + .find_one(doc! { "id": 1 }) .await .unwrap() .unwrap(); @@ -1222,7 +1218,7 @@ async fn configure_human_readable_serialization() { // Proper deserialization to a string demonstrates that the data was correctly serialized as a // string. human_readable_collection - .find_one(doc! { "id": 0 }, None) + .find_one(doc! { "id": 0 }) .await .unwrap(); @@ -1239,7 +1235,7 @@ async fn configure_human_readable_serialization() { .unwrap(); human_readable_collection - .find_one(doc! { "id": 1 }, None) + .find_one(doc! { "id": 1 }) .await .unwrap(); } diff --git a/src/test/csfle.rs b/src/test/csfle.rs index 8bc4bf710..22261ae03 100644 --- a/src/test/csfle.rs +++ b/src/test/csfle.rs @@ -212,7 +212,7 @@ async fn custom_key_material() -> Result<()> { .key_material(key) .await?; let mut key_doc = datakeys - .find_one(doc! { "_id": id.clone() }, None) + .find_one(doc! { "_id": id.clone() }) .await? .unwrap(); datakeys.delete_one(doc! 
{ "_id": id}).await?; @@ -383,7 +383,7 @@ async fn data_key_double_encryption() -> Result<()> { None, ) .await?; - let found = coll.find_one(doc! { "_id": provider.name() }, None).await?; + let found = coll.find_one(doc! { "_id": provider.name() }).await?; assert_eq!( found.as_ref().and_then(|doc| doc.get("value")), Some(&Bson::String(format!("hello {}", provider.name()))), @@ -858,7 +858,7 @@ async fn run_corpus_test(local_schema: bool) -> Result<()> { .collection::("coll"); let id = coll.insert_one(corpus_copied, None).await?.inserted_id; let corpus_decrypted = coll - .find_one(doc! { "_id": id.clone() }, None) + .find_one(doc! { "_id": id.clone() }) .await? .expect("document lookup failed"); assert_eq!(corpus, corpus_decrypted); @@ -868,7 +868,7 @@ async fn run_corpus_test(local_schema: bool) -> Result<()> { let corpus_encrypted_actual = client .database("db") .collection::("coll") - .find_one(doc! { "_id": id }, None) + .find_one(doc! { "_id": id }) .await? .expect("encrypted document lookup failed"); for (name, field) in &corpus_encrypted_expected { @@ -1592,7 +1592,7 @@ impl DeadlockTestCase { let found = client_encrypted .database("db") .collection::("coll") - .find_one(doc! { "_id": 0 }, None) + .find_one(doc! { "_id": 0 }) .await?; assert_eq!(found, Some(doc! { "_id": 0, "encrypted": "string0" })); @@ -3442,12 +3442,9 @@ async fn fle2_example() -> Result<()> { // Encrypt a find. let found = encrypted_coll - .find_one( - doc! { - "encryptedIndexed": "indexedValue", - }, - None, - ) + .find_one(doc! { + "encryptedIndexed": "indexedValue", + }) .await? .unwrap(); assert_eq!("indexedValue", found.get_str("encryptedIndexed")?); @@ -3457,10 +3454,7 @@ async fn fle2_example() -> Result<()> { let unencrypted_coll = test_client .database("docsExamples") .collection::("encrypted"); - let found = unencrypted_coll - .find_one(doc! { "_id": 1 }, None) - .await? - .unwrap(); + let found = unencrypted_coll.find_one(doc! { "_id": 1 }).await?.unwrap(); assert_eq!( Some(ElementType::Binary), found.get("encryptedIndexed").map(Bson::element_type) diff --git a/src/test/spec/gridfs.rs b/src/test/spec/gridfs.rs index bf55830a6..f08aada17 100644 --- a/src/test/spec/gridfs.rs +++ b/src/test/spec/gridfs.rs @@ -319,7 +319,7 @@ async fn assert_no_chunks_written(bucket: &GridFsBucket, id: &Bson) { assert!(bucket .chunks() .clone_with_type::() - .find_one(doc! { "files_id": id }, None) + .find_one(doc! { "files_id": id }) .await .unwrap() .is_none()); diff --git a/src/test/spec/oidc.rs b/src/test/spec/oidc.rs index bfb55a6f8..90f526d44 100644 --- a/src/test/spec/oidc.rs +++ b/src/test/spec/oidc.rs @@ -96,7 +96,7 @@ async fn human_single_principal_implicit_username() -> anyhow::Result<()> { client .database("test") .collection::("test") - .find_one(None, None) + .find_one(doc! {}) .await?; assert_eq!(1, *(*call_count).lock().unwrap()); Ok(()) diff --git a/src/test/spec/retryable_reads.rs b/src/test/spec/retryable_reads.rs index ce8b60d92..b6e14a5ae 100644 --- a/src/test/spec/retryable_reads.rs +++ b/src/test/spec/retryable_reads.rs @@ -1,4 +1,4 @@ -use std::{sync::Arc, time::Duration}; +use std::{future::IntoFuture, sync::Arc, time::Duration}; use bson::doc; @@ -60,10 +60,13 @@ async fn retry_releases_connection() { let failpoint = FailPoint::fail_command(&["find"], FailPointMode::Times(1), Some(options)); let _fp_guard = client.enable_failpoint(failpoint, None).await.unwrap(); - runtime::timeout(Duration::from_secs(1), collection.find_one(doc! 
{}, None)) - .await - .expect("operation should not time out") - .expect("find should succeed"); + runtime::timeout( + Duration::from_secs(1), + collection.find_one(doc! {}).into_future(), + ) + .await + .expect("operation should not time out") + .expect("find should succeed"); } /// Prose test from retryable reads spec verifying that PoolClearedErrors are retried. @@ -110,7 +113,7 @@ async fn retry_read_pool_cleared() { let mut tasks: Vec> = Vec::new(); for _ in 0..2 { let coll = collection.clone(); - let task = runtime::spawn(async move { coll.find_one(doc! {}, None).await }); + let task = runtime::spawn(async move { coll.find_one(doc! {}).await }); tasks.push(task); } diff --git a/src/test/spec/sessions.rs b/src/test/spec/sessions.rs index 20bac8ec2..53f03f65f 100644 --- a/src/test/spec/sessions.rs +++ b/src/test/spec/sessions.rs @@ -285,7 +285,8 @@ async fn sessions_not_supported_explicit_session_error() { let coll = client.database(name).collection(name); let error = coll - .find_one_with_session(doc! {}, None, &mut session) + .find_one(doc! {}) + .session(&mut session) .await .unwrap_err(); assert!(matches!(*error.kind, ErrorKind::SessionsNotSupported)); diff --git a/src/test/spec/transactions.rs b/src/test/spec/transactions.rs index 5fd40d39f..c7336d8a7 100644 --- a/src/test/spec/transactions.rs +++ b/src/test/spec/transactions.rs @@ -86,7 +86,7 @@ async fn deserialize_recovery_token() { .collection(function_name!()); session.start_transaction(None).await.unwrap(); assert!(session.transaction.recovery_token.is_none()); - let result = coll.find_one_with_session(None, None, &mut session).await; + let result = coll.find_one(doc! {}).session(&mut session).await; assert!(result.is_err()); // Assert that the deserialization failed. // Nevertheless, the recovery token should have been retrieved from the ok: 1 response. @@ -111,7 +111,7 @@ async fn convenient_api_custom_error() { coll, |session, coll| { async move { - coll.find_one_with_session(None, None, session).await?; + coll.find_one(doc! {}).session(session).await?; Err(Error::custom(MyErr)) } .boxed() @@ -144,7 +144,7 @@ async fn convenient_api_returned_value() { coll, |session, coll| { async move { - coll.find_one_with_session(None, None, session).await?; + coll.find_one(doc! {}).session(session).await?; Ok(42) } .boxed() @@ -175,7 +175,7 @@ async fn convenient_api_retry_timeout_callback() { coll, |session, coll| { async move { - coll.find_one_with_session(None, None, session).await?; + coll.find_one(doc! {}).session(session).await?; let mut err = Error::custom(42); err.add_label(TRANSIENT_TRANSACTION_ERROR); Err(err) @@ -232,7 +232,7 @@ async fn convenient_api_retry_timeout_commit_unknown() { coll, |session, coll| { async move { - coll.find_one_with_session(None, None, session).await?; + coll.find_one(doc! {}).session(session).await?; Ok(()) } .boxed() @@ -286,7 +286,7 @@ async fn convenient_api_retry_timeout_commit_transient() { coll, |session, coll| { async move { - coll.find_one_with_session(None, None, session).await?; + coll.find_one(doc! 
{}).session(session).await?; Ok(()) } .boxed() diff --git a/src/test/spec/unified_runner/operation.rs b/src/test/spec/unified_runner/operation.rs index c1919fda6..5d9a8a252 100644 --- a/src/test/spec/unified_runner/operation.rs +++ b/src/test/spec/unified_runner/operation.rs @@ -1087,8 +1087,10 @@ impl TestOperation for FindOne { ) -> BoxFuture<'a, Result>> { async move { let collection = test_runner.get_collection(id).await; + let options: FindOptions = self.options.clone().into(); let result = collection - .find_one(self.filter.clone(), self.options.clone()) + .find_one(self.filter.clone().unwrap_or_default()) + .with_options(options) .await?; match result { Some(result) => Ok(Some(Bson::from(result).into())), diff --git a/src/test/spec/v2_runner/operation.rs b/src/test/spec/v2_runner/operation.rs index 06fd16b65..fff052554 100644 --- a/src/test/spec/v2_runner/operation.rs +++ b/src/test/spec/v2_runner/operation.rs @@ -684,17 +684,12 @@ impl TestOperation for FindOne { session: Option<&'a mut ClientSession>, ) -> BoxFuture<'a, Result>> { async move { + let action = collection + .find_one(self.filter.clone().unwrap_or_default()) + .with_options(self.options.clone().map(FindOptions::from)); let result = match session { - Some(session) => { - collection - .find_one_with_session(self.filter.clone(), self.options.clone(), session) - .await? - } - None => { - collection - .find_one(self.filter.clone(), self.options.clone()) - .await? - } + Some(session) => action.session(session).await?, + None => action.await?, }; match result { Some(result) => Ok(Some(Bson::from(result))), From eacd2b4db122d50d2cdcf6606862a59a3211de25 Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Mon, 4 Mar 2024 13:35:08 -0500 Subject: [PATCH 11/39] convert find_one_and_delete --- src/action.rs | 1 + src/action/find_and_modify.rs | 98 ++++++++++++++++++++++++ src/action/update.rs | 1 - src/coll.rs | 6 +- src/operation.rs | 2 +- src/operation/find_and_modify.rs | 48 +++++++----- src/operation/find_and_modify/options.rs | 8 +- src/operation/update.rs | 55 ++++++------- 8 files changed, 158 insertions(+), 61 deletions(-) create mode 100644 src/action/find_and_modify.rs diff --git a/src/action.rs b/src/action.rs index 20f67c980..0e7e96f41 100644 --- a/src/action.rs +++ b/src/action.rs @@ -11,6 +11,7 @@ mod distinct; mod drop; mod drop_index; mod find; +mod find_and_modify; mod list_collections; mod list_databases; mod list_indexes; diff --git a/src/action/find_and_modify.rs b/src/action/find_and_modify.rs new file mode 100644 index 000000000..074fc18bf --- /dev/null +++ b/src/action/find_and_modify.rs @@ -0,0 +1,98 @@ +use std::{marker::PhantomData, time::Duration}; + +use bson::{Bson, Document}; +use serde::de::DeserializeOwned; + +use crate::{coll::options::{FindOneAndDeleteOptions, Hint}, collation::Collation, operation::find_and_modify::options::{FindAndModifyOptions, Modification}, options::WriteConcern, ClientSession, Collection}; +use crate::error::Result; +use crate::operation::FindAndModify as Op; + +use super::{action_impl, option_setters}; + +impl Collection { + /// Atomically finds up to one document in the collection matching `filter` and deletes it. + /// + /// This operation will retry once upon failure if the connection and encountered error support + /// retryability. See the documentation + /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on + /// retryable writes. + /// + /// `await` will return `Result>`. 
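    // A minimal usage sketch of the builder defined below (assumes a
    // `Collection<Document>` named `coll`; the filter and sort values are
    // illustrative only):
    //
    //     let deleted = coll
    //         .find_one_and_delete_2(doc! { "status": "stale" })
    //         .sort(doc! { "created_at": 1 })
    //         .await?;
    //     if let Some(doc) = deleted {
    //         println!("removed {:?}", doc);
    //     }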
+ pub fn find_one_and_delete_2( + &self, + filter: Document, + ) -> FindAndModify<'_, T, Delete> { + FindAndModify { + coll: self, + filter, + modification: Modification::Delete, + options: None, + session: None, + _mode: PhantomData, + } + } +} + +#[must_use] +pub struct FindAndModify<'a, T, Mode> { + coll: &'a Collection, + filter: Document, + modification: Modification, + options: Option, + session: Option<&'a mut ClientSession>, + _mode: PhantomData, +} + +pub struct Delete; + +impl<'a, T, Mode> FindAndModify<'a, T, Mode> { + fn options(&mut self) -> &mut FindAndModifyOptions { + self.options.get_or_insert_with(::default) + } + + /// Runs the operation using the provided session. + pub fn session( + mut self, + value: impl Into<&'a mut ClientSession>, + ) -> Self { + self.session = Some(value.into()); + self + } +} + +impl<'a, T> FindAndModify<'a, T, Delete> { + /// Set all options. Note that this will replace all previous values set. + pub fn with_options(mut self, value: impl Into>) -> Self { + self.options = value.into().map(FindAndModifyOptions::from); + self + } + + option_setters! { FindOneAndDeleteOptions; + max_time: Duration, + projection: Document, + sort: Document, + write_concern: WriteConcern, + collation: Collation, + hint: Hint, + let_vars: Document, + comment: Bson, + } +} + +action_impl! { + impl<'a, T: DeserializeOwned + Send> Action for FindAndModify<'a, T, Delete> { + type Future = FindAndDeleteFuture; + + async fn execute(mut self) -> Result> { + resolve_write_concern_with_session!(self.coll, self.options, self.session.as_ref())?; + + let op = Op::::with_modification( + self.coll.namespace(), + self.filter, + self.modification, + self.options, + ); + self.coll.client().execute_operation(op, self.session).await + } + } +} \ No newline at end of file diff --git a/src/action/update.rs b/src/action/update.rs index 231b8d1d4..466c5f09c 100644 --- a/src/action/update.rs +++ b/src/action/update.rs @@ -143,7 +143,6 @@ action_impl! 
{ self.update, self.multi, self.options, - self.coll.human_readable_serialization(), ); self.coll.client().execute_operation(op, self.session).await } diff --git a/src/coll.rs b/src/coll.rs index eac63cf61..0ea116b98 100644 --- a/src/coll.rs +++ b/src/coll.rs @@ -221,10 +221,6 @@ where self.client().execute_operation(op, None).await?; Ok(()) } - - pub(crate) fn human_readable_serialization(&self) -> bool { - self.inner.human_readable_serialization - } } impl Collection @@ -635,7 +631,7 @@ where false, options.map(UpdateOptions::from_replace_options), self.inner.human_readable_serialization, - ); + )?; self.client().execute_operation(update, session).await } diff --git a/src/operation.rs b/src/operation.rs index 829958345..3cf389bf4 100644 --- a/src/operation.rs +++ b/src/operation.rs @@ -11,7 +11,7 @@ pub(crate) mod drop_collection; pub(crate) mod drop_database; mod drop_indexes; mod find; -mod find_and_modify; +pub(crate) mod find_and_modify; mod get_more; mod insert; pub(crate) mod list_collections; diff --git a/src/operation/find_and_modify.rs b/src/operation/find_and_modify.rs index 04e6a538d..fc003f568 100644 --- a/src/operation/find_and_modify.rs +++ b/src/operation/find_and_modify.rs @@ -1,6 +1,6 @@ -mod options; +pub(crate) mod options; -use std::fmt::Debug; +use std::{fmt::Debug, marker::PhantomData}; use bson::{from_slice, RawBson}; use serde::{de::DeserializeOwned, Deserialize, Serialize}; @@ -30,16 +30,34 @@ use crate::{ options::WriteConcern, }; -pub(crate) struct FindAndModify<'a, R, T: DeserializeOwned> { +use super::UpdateOrReplace; + +pub(crate) struct FindAndModify { ns: Namespace, query: Document, - modification: Modification<'a, R>, - human_readable_serialization: Option, + modification: Modification, options: Option, - _phantom: std::marker::PhantomData, + _phantom: PhantomData T>, } -impl FindAndModify<'_, (), T> { +impl FindAndModify { + pub(crate) fn with_modification( + ns: Namespace, + query: Document, + modification: Modification, + options: Option, + ) -> Self { + Self { + ns, + query, + modification, + options, + _phantom: PhantomData, + } + } +} + +impl FindAndModify { pub fn with_delete( ns: Namespace, query: Document, @@ -49,7 +67,6 @@ impl FindAndModify<'_, (), T> { ns, query, modification: Modification::Delete, - human_readable_serialization: None, options: options.map(Into::into), _phantom: Default::default(), } @@ -68,33 +85,29 @@ impl FindAndModify<'_, (), T> { ns, query, modification: Modification::Update(update.into()), - human_readable_serialization: None, options: options.map(Into::into), _phantom: Default::default(), }) } -} -impl<'a, R: Serialize, T: DeserializeOwned> FindAndModify<'a, R, T> { - pub fn with_replace( + pub fn with_replace( ns: Namespace, query: Document, - replacement: &'a R, + replacement: &R, options: Option, human_readable_serialization: bool, ) -> Result { Ok(FindAndModify { ns, query, - modification: Modification::Update(replacement.into()), - human_readable_serialization: Some(human_readable_serialization), + modification: Modification::Update(UpdateOrReplace::replacement(replacement, human_readable_serialization)?), options: options.map(Into::into), _phantom: Default::default(), }) } } -impl<'a, R: Serialize, T: DeserializeOwned> OperationWithDefaults for FindAndModify<'a, R, T> { +impl OperationWithDefaults for FindAndModify { type O = Option; type Command = RawDocumentBuf; const NAME: &'static str = "findAndModify"; @@ -120,8 +133,7 @@ impl<'a, R: Serialize, T: DeserializeOwned> OperationWithDefaults for FindAndMod 
Modification::Delete => ("remove", true.into()), Modification::Update(update_or_replace) => ( "update", - update_or_replace - .to_raw_bson(self.human_readable_serialization.unwrap_or_default())?, + update_or_replace.to_raw_bson()?, ), }; body.append(key, modification); diff --git a/src/operation/find_and_modify/options.rs b/src/operation/find_and_modify/options.rs index fcdb90af0..c65e00f98 100644 --- a/src/operation/find_and_modify/options.rs +++ b/src/operation/find_and_modify/options.rs @@ -19,16 +19,16 @@ use crate::{ }; #[derive(Clone, Debug)] -pub(super) enum Modification<'a, T> { +pub(crate) enum Modification { Delete, - Update(UpdateOrReplace<'a, T>), + Update(UpdateOrReplace), } #[serde_with::skip_serializing_none] -#[derive(Clone, Debug, TypedBuilder, Serialize)] +#[derive(Clone, Debug, TypedBuilder, Serialize, Default)] #[builder(field_defaults(setter(into)))] #[serde(rename_all = "camelCase")] -pub(super) struct FindAndModifyOptions { +pub(crate) struct FindAndModifyOptions { #[builder(default)] pub(crate) sort: Option, diff --git a/src/operation/update.rs b/src/operation/update.rs index 3d5d34fa0..bee3113c5 100644 --- a/src/operation/update.rs +++ b/src/operation/update.rs @@ -16,13 +16,17 @@ use crate::{ }; #[derive(Clone, Debug)] -pub(crate) enum UpdateOrReplace<'a, T = ()> { +pub(crate) enum UpdateOrReplace { UpdateModifications(UpdateModifications), - Replacement(&'a T), + Replacement(RawDocumentBuf), } -impl<'a, T: Serialize> UpdateOrReplace<'a, T> { - pub(crate) fn to_raw_bson(&self, human_readable_serialization: bool) -> Result { +impl UpdateOrReplace { + pub(crate) fn replacement(update: &T, human_readable_serialization: bool) -> Result { + Ok(Self::Replacement(to_raw_document_buf_with_options(update, human_readable_serialization)?)) + } + + pub(crate) fn to_raw_bson(&self) -> Result { match self { Self::UpdateModifications(update_modifications) => match update_modifications { UpdateModifications::Document(document) => { @@ -30,39 +34,30 @@ impl<'a, T: Serialize> UpdateOrReplace<'a, T> { } UpdateModifications::Pipeline(pipeline) => bson_util::to_raw_bson_array(pipeline), }, - Self::Replacement(replacement) => { - let replacement_doc = - to_raw_document_buf_with_options(replacement, human_readable_serialization)?; + Self::Replacement(replacement_doc) => { bson_util::replacement_raw_document_check(&replacement_doc)?; - Ok(replacement_doc.into()) + Ok(replacement_doc.clone().into()) } } } } -impl From for UpdateOrReplace<'_> { +impl From for UpdateOrReplace { fn from(update_modifications: UpdateModifications) -> Self { Self::UpdateModifications(update_modifications) } } -impl<'a, T: Serialize> From<&'a T> for UpdateOrReplace<'a, T> { - fn from(t: &'a T) -> Self { - Self::Replacement(t) - } -} - #[derive(Debug)] -pub(crate) struct Update<'a, T = ()> { +pub(crate) struct Update { ns: Namespace, filter: Document, - update: UpdateOrReplace<'a, T>, + update: UpdateOrReplace, multi: Option, options: Option, - human_readable_serialization: bool, } -impl Update<'_> { +impl Update { #[cfg(test)] fn empty() -> Self { Self::with_update( @@ -71,7 +66,6 @@ impl Update<'_> { UpdateModifications::Document(doc! 
{}), false, None, - false, ) } @@ -81,7 +75,6 @@ impl Update<'_> { update: UpdateModifications, multi: bool, options: Option, - human_readable_serialization: bool, ) -> Self { Self { ns, @@ -89,32 +82,30 @@ impl Update<'_> { update: update.into(), multi: multi.then_some(true), options, - human_readable_serialization, } } } -impl<'a, T: Serialize> Update<'a, T> { - pub(crate) fn with_replace( +impl Update { + pub(crate) fn with_replace( ns: Namespace, filter: Document, - update: &'a T, + update: &T, multi: bool, options: Option, human_readable_serialization: bool, - ) -> Self { - Self { + ) -> Result { + Ok(Self { ns, filter, - update: update.into(), + update: UpdateOrReplace::replacement(update, human_readable_serialization)?, multi: multi.then_some(true), options, - human_readable_serialization, - } + }) } } -impl<'a, T: Serialize> OperationWithDefaults for Update<'a, T> { +impl OperationWithDefaults for Update { type O = UpdateResult; type Command = RawDocumentBuf; @@ -127,7 +118,7 @@ impl<'a, T: Serialize> OperationWithDefaults for Update<'a, T> { let mut update = rawdoc! { "q": RawDocumentBuf::from_document(&self.filter)?, - "u": self.update.to_raw_bson(self.human_readable_serialization)?, + "u": self.update.to_raw_bson()?, }; if let Some(ref options) = self.options { From 5fb898ad7f30cf007a3d1932be9a13c207c9a93d Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Mon, 4 Mar 2024 13:58:17 -0500 Subject: [PATCH 12/39] generify FindAndModify action impl --- action_macro/src/lib.rs | 19 ++++++++++++++----- src/action/find_and_modify.rs | 4 ++-- 2 files changed, 16 insertions(+), 7 deletions(-) diff --git a/action_macro/src/lib.rs b/action_macro/src/lib.rs index c0c677cc6..bfa178ebf 100644 --- a/action_macro/src/lib.rs +++ b/action_macro/src/lib.rs @@ -30,13 +30,15 @@ pub fn action_impl(input: proc_macro::TokenStream) -> proc_macro::TokenStream { lifetime, action, future_name, + future_generics, exec_self_mut, exec_output, exec_body, sync_wrap, } = parse_macro_input!(input as ActionImpl); - let mut unbounded_generics = generics.clone(); + let future_generics = future_generics.unwrap_or_else(|| generics.clone()); + let mut unbounded_generics = future_generics.clone(); for lt in unbounded_generics.lifetimes_mut() { lt.bounds.clear(); } @@ -69,9 +71,9 @@ pub fn action_impl(input: proc_macro::TokenStream) -> proc_macro::TokenStream { } } - pub struct #future_name #generics (crate::BoxFuture<#lifetime, #exec_output>); + pub struct #future_name #future_generics (crate::BoxFuture<#lifetime, #exec_output>); - impl #generics std::future::Future for #future_name #unbounded_generics { + impl #future_generics std::future::Future for #future_name #unbounded_generics { type Output = #exec_output; fn poll(mut self: std::pin::Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> std::task::Poll { @@ -91,7 +93,7 @@ pub fn action_impl(input: proc_macro::TokenStream) -> proc_macro::TokenStream { } // impl Action for ActionType { -// type Future = FutureName; +// type Future = FutureName; // async fn execute([mut] self) -> OutType { } // [SyncWrap] // } @@ -100,6 +102,7 @@ struct ActionImpl { lifetime: Lifetime, action: Type, future_name: Ident, + future_generics: Option, exec_self_mut: Option, exec_output: Type, exec_body: Block, @@ -129,11 +132,16 @@ impl Parse for ActionImpl { let impl_body; braced!(impl_body in input); - // type Future = FutureName; + // type Future = FutureName; impl_body.parse::()?; parse_name(&impl_body, "Future")?; impl_body.parse::()?; let future_name = impl_body.parse()?; + let 
future_generics = if impl_body.peek(Token![<]) { + Some(impl_body.parse()?) + } else { + None + }; impl_body.parse::()?; // async fn execute([mut] self) -> OutType { } @@ -167,6 +175,7 @@ impl Parse for ActionImpl { lifetime, action, future_name, + future_generics, exec_self_mut, exec_output, exec_body, diff --git a/src/action/find_and_modify.rs b/src/action/find_and_modify.rs index 074fc18bf..adc73d90f 100644 --- a/src/action/find_and_modify.rs +++ b/src/action/find_and_modify.rs @@ -80,8 +80,8 @@ impl<'a, T> FindAndModify<'a, T, Delete> { } action_impl! { - impl<'a, T: DeserializeOwned + Send> Action for FindAndModify<'a, T, Delete> { - type Future = FindAndDeleteFuture; + impl<'a, T: DeserializeOwned + Send, Mode> Action for FindAndModify<'a, T, Mode> { + type Future = FindAndDeleteFuture<'a, T: DeserializeOwned + Send>; async fn execute(mut self) -> Result> { resolve_write_concern_with_session!(self.coll, self.options, self.session.as_ref())?; From 53bacb55c7ffa6a3ccf469e7d3487c948d7b9e92 Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Mon, 4 Mar 2024 14:07:13 -0500 Subject: [PATCH 13/39] add sync --- src/action.rs | 1 + src/action/find_and_modify.rs | 19 +++++++++++++++++++ 2 files changed, 20 insertions(+) diff --git a/src/action.rs b/src/action.rs index 0e7e96f41..65d83e65c 100644 --- a/src/action.rs +++ b/src/action.rs @@ -34,6 +34,7 @@ pub use distinct::Distinct; pub use drop::{DropCollection, DropDatabase}; pub use drop_index::DropIndex; pub use find::Find; +pub use find_and_modify::FindAndModify; pub use list_collections::ListCollections; pub use list_databases::ListDatabases; pub use list_indexes::ListIndexes; diff --git a/src/action/find_and_modify.rs b/src/action/find_and_modify.rs index adc73d90f..61485e1a8 100644 --- a/src/action/find_and_modify.rs +++ b/src/action/find_and_modify.rs @@ -33,6 +33,25 @@ impl Collection { } } +#[cfg(feature = "sync")] +impl crate::sync::Collection { + /// Atomically finds up to one document in the collection matching `filter` and deletes it. + /// + /// This operation will retry once upon failure if the connection and encountered error support + /// retryability. See the documentation + /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on + /// retryable writes. + /// + /// [`run`](FindAndModify::run) will return `Result>`. + pub fn find_one_and_delete_2( + &self, + filter: Document, + ) -> FindAndModify<'_, T, Delete> { + self.async_collection.find_one_and_delete_2(filter) + } +} + +/// Atomically find up to one document in the collection matching a filter and modify it. Construct with [`Collection::find_one_and_delete`]. 
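With the sync wrapper now wired up, the blocking form of the same call might read as follows (a sketch, not part of the patch; it requires the `sync` feature and still uses the interim `_2` name at this point in the series):

use mongodb::{bson::{doc, Document}, error::Result};

// Sketch only: `run()` executes the pending action on the blocking runtime.
fn delete_first_match_sync(coll: &mongodb::sync::Collection<Document>) -> Result<Option<Document>> {
    coll.find_one_and_delete_2(doc! { "x": 1 }).run()
}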
#[must_use] pub struct FindAndModify<'a, T, Mode> { coll: &'a Collection, From 599f8a57c21444aeb8f733b017f3d949013f616d Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Mon, 4 Mar 2024 14:14:23 -0500 Subject: [PATCH 14/39] update find_one_and_delete callers --- src/action/find_and_modify.rs | 44 +++++++++--------- src/client/session/test.rs | 6 +-- src/client/session/test/causal_consistency.rs | 3 +- src/coll.rs | 46 ------------------- src/concern/test.rs | 45 ++++++++---------- src/operation/find_and_modify.rs | 31 +++---------- src/operation/update.rs | 10 +++- src/sync/coll.rs | 38 --------------- src/test/client.rs | 8 +--- src/test/coll.rs | 13 ++++-- src/test/spec/crud_v1/find_one_and_delete.rs | 3 +- src/test/spec/retryable_writes.rs | 2 +- src/test/spec/sessions.rs | 3 +- src/test/spec/unified_runner/operation.rs | 3 +- src/test/spec/v2_runner/operation.rs | 21 ++------- 15 files changed, 80 insertions(+), 196 deletions(-) diff --git a/src/action/find_and_modify.rs b/src/action/find_and_modify.rs index 61485e1a8..82b5cd65f 100644 --- a/src/action/find_and_modify.rs +++ b/src/action/find_and_modify.rs @@ -3,9 +3,18 @@ use std::{marker::PhantomData, time::Duration}; use bson::{Bson, Document}; use serde::de::DeserializeOwned; -use crate::{coll::options::{FindOneAndDeleteOptions, Hint}, collation::Collation, operation::find_and_modify::options::{FindAndModifyOptions, Modification}, options::WriteConcern, ClientSession, Collection}; -use crate::error::Result; -use crate::operation::FindAndModify as Op; +use crate::{ + coll::options::{FindOneAndDeleteOptions, Hint}, + collation::Collation, + error::Result, + operation::{ + find_and_modify::options::{FindAndModifyOptions, Modification}, + FindAndModify as Op, + }, + options::WriteConcern, + ClientSession, + Collection, +}; use super::{action_impl, option_setters}; @@ -18,10 +27,7 @@ impl Collection { /// retryable writes. /// /// `await` will return `Result>`. - pub fn find_one_and_delete_2( - &self, - filter: Document, - ) -> FindAndModify<'_, T, Delete> { + pub fn find_one_and_delete(&self, filter: Document) -> FindAndModify<'_, T, Delete> { FindAndModify { coll: self, filter, @@ -43,15 +49,13 @@ impl crate::sync::Collection { /// retryable writes. /// /// [`run`](FindAndModify::run) will return `Result>`. - pub fn find_one_and_delete_2( - &self, - filter: Document, - ) -> FindAndModify<'_, T, Delete> { - self.async_collection.find_one_and_delete_2(filter) + pub fn find_one_and_delete(&self, filter: Document) -> FindAndModify<'_, T, Delete> { + self.async_collection.find_one_and_delete(filter) } } -/// Atomically find up to one document in the collection matching a filter and modify it. Construct with [`Collection::find_one_and_delete`]. +/// Atomically find up to one document in the collection matching a filter and modify it. Construct +/// with [`Collection::find_one_and_delete`]. #[must_use] pub struct FindAndModify<'a, T, Mode> { coll: &'a Collection, @@ -66,14 +70,12 @@ pub struct Delete; impl<'a, T, Mode> FindAndModify<'a, T, Mode> { fn options(&mut self) -> &mut FindAndModifyOptions { - self.options.get_or_insert_with(::default) + self.options + .get_or_insert_with(::default) } /// Runs the operation using the provided session. 
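A sketch of the explicit-session variant described above (illustrative only; the collection handle and session are assumed to come from the caller):

use mongodb::{bson::{doc, Document}, error::Result, ClientSession, Collection};

// Sketch only: `.session(...)` threads a caller-provided session through the operation.
async fn delete_in_session(
    coll: &Collection<Document>,
    session: &mut ClientSession,
) -> Result<Option<Document>> {
    coll.find_one_and_delete(doc! { "x": 1 }).session(session).await
}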
- pub fn session( - mut self, - value: impl Into<&'a mut ClientSession>, - ) -> Self { + pub fn session(mut self, value: impl Into<&'a mut ClientSession>) -> Self { self.session = Some(value.into()); self } @@ -94,7 +96,7 @@ impl<'a, T> FindAndModify<'a, T, Delete> { collation: Collation, hint: Hint, let_vars: Document, - comment: Bson, + comment: Bson, } } @@ -104,7 +106,7 @@ action_impl! { async fn execute(mut self) -> Result> { resolve_write_concern_with_session!(self.coll, self.options, self.session.as_ref())?; - + let op = Op::::with_modification( self.coll.namespace(), self.filter, @@ -114,4 +116,4 @@ action_impl! { self.coll.client().execute_operation(op, self.session).await } } -} \ No newline at end of file +} diff --git a/src/client/session/test.rs b/src/client/session/test.rs index 7ba0b234d..7193b39a5 100644 --- a/src/client/session/test.rs +++ b/src/client/session/test.rs @@ -109,11 +109,7 @@ macro_rules! for_each_op { .await; $test_func( "findAndModify", - collection_op!( - $test_name, - coll, - coll.find_one_and_delete(doc! { "x": 1 }, None) - ), + collection_op!($test_name, coll, coll.find_one_and_delete(doc! { "x": 1 })), ) .await; $test_func( diff --git a/src/client/session/test/causal_consistency.rs b/src/client/session/test/causal_consistency.rs index 48d7e901b..ac457abc9 100644 --- a/src/client/session/test/causal_consistency.rs +++ b/src/client/session/test/causal_consistency.rs @@ -101,7 +101,8 @@ fn all_session_ops() -> impl Iterator { ))); ops.push(op!("findAndModify", false, |coll, s| coll - .find_one_and_delete_with_session(doc! { "x": 1 }, None, s,))); + .find_one_and_delete(doc! { "x": 1 }) + .session(s))); ops.push(op!("aggregate", true, |coll, s| coll .count_documents(doc! { "x": 1 }) diff --git a/src/coll.rs b/src/coll.rs index 0ea116b98..d851735b2 100644 --- a/src/coll.rs +++ b/src/coll.rs @@ -227,52 +227,6 @@ impl Collection where T: DeserializeOwned + Send + Sync, { - async fn find_one_and_delete_common( - &self, - filter: Document, - options: impl Into>, - session: impl Into>, - ) -> Result> { - let session = session.into(); - - let mut options = options.into(); - resolve_write_concern_with_session!(self, options, session.as_ref())?; - - let op = FindAndModify::with_delete(self.namespace(), filter, options); - self.client().execute_operation(op, session).await - } - - /// Atomically finds up to one document in the collection matching `filter` and deletes it. - /// - /// This operation will retry once upon failure if the connection and encountered error support - /// retryability. See the documentation - /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on - /// retryable writes. - pub async fn find_one_and_delete( - &self, - filter: Document, - options: impl Into>, - ) -> Result> { - self.find_one_and_delete_common(filter, options, None).await - } - - /// Atomically finds up to one document in the collection matching `filter` and deletes it using - /// the provided `ClientSession`. - /// - /// This operation will retry once upon failure if the connection and encountered error support - /// retryability. See the documentation - /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on - /// retryable writes. 
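For callers of the wrappers removed below, the conversion is mechanical; a sketch of a call that previously passed an options struct positionally (the filter value and option choice are placeholders):

use mongodb::{bson::{doc, Document}, error::Result, options::FindOneAndDeleteOptions, Collection};
use std::time::Duration;

// Sketch only: a pre-built options struct can still be supplied wholesale via `with_options`.
async fn delete_with_prebuilt_options(coll: &Collection<Document>) -> Result<Option<Document>> {
    let options = FindOneAndDeleteOptions::builder()
        .max_time(Duration::from_secs(5))
        .build();
    coll.find_one_and_delete(doc! { "status": "stale" })
        .with_options(options)
        .await
}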
- pub async fn find_one_and_delete_with_session( - &self, - filter: Document, - options: impl Into>, - session: &mut ClientSession, - ) -> Result> { - self.find_one_and_delete_common(filter, options, session) - .await - } - async fn find_one_and_update_common( &self, filter: Document, diff --git a/src/concern/test.rs b/src/concern/test.rs index b8adc7033..0c072188a 100644 --- a/src/concern/test.rs +++ b/src/concern/test.rs @@ -5,7 +5,6 @@ use crate::{ error::ErrorKind, options::{ Acknowledgment, - FindOneAndDeleteOptions, FindOneAndReplaceOptions, FindOneAndUpdateOptions, InsertManyOptions, @@ -528,32 +527,24 @@ async fn command_contains_write_concern_find_one_and_delete() { coll.insert_many(&[doc! { "foo": "bar" }, doc! { "foo": "bar" }], None) .await .unwrap(); - coll.find_one_and_delete( - doc! { "foo": "bar" }, - FindOneAndDeleteOptions::builder() - .write_concern( - WriteConcern::builder() - .w(Acknowledgment::Nodes(1)) - .journal(true) - .build(), - ) - .build(), - ) - .await - .unwrap(); - coll.find_one_and_delete( - doc! { "foo": "bar" }, - FindOneAndDeleteOptions::builder() - .write_concern( - WriteConcern::builder() - .w(Acknowledgment::Nodes(1)) - .journal(false) - .build(), - ) - .build(), - ) - .await - .unwrap(); + coll.find_one_and_delete(doc! { "foo": "bar" }) + .write_concern( + WriteConcern::builder() + .w(Acknowledgment::Nodes(1)) + .journal(true) + .build(), + ) + .await + .unwrap(); + coll.find_one_and_delete(doc! { "foo": "bar" }) + .write_concern( + WriteConcern::builder() + .w(Acknowledgment::Nodes(1)) + .journal(false) + .build(), + ) + .await + .unwrap(); assert_eq!( command_write_concerns(&client, "findAndModify"), diff --git a/src/operation/find_and_modify.rs b/src/operation/find_and_modify.rs index fc003f568..1747f8df7 100644 --- a/src/operation/find_and_modify.rs +++ b/src/operation/find_and_modify.rs @@ -11,12 +11,7 @@ use crate::{ bson_util, cmap::{Command, RawCommandResponse, StreamDescription}, coll::{ - options::{ - FindOneAndDeleteOptions, - FindOneAndReplaceOptions, - FindOneAndUpdateOptions, - UpdateModifications, - }, + options::{FindOneAndReplaceOptions, FindOneAndUpdateOptions, UpdateModifications}, Namespace, }, error::{ErrorKind, Result}, @@ -58,20 +53,6 @@ impl FindAndModify { } impl FindAndModify { - pub fn with_delete( - ns: Namespace, - query: Document, - options: Option, - ) -> Self { - FindAndModify { - ns, - query, - modification: Modification::Delete, - options: options.map(Into::into), - _phantom: Default::default(), - } - } - pub fn with_update( ns: Namespace, query: Document, @@ -100,7 +81,10 @@ impl FindAndModify { Ok(FindAndModify { ns, query, - modification: Modification::Update(UpdateOrReplace::replacement(replacement, human_readable_serialization)?), + modification: Modification::Update(UpdateOrReplace::replacement( + replacement, + human_readable_serialization, + )?), options: options.map(Into::into), _phantom: Default::default(), }) @@ -131,10 +115,7 @@ impl OperationWithDefaults for FindAndModify { let (key, modification) = match &self.modification { Modification::Delete => ("remove", true.into()), - Modification::Update(update_or_replace) => ( - "update", - update_or_replace.to_raw_bson()?, - ), + Modification::Update(update_or_replace) => ("update", update_or_replace.to_raw_bson()?), }; body.append(key, modification); diff --git a/src/operation/update.rs b/src/operation/update.rs index bee3113c5..66046a9ee 100644 --- a/src/operation/update.rs +++ b/src/operation/update.rs @@ -22,8 +22,14 @@ pub(crate) enum UpdateOrReplace { } 
impl UpdateOrReplace { - pub(crate) fn replacement(update: &T, human_readable_serialization: bool) -> Result { - Ok(Self::Replacement(to_raw_document_buf_with_options(update, human_readable_serialization)?)) + pub(crate) fn replacement( + update: &T, + human_readable_serialization: bool, + ) -> Result { + Ok(Self::Replacement(to_raw_document_buf_with_options( + update, + human_readable_serialization, + )?)) } pub(crate) fn to_raw_bson(&self) -> Result { diff --git a/src/sync/coll.rs b/src/sync/coll.rs index c580781c5..2eca29f47 100644 --- a/src/sync/coll.rs +++ b/src/sync/coll.rs @@ -7,7 +7,6 @@ use crate::{ bson::Document, error::Result, options::{ - FindOneAndDeleteOptions, FindOneAndReplaceOptions, FindOneAndUpdateOptions, InsertManyOptions, @@ -115,43 +114,6 @@ impl Collection where T: DeserializeOwned + Send + Sync, { - /// Atomically finds up to one document in the collection matching `filter` and deletes it. - /// - /// This operation will retry once upon failure if the connection and encountered error support - /// retryability. See the documentation - /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on - /// retryable writes. - pub fn find_one_and_delete( - &self, - filter: Document, - options: impl Into>, - ) -> Result> { - crate::sync::TOKIO_RUNTIME.block_on( - self.async_collection - .find_one_and_delete(filter, options.into()), - ) - } - - /// Atomically finds up to one document in the collection matching `filter` and deletes it using - /// the provided `ClientSession`. - /// - /// This operation will retry once upon failure if the connection and encountered error support - /// retryability. See the documentation - /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on - /// retryable writes. - pub fn find_one_and_delete_with_session( - &self, - filter: Document, - options: impl Into>, - session: &mut ClientSession, - ) -> Result> { - crate::sync::TOKIO_RUNTIME.block_on(self.async_collection.find_one_and_delete_with_session( - filter, - options.into(), - &mut session.async_client_session, - )) - } - /// Atomically finds up to one document in the collection matching `filter` and updates it. /// Both `Document` and `Vec` implement `Into`, so either can be /// passed in place of constructing the enum case. Note: pipeline updates are only supported diff --git a/src/test/client.rs b/src/test/client.rs index 6a863e4c7..9f79fc931 100644 --- a/src/test/client.rs +++ b/src/test/client.rs @@ -917,14 +917,10 @@ async fn find_one_and_delete_serde_consistency() { coll.insert_one(&doc, None).await.unwrap(); let rec: Foo = coll.find_one(doc! {}).await.unwrap().unwrap(); assert_eq!(doc.problematic, rec.problematic); - let rec: Foo = coll - .find_one_and_delete(doc! {}, None) - .await - .unwrap() - .unwrap(); + let rec: Foo = coll.find_one_and_delete(doc! {}).await.unwrap().unwrap(); assert_eq!(doc.problematic, rec.problematic); - let nothing = coll.find_one_and_delete(doc! {}, None).await.unwrap(); + let nothing = coll.find_one_and_delete(doc! {}).await.unwrap(); assert!(nothing.is_none()); } diff --git a/src/test/coll.rs b/src/test/coll.rs index 4f86dd35a..e8234ab9e 100644 --- a/src/test/coll.rs +++ b/src/test/coll.rs @@ -608,7 +608,10 @@ async fn find_one_and_delete_hint_test(options: Option, } let coll = client.database(name).collection(name); - let _: Result> = coll.find_one_and_delete(doc! {}, options.clone()).await; + let _: Result> = coll + .find_one_and_delete(doc! 
{}) + .with_options(options.clone()) + .await; let events = client.get_command_started_events(&["findAndModify"]); assert_eq!(events.len(), 1); @@ -654,10 +657,10 @@ async fn find_one_and_delete_hint_server_version() { .database(function_name!()) .collection::("coll"); - let options = FindOneAndDeleteOptions::builder() + let res = coll + .find_one_and_delete(doc! {}) .hint(Hint::Name(String::new())) - .build(); - let res = coll.find_one_and_delete(doc! {}, options).await; + .await; let req1 = VersionReq::parse("< 4.2").unwrap(); let req2 = VersionReq::parse("4.2.*").unwrap(); @@ -857,7 +860,7 @@ async fn typed_returns() { assert_eq!(result, insert_data); let result = coll - .find_one_and_delete(doc! { "x": 2 }, None) + .find_one_and_delete(doc! { "x": 2 }) .await .unwrap() .unwrap(); diff --git a/src/test/spec/crud_v1/find_one_and_delete.rs b/src/test/spec/crud_v1/find_one_and_delete.rs index eba24a45b..3f85aa9c1 100644 --- a/src/test/spec/crud_v1/find_one_and_delete.rs +++ b/src/test/spec/crud_v1/find_one_and_delete.rs @@ -54,7 +54,8 @@ async fn run_find_one_and_delete_test(test_file: TestFile) { }; let result = coll - .find_one_and_delete(arguments.filter, options) + .find_one_and_delete(arguments.filter) + .with_options(options) .await .expect(&test_case.description); assert_eq!(result, outcome.result, "{}", test_case.description); diff --git a/src/test/spec/retryable_writes.rs b/src/test/spec/retryable_writes.rs index a54b10352..c8d87c7a9 100644 --- a/src/test/spec/retryable_writes.rs +++ b/src/test/spec/retryable_writes.rs @@ -258,7 +258,7 @@ async fn transaction_ids_included() { coll.delete_one(doc! {}).await.unwrap(); assert!(includes_txn_number("delete")); - coll.find_one_and_delete(doc! {}, None).await.unwrap(); + coll.find_one_and_delete(doc! {}).await.unwrap(); assert!(includes_txn_number("findAndModify")); coll.find_one_and_replace(doc! {}, doc! { "x": 1 }, None) diff --git a/src/test/spec/sessions.rs b/src/test/spec/sessions.rs index 53f03f65f..6775e91ad 100644 --- a/src/test/spec/sessions.rs +++ b/src/test/spec/sessions.rs @@ -138,7 +138,8 @@ async fn implicit_session_after_connection() { .boxed(), ); ops.push( - coll.find_one_and_delete(doc! {}, None) + coll.find_one_and_delete(doc! {}) + .into_future() .map(ignore_val) .boxed(), ); diff --git a/src/test/spec/unified_runner/operation.rs b/src/test/spec/unified_runner/operation.rs index 5d9a8a252..011553052 100644 --- a/src/test/spec/unified_runner/operation.rs +++ b/src/test/spec/unified_runner/operation.rs @@ -1368,7 +1368,8 @@ impl TestOperation for FindOneAndDelete { async move { let collection = test_runner.get_collection(id).await; let result = collection - .find_one_and_delete(self.filter.clone(), self.options.clone()) + .find_one_and_delete(self.filter.clone()) + .with_options(self.options.clone()) .await?; let result = to_bson(&result)?; Ok(Some(result.into())) diff --git a/src/test/spec/v2_runner/operation.rs b/src/test/spec/v2_runner/operation.rs index fff052554..6ea3d5dec 100644 --- a/src/test/spec/v2_runner/operation.rs +++ b/src/test/spec/v2_runner/operation.rs @@ -905,22 +905,11 @@ impl TestOperation for FindOneAndDelete { session: Option<&'a mut ClientSession>, ) -> BoxFuture<'a, Result>> { async move { - let result = match session { - Some(session) => { - collection - .find_one_and_delete_with_session( - self.filter.clone(), - self.options.clone(), - session, - ) - .await? - } - None => { - collection - .find_one_and_delete(self.filter.clone(), self.options.clone()) - .await? 
- } - }; + let result = collection + .find_one_and_delete(self.filter.clone()) + .with_options(self.options.clone()) + .optional(session, |a, s| a.session(s)) + .await?; let result = bson::to_bson(&result)?; Ok(Some(result)) } From a618615b6a116a110945646597fa089c15a09295 Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Mon, 4 Mar 2024 15:17:16 -0500 Subject: [PATCH 15/39] eureka --- src/action/find_and_modify.rs | 73 ++++++++++++++++++++++++++++---- src/coll.rs | 4 ++ src/operation/find_and_modify.rs | 13 ++++-- 3 files changed, 78 insertions(+), 12 deletions(-) diff --git a/src/action/find_and_modify.rs b/src/action/find_and_modify.rs index 82b5cd65f..310297715 100644 --- a/src/action/find_and_modify.rs +++ b/src/action/find_and_modify.rs @@ -1,15 +1,15 @@ -use std::{marker::PhantomData, time::Duration}; +use std::{borrow::Borrow, marker::PhantomData, time::Duration}; use bson::{Bson, Document}; -use serde::de::DeserializeOwned; +use serde::{de::DeserializeOwned, Serialize}; use crate::{ - coll::options::{FindOneAndDeleteOptions, Hint}, + coll::options::{FindOneAndDeleteOptions, Hint, UpdateModifications}, collation::Collation, error::Result, operation::{ find_and_modify::options::{FindAndModifyOptions, Modification}, - FindAndModify as Op, + FindAndModify as Op, UpdateOrReplace, }, options::WriteConcern, ClientSession, @@ -18,7 +18,7 @@ use crate::{ use super::{action_impl, option_setters}; -impl Collection { +impl Collection { /// Atomically finds up to one document in the collection matching `filter` and deletes it. /// /// This operation will retry once upon failure if the connection and encountered error support @@ -31,7 +31,60 @@ impl Collection { FindAndModify { coll: self, filter, - modification: Modification::Delete, + modification: Ok(Modification::Delete), + options: None, + session: None, + _mode: PhantomData, + } + } + + /// Atomically finds up to one document in the collection matching `filter` and updates it. + /// Both `Document` and `Vec` implement `Into`, so either can be + /// passed in place of constructing the enum case. Note: pipeline updates are only supported + /// in MongoDB 4.2+. + /// + /// This operation will retry once upon failure if the connection and encountered error support + /// retryability. See the documentation + /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on + /// retryable writes. + /// + /// `await` will return `Result>`. + pub async fn find_one_and_update_2( + &self, + filter: Document, + update: impl Into, + ) -> FindAndModify<'_, T, Update> { + let update = update.into(); + FindAndModify { + coll: self, + filter, + modification: Ok(Modification::Update(update.into())), + options: None, + session: None, + _mode: PhantomData, + } + } +} + +impl Collection { + /// Atomically finds up to one document in the collection matching `filter` and replaces it with + /// `replacement`. + /// + /// This operation will retry once upon failure if the connection and encountered error support + /// retryability. See the documentation + /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on + /// retryable writes. 
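Since the update variant added above in this hunk accepts anything that converts into `UpdateModifications`, both a single update document and an aggregation pipeline read the same way at the call site. A sketch, using the non-suffixed method name the series settles on in the following commits (filter and update values are placeholders):

use mongodb::{bson::{doc, Document}, error::Result, Collection};

// Sketch only: either an update document or a pipeline (MongoDB 4.2+) can be passed
// where `impl Into<UpdateModifications>` is expected.
async fn update_examples(coll: &Collection<Document>) -> Result<()> {
    coll.find_one_and_update(doc! { "x": 1 }, doc! { "$inc": { "x": 1 } }).await?;
    coll.find_one_and_update(doc! { "x": 1 }, vec![doc! { "$set": { "y": "$x" } }]).await?;
    Ok(())
}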
+ pub async fn find_one_and_replace_2( + &self, + filter: Document, + replacement: impl Borrow, + ) -> FindAndModify<'_, T, Replace> { + let human_readable_serialization = self.human_readable_serialization(); + FindAndModify { + coll: self, + filter, + modification: UpdateOrReplace::replacement(replacement.borrow(), human_readable_serialization) + .map(Modification::Update), options: None, session: None, _mode: PhantomData, @@ -60,13 +113,15 @@ impl crate::sync::Collection { pub struct FindAndModify<'a, T, Mode> { coll: &'a Collection, filter: Document, - modification: Modification, + modification: Result, options: Option, session: Option<&'a mut ClientSession>, _mode: PhantomData, } pub struct Delete; +pub struct Update; +pub struct Replace; impl<'a, T, Mode> FindAndModify<'a, T, Mode> { fn options(&mut self) -> &mut FindAndModifyOptions { @@ -110,9 +165,9 @@ action_impl! { let op = Op::::with_modification( self.coll.namespace(), self.filter, - self.modification, + self.modification?, self.options, - ); + )?; self.coll.client().execute_operation(op, self.session).await } } diff --git a/src/coll.rs b/src/coll.rs index d851735b2..5ea65ad0c 100644 --- a/src/coll.rs +++ b/src/coll.rs @@ -221,6 +221,10 @@ where self.client().execute_operation(op, None).await?; Ok(()) } + + pub(crate) fn human_readable_serialization(&self) -> bool { + self.inner.human_readable_serialization + } } impl Collection diff --git a/src/operation/find_and_modify.rs b/src/operation/find_and_modify.rs index 1747f8df7..5f83bf144 100644 --- a/src/operation/find_and_modify.rs +++ b/src/operation/find_and_modify.rs @@ -41,14 +41,21 @@ impl FindAndModify { query: Document, modification: Modification, options: Option, - ) -> Self { - Self { + ) -> Result { + if let Modification::Update( + UpdateOrReplace::UpdateModifications( + UpdateModifications::Document(d) + ) + ) = &modification { + bson_util::update_document_check(d)?; + }; + Ok(Self { ns, query, modification, options, _phantom: PhantomData, - } + }) } } From 243459d0390cf3ea07e7ab2dd02b95ca74612053 Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Tue, 5 Mar 2024 12:33:35 -0500 Subject: [PATCH 16/39] update find_one_and_update callers --- action_macro/src/lib.rs | 1 + src/action.rs | 6 +- src/action/find_and_modify.rs | 69 +++++++++++++++++-- src/client/csfle/client_encryption.rs | 3 +- src/client/session/test.rs | 2 +- src/client/session/test/causal_consistency.rs | 8 +-- src/coll.rs | 62 ----------------- src/coll/options.rs | 9 +++ src/concern/test.rs | 47 +++++-------- src/operation/find_and_modify.rs | 29 ++------ src/operation/find_and_modify/options.rs | 8 +-- src/sync/coll.rs | 53 -------------- src/test/coll.rs | 2 +- src/test/spec/crud_v1/find_one_and_update.rs | 3 +- src/test/spec/retryable_writes.rs | 2 +- src/test/spec/sessions.rs | 3 +- src/test/spec/unified_runner/operation.rs | 22 ++---- src/test/spec/v2_runner/operation.rs | 26 ++----- 18 files changed, 122 insertions(+), 233 deletions(-) diff --git a/action_macro/src/lib.rs b/action_macro/src/lib.rs index bfa178ebf..ed529042a 100644 --- a/action_macro/src/lib.rs +++ b/action_macro/src/lib.rs @@ -56,6 +56,7 @@ pub fn action_impl(input: proc_macro::TokenStream) -> proc_macro::TokenStream { }); quote! { + impl #generics crate::action::private::Sealed for #action {} impl #generics crate::action::Action for #action { type Output = #exec_output; } diff --git a/src/action.rs b/src/action.rs index 65d83e65c..608d234b6 100644 --- a/src/action.rs +++ b/src/action.rs @@ -106,9 +106,13 @@ macro_rules! 
option_setters { } use option_setters; +pub(crate) mod private { + pub trait Sealed {} +} + /// A pending action to execute on the server. The action can be configured via chained methods and /// executed via `await` (or `run` if using the sync client). -pub trait Action { +pub trait Action: private::Sealed { /// The type of the value produced by execution. type Output; diff --git a/src/action/find_and_modify.rs b/src/action/find_and_modify.rs index 310297715..1be4ee120 100644 --- a/src/action/find_and_modify.rs +++ b/src/action/find_and_modify.rs @@ -4,12 +4,19 @@ use bson::{Bson, Document}; use serde::{de::DeserializeOwned, Serialize}; use crate::{ - coll::options::{FindOneAndDeleteOptions, Hint, UpdateModifications}, + coll::options::{ + FindOneAndDeleteOptions, + FindOneAndUpdateOptions, + Hint, + ReturnDocument, + UpdateModifications, + }, collation::Collation, error::Result, operation::{ find_and_modify::options::{FindAndModifyOptions, Modification}, - FindAndModify as Op, UpdateOrReplace, + FindAndModify as Op, + UpdateOrReplace, }, options::WriteConcern, ClientSession, @@ -49,7 +56,7 @@ impl Collection { /// retryable writes. /// /// `await` will return `Result>`. - pub async fn find_one_and_update_2( + pub fn find_one_and_update( &self, filter: Document, update: impl Into, @@ -74,7 +81,7 @@ impl Collection { /// retryability. See the documentation /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on /// retryable writes. - pub async fn find_one_and_replace_2( + pub fn find_one_and_replace_2( &self, filter: Document, replacement: impl Borrow, @@ -83,8 +90,11 @@ impl Collection { FindAndModify { coll: self, filter, - modification: UpdateOrReplace::replacement(replacement.borrow(), human_readable_serialization) - .map(Modification::Update), + modification: UpdateOrReplace::replacement( + replacement.borrow(), + human_readable_serialization, + ) + .map(Modification::Update), options: None, session: None, _mode: PhantomData, @@ -105,6 +115,25 @@ impl crate::sync::Collection { pub fn find_one_and_delete(&self, filter: Document) -> FindAndModify<'_, T, Delete> { self.async_collection.find_one_and_delete(filter) } + + /// Atomically finds up to one document in the collection matching `filter` and updates it. + /// Both `Document` and `Vec` implement `Into`, so either can be + /// passed in place of constructing the enum case. Note: pipeline updates are only supported + /// in MongoDB 4.2+. + /// + /// This operation will retry once upon failure if the connection and encountered error support + /// retryability. See the documentation + /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on + /// retryable writes. + /// + /// [`run`](FindAndModify::run) will return `Result>`. + pub fn find_one_and_update( + &self, + filter: Document, + update: impl Into, + ) -> FindAndModify<'_, T, Update> { + self.async_collection.find_one_and_update(filter, update) + } } /// Atomically find up to one document in the collection matching a filter and modify it. Construct @@ -155,6 +184,34 @@ impl<'a, T> FindAndModify<'a, T, Delete> { } } +impl<'a, T> FindAndModify<'a, T, Update> { + /// Set all options. Note that this will replace all previous values set. + pub fn with_options(mut self, value: impl Into>) -> Self { + self.options = value.into().map(FindAndModifyOptions::from); + self + } + + option_setters! 
{ FindOneAndUpdateOptions; + array_filters: Vec, + bypass_document_validation: bool, + max_time: Duration, + projection: Document, + sort: Document, + upsert: bool, + write_concern: WriteConcern, + collation: Collation, + hint: Hint, + let_vars: Document, + comment: Bson, + } + + /// Set the [`FindOneAndUpdateOptions::return_document`] option. + pub fn return_document(mut self, value: ReturnDocument) -> Self { + self.options().new = Some(value.as_bool()); + self + } +} + action_impl! { impl<'a, T: DeserializeOwned + Send, Mode> Action for FindAndModify<'a, T, Mode> { type Future = FindAndDeleteFuture<'a, T: DeserializeOwned + Send>; diff --git a/src/client/csfle/client_encryption.rs b/src/client/csfle/client_encryption.rs index d36e38585..aee2f4438 100644 --- a/src/client/csfle/client_encryption.rs +++ b/src/client/csfle/client_encryption.rs @@ -120,7 +120,6 @@ impl ClientEncryption { .find_one_and_update( doc! { "_id": id }, doc! { "$addToSet": { "keyAltNames": key_alt_name } }, - None, ) .await } @@ -150,7 +149,7 @@ impl ClientEncryption { } }; self.key_vault - .find_one_and_update(doc! { "_id": id }, vec![update], None) + .find_one_and_update(doc! { "_id": id }, vec![update]) .await } diff --git a/src/client/session/test.rs b/src/client/session/test.rs index 7193b39a5..a0826f592 100644 --- a/src/client/session/test.rs +++ b/src/client/session/test.rs @@ -117,7 +117,7 @@ macro_rules! for_each_op { collection_op!( $test_name, coll, - coll.find_one_and_update(doc! {}, doc! { "$inc": { "x": 1 } }, None) + coll.find_one_and_update(doc! {}, doc! { "$inc": { "x": 1 } }) ), ) .await; diff --git a/src/client/session/test/causal_consistency.rs b/src/client/session/test/causal_consistency.rs index ac457abc9..8010d6970 100644 --- a/src/client/session/test/causal_consistency.rs +++ b/src/client/session/test/causal_consistency.rs @@ -85,12 +85,8 @@ fn all_session_ops() -> impl Iterator { .session(s))); ops.push(op!("findAndModify", false, |coll, s| coll - .find_one_and_update_with_session( - doc! { "x": 1 }, - doc! { "$inc": { "x": 1 } }, - None, - s, - ))); + .find_one_and_update(doc! { "x": 1 }, doc! { "$inc": { "x": 1 } },) + .session(s))); ops.push(op!("findAndModify", false, |coll, s| coll .find_one_and_replace_with_session( diff --git a/src/coll.rs b/src/coll.rs index 5ea65ad0c..4cae86334 100644 --- a/src/coll.rs +++ b/src/coll.rs @@ -227,68 +227,6 @@ where } } -impl Collection -where - T: DeserializeOwned + Send + Sync, -{ - async fn find_one_and_update_common( - &self, - filter: Document, - update: impl Into, - options: impl Into>, - session: impl Into>, - ) -> Result> { - let update = update.into(); - - let session = session.into(); - - let mut options = options.into(); - resolve_write_concern_with_session!(self, options, session.as_ref())?; - - let op = FindAndModify::with_update(self.namespace(), filter, update, options)?; - self.client().execute_operation(op, session).await - } - - /// Atomically finds up to one document in the collection matching `filter` and updates it. - /// Both `Document` and `Vec` implement `Into`, so either can be - /// passed in place of constructing the enum case. Note: pipeline updates are only supported - /// in MongoDB 4.2+. - /// - /// This operation will retry once upon failure if the connection and encountered error support - /// retryability. See the documentation - /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on - /// retryable writes. 
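A sketch of the converted call with chained options, standing in for the `FindOneAndUpdateOptions` argument that the wrappers deleted below used to take (collection and field names are placeholders):

use mongodb::{bson::{doc, Document}, error::Result, options::ReturnDocument, Collection};

// Sketch only: `return_document(After)` asks the server for the post-update document,
// and `upsert(true)` inserts one if nothing matches the filter.
async fn increment_and_fetch(coll: &Collection<Document>) -> Result<Option<Document>> {
    coll.find_one_and_update(doc! { "name": "counter" }, doc! { "$inc": { "value": 1 } })
        .return_document(ReturnDocument::After)
        .upsert(true)
        .await
}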
- pub async fn find_one_and_update( - &self, - filter: Document, - update: impl Into, - options: impl Into>, - ) -> Result> { - self.find_one_and_update_common(filter, update, options, None) - .await - } - - /// Atomically finds up to one document in the collection matching `filter` and updates it using - /// the provided `ClientSession`. Both `Document` and `Vec` implement - /// `Into`, so either can be passed in place of constructing the enum - /// case. Note: pipeline updates are only supported in MongoDB 4.2+. - /// - /// This operation will retry once upon failure if the connection and encountered error support - /// retryability. See the documentation - /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on - /// retryable writes. - pub async fn find_one_and_update_with_session( - &self, - filter: Document, - update: impl Into, - options: impl Into>, - session: &mut ClientSession, - ) -> Result> { - self.find_one_and_update_common(filter, update, options, session) - .await - } -} - impl Collection where T: Serialize + DeserializeOwned + Send + Sync, diff --git a/src/coll/options.rs b/src/coll/options.rs index 4d2a75458..6c8a63a53 100644 --- a/src/coll/options.rs +++ b/src/coll/options.rs @@ -61,6 +61,15 @@ pub enum ReturnDocument { Before, } +impl ReturnDocument { + pub(crate) fn as_bool(&self) -> bool { + match self { + ReturnDocument::After => true, + ReturnDocument::Before => false, + } + } +} + impl<'de> Deserialize<'de> for ReturnDocument { fn deserialize>(deserializer: D) -> std::result::Result { let s = String::deserialize(deserializer)?; diff --git a/src/concern/test.rs b/src/concern/test.rs index 0c072188a..479160fa0 100644 --- a/src/concern/test.rs +++ b/src/concern/test.rs @@ -6,7 +6,6 @@ use crate::{ options::{ Acknowledgment, FindOneAndReplaceOptions, - FindOneAndUpdateOptions, InsertManyOptions, InsertOneOptions, ReadConcern, @@ -625,34 +624,24 @@ async fn command_contains_write_concern_find_one_and_update() { coll.insert_many(&[doc! { "foo": "bar" }, doc! { "foo": "bar" }], None) .await .unwrap(); - coll.find_one_and_update( - doc! { "foo": "bar" }, - doc! { "$set": { "foo": "fun" } }, - FindOneAndUpdateOptions::builder() - .write_concern( - WriteConcern::builder() - .w(Acknowledgment::Nodes(1)) - .journal(true) - .build(), - ) - .build(), - ) - .await - .unwrap(); - coll.find_one_and_update( - doc! { "foo": "bar" }, - doc! { "$set": { "foo": "fun" } }, - FindOneAndUpdateOptions::builder() - .write_concern( - WriteConcern::builder() - .w(Acknowledgment::Nodes(1)) - .journal(false) - .build(), - ) - .build(), - ) - .await - .unwrap(); + coll.find_one_and_update(doc! { "foo": "bar" }, doc! { "$set": { "foo": "fun" } }) + .write_concern( + WriteConcern::builder() + .w(Acknowledgment::Nodes(1)) + .journal(true) + .build(), + ) + .await + .unwrap(); + coll.find_one_and_update(doc! { "foo": "bar" }, doc! 
{ "$set": { "foo": "fun" } }) + .write_concern( + WriteConcern::builder() + .w(Acknowledgment::Nodes(1)) + .journal(false) + .build(), + ) + .await + .unwrap(); assert_eq!( command_write_concerns(&client, "findAndModify"), diff --git a/src/operation/find_and_modify.rs b/src/operation/find_and_modify.rs index 5f83bf144..2d73af009 100644 --- a/src/operation/find_and_modify.rs +++ b/src/operation/find_and_modify.rs @@ -11,7 +11,7 @@ use crate::{ bson_util, cmap::{Command, RawCommandResponse, StreamDescription}, coll::{ - options::{FindOneAndReplaceOptions, FindOneAndUpdateOptions, UpdateModifications}, + options::{FindOneAndReplaceOptions, UpdateModifications}, Namespace, }, error::{ErrorKind, Result}, @@ -42,11 +42,10 @@ impl FindAndModify { modification: Modification, options: Option, ) -> Result { - if let Modification::Update( - UpdateOrReplace::UpdateModifications( - UpdateModifications::Document(d) - ) - ) = &modification { + if let Modification::Update(UpdateOrReplace::UpdateModifications( + UpdateModifications::Document(d), + )) = &modification + { bson_util::update_document_check(d)?; }; Ok(Self { @@ -60,24 +59,6 @@ impl FindAndModify { } impl FindAndModify { - pub fn with_update( - ns: Namespace, - query: Document, - update: UpdateModifications, - options: Option, - ) -> Result { - if let UpdateModifications::Document(ref d) = update { - bson_util::update_document_check(d)?; - }; - Ok(FindAndModify { - ns, - query, - modification: Modification::Update(update.into()), - options: options.map(Into::into), - _phantom: Default::default(), - }) - } - pub fn with_replace( ns: Namespace, query: Document, diff --git a/src/operation/find_and_modify/options.rs b/src/operation/find_and_modify/options.rs index c65e00f98..5147c3ba5 100644 --- a/src/operation/find_and_modify/options.rs +++ b/src/operation/find_and_modify/options.rs @@ -130,11 +130,5 @@ impl From for FindAndModifyOptions { } fn return_document_to_bool(return_document: Option) -> Option { - if let Some(return_document) = return_document { - return match return_document { - ReturnDocument::After => Some(true), - ReturnDocument::Before => Some(false), - }; - } - None + return_document.as_ref().map(ReturnDocument::as_bool) } diff --git a/src/sync/coll.rs b/src/sync/coll.rs index 2eca29f47..c123aa6bb 100644 --- a/src/sync/coll.rs +++ b/src/sync/coll.rs @@ -8,13 +8,11 @@ use crate::{ error::Result, options::{ FindOneAndReplaceOptions, - FindOneAndUpdateOptions, InsertManyOptions, InsertOneOptions, ReadConcern, ReplaceOptions, SelectionCriteria, - UpdateModifications, WriteConcern, }, results::{InsertManyResult, InsertOneResult, UpdateResult}, @@ -110,57 +108,6 @@ where } } -impl Collection -where - T: DeserializeOwned + Send + Sync, -{ - /// Atomically finds up to one document in the collection matching `filter` and updates it. - /// Both `Document` and `Vec` implement `Into`, so either can be - /// passed in place of constructing the enum case. Note: pipeline updates are only supported - /// in MongoDB 4.2+. - /// - /// This operation will retry once upon failure if the connection and encountered error support - /// retryability. See the documentation - /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on - /// retryable writes. 
- pub fn find_one_and_update( - &self, - filter: Document, - update: impl Into, - options: impl Into>, - ) -> Result> { - crate::sync::TOKIO_RUNTIME.block_on(self.async_collection.find_one_and_update( - filter, - update.into(), - options.into(), - )) - } - - /// Atomically finds up to one document in the collection matching `filter` and updates it using - /// the provided `ClientSession`. Both `Document` and `Vec` implement - /// `Into`, so either can be passed in place of constructing the enum - /// case. Note: pipeline updates are only supported in MongoDB 4.2+. - /// - /// This operation will retry once upon failure if the connection and encountered error support - /// retryability. See the documentation - /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on - /// retryable writes. - pub fn find_one_and_update_with_session( - &self, - filter: Document, - update: impl Into, - options: impl Into>, - session: &mut ClientSession, - ) -> Result> { - crate::sync::TOKIO_RUNTIME.block_on(self.async_collection.find_one_and_update_with_session( - filter, - update.into(), - options.into(), - &mut session.async_client_session, - )) - } -} - impl Collection where T: Serialize + DeserializeOwned + Send + Sync, diff --git a/src/test/coll.rs b/src/test/coll.rs index e8234ab9e..d04cb062f 100644 --- a/src/test/coll.rs +++ b/src/test/coll.rs @@ -853,7 +853,7 @@ async fn typed_returns() { coll.insert_one(insert_data.clone(), None).await.unwrap(); let result = coll - .find_one_and_update(doc! { "x": 1 }, doc! { "$inc": { "x": 1 } }, None) + .find_one_and_update(doc! { "x": 1 }, doc! { "$inc": { "x": 1 } }) .await .unwrap() .unwrap(); diff --git a/src/test/spec/crud_v1/find_one_and_update.rs b/src/test/spec/crud_v1/find_one_and_update.rs index 66c48f265..d87f3acb6 100644 --- a/src/test/spec/crud_v1/find_one_and_update.rs +++ b/src/test/spec/crud_v1/find_one_and_update.rs @@ -71,7 +71,8 @@ async fn run_find_one_and_update_test(test_file: TestFile) { }; let result = coll - .find_one_and_update(arguments.filter, arguments.update, options) + .find_one_and_update(arguments.filter, arguments.update) + .with_options(options) .await .expect(&test_case.description); assert_eq!( diff --git a/src/test/spec/retryable_writes.rs b/src/test/spec/retryable_writes.rs index c8d87c7a9..d50b13774 100644 --- a/src/test/spec/retryable_writes.rs +++ b/src/test/spec/retryable_writes.rs @@ -266,7 +266,7 @@ async fn transaction_ids_included() { .unwrap(); assert!(includes_txn_number("findAndModify")); - coll.find_one_and_update(doc! {}, doc! { "$set": doc! { "x": 1 } }, None) + coll.find_one_and_update(doc! {}, doc! { "$set": doc! { "x": 1 } }) .await .unwrap(); assert!(includes_txn_number("findAndModify")); diff --git a/src/test/spec/sessions.rs b/src/test/spec/sessions.rs index 6775e91ad..1d02387c7 100644 --- a/src/test/spec/sessions.rs +++ b/src/test/spec/sessions.rs @@ -144,7 +144,8 @@ async fn implicit_session_after_connection() { .boxed(), ); ops.push( - coll.find_one_and_update(doc! {}, doc! { "$set": { "a": 1 } }, None) + coll.find_one_and_update(doc! {}, doc! 
{ "$set": { "a": 1 } }) + .into_future() .map(ignore_val) .boxed(), ); diff --git a/src/test/spec/unified_runner/operation.rs b/src/test/spec/unified_runner/operation.rs index 011553052..1750bf8de 100644 --- a/src/test/spec/unified_runner/operation.rs +++ b/src/test/spec/unified_runner/operation.rs @@ -1288,29 +1288,17 @@ impl TestOperation for FindOneAndUpdate { ) -> BoxFuture<'a, Result>> { async move { let collection = test_runner.get_collection(id).await; + let act = collection + .find_one_and_update(self.filter.clone(), self.update.clone()) + .with_options(self.options.clone()); let result = match &self.session { Some(session_id) => { with_mut_session!(test_runner, session_id, |session| async { - collection - .find_one_and_update_with_session( - self.filter.clone(), - self.update.clone(), - self.options.clone(), - session, - ) - .await + act.session(session.deref_mut()).await }) .await? } - None => { - collection - .find_one_and_update( - self.filter.clone(), - self.update.clone(), - self.options.clone(), - ) - .await? - } + None => act.await?, }; let result = to_bson(&result)?; Ok(Some(result.into())) diff --git a/src/test/spec/v2_runner/operation.rs b/src/test/spec/v2_runner/operation.rs index 6ea3d5dec..3437289c6 100644 --- a/src/test/spec/v2_runner/operation.rs +++ b/src/test/spec/v2_runner/operation.rs @@ -820,27 +820,11 @@ impl TestOperation for FindOneAndUpdate { session: Option<&'a mut ClientSession>, ) -> BoxFuture<'a, Result>> { async move { - let result = match session { - Some(session) => { - collection - .find_one_and_update_with_session( - self.filter.clone(), - self.update.clone(), - self.options.clone(), - session, - ) - .await? - } - None => { - collection - .find_one_and_update( - self.filter.clone(), - self.update.clone(), - self.options.clone(), - ) - .await? - } - }; + let result = collection + .find_one_and_update(self.filter.clone(), self.update.clone()) + .with_options(self.options.clone()) + .optional(session, |a, s| a.session(s)) + .await?; let result = bson::to_bson(&result)?; Ok(Some(result)) } From 5e305c6344f92db2598233de7fb112cea5dda765 Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Tue, 5 Mar 2024 12:47:03 -0500 Subject: [PATCH 17/39] update find_one_and_replace callers --- src/action/find_and_modify.rs | 49 ++++++++++++- src/client/session/test.rs | 2 +- src/client/session/test/causal_consistency.rs | 8 +-- src/coll.rs | 70 +------------------ src/concern/test.rs | 47 +++++-------- src/operation/find_and_modify.rs | 28 +------- src/sync/coll.rs | 52 +------------- src/test/coll.rs | 2 +- src/test/spec/crud_v1/find_one_and_replace.rs | 3 +- src/test/spec/retryable_writes.rs | 2 +- src/test/spec/sessions.rs | 3 +- src/test/spec/unified_runner/operation.rs | 7 +- src/test/spec/v2_runner/operation.rs | 26 ++----- 13 files changed, 87 insertions(+), 212 deletions(-) diff --git a/src/action/find_and_modify.rs b/src/action/find_and_modify.rs index 1be4ee120..e685a3b10 100644 --- a/src/action/find_and_modify.rs +++ b/src/action/find_and_modify.rs @@ -6,6 +6,7 @@ use serde::{de::DeserializeOwned, Serialize}; use crate::{ coll::options::{ FindOneAndDeleteOptions, + FindOneAndReplaceOptions, FindOneAndUpdateOptions, Hint, ReturnDocument, @@ -81,7 +82,7 @@ impl Collection { /// retryability. See the documentation /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on /// retryable writes. 
- pub fn find_one_and_replace_2( + pub fn find_one_and_replace( &self, filter: Document, replacement: impl Borrow, @@ -136,6 +137,25 @@ impl crate::sync::Collection { } } +#[cfg(feature = "sync")] +impl crate::sync::Collection { + /// Atomically finds up to one document in the collection matching `filter` and replaces it with + /// `replacement`. + /// + /// This operation will retry once upon failure if the connection and encountered error support + /// retryability. See the documentation + /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on + /// retryable writes. + pub fn find_one_and_replace( + &self, + filter: Document, + replacement: impl Borrow, + ) -> FindAndModify<'_, T, Replace> { + self.async_collection + .find_one_and_replace(filter, replacement) + } +} + /// Atomically find up to one document in the collection matching a filter and modify it. Construct /// with [`Collection::find_one_and_delete`]. #[must_use] @@ -212,6 +232,33 @@ impl<'a, T> FindAndModify<'a, T, Update> { } } +impl<'a, T> FindAndModify<'a, T, Replace> { + /// Set all options. Note that this will replace all previous values set. + pub fn with_options(mut self, value: impl Into>) -> Self { + self.options = value.into().map(FindAndModifyOptions::from); + self + } + + option_setters! { FindOneAndReplaceOptions; + bypass_document_validation: bool, + max_time: Duration, + projection: Document, + sort: Document, + upsert: bool, + write_concern: WriteConcern, + collation: Collation, + hint: Hint, + let_vars: Document, + comment: Bson, + } + + /// Set the [`FindOneAndReplaceOptions::return_document`] option. + pub fn return_document(mut self, value: ReturnDocument) -> Self { + self.options().new = Some(value.as_bool()); + self + } +} + action_impl! { impl<'a, T: DeserializeOwned + Send, Mode> Action for FindAndModify<'a, T, Mode> { type Future = FindAndDeleteFuture<'a, T: DeserializeOwned + Send>; diff --git a/src/client/session/test.rs b/src/client/session/test.rs index a0826f592..8c8f4d0fc 100644 --- a/src/client/session/test.rs +++ b/src/client/session/test.rs @@ -126,7 +126,7 @@ macro_rules! for_each_op { collection_op!( $test_name, coll, - coll.find_one_and_replace(doc! {}, doc! {"x": 1}, None) + coll.find_one_and_replace(doc! {}, doc! {"x": 1}) ), ) .await; diff --git a/src/client/session/test/causal_consistency.rs b/src/client/session/test/causal_consistency.rs index 8010d6970..ef10907b7 100644 --- a/src/client/session/test/causal_consistency.rs +++ b/src/client/session/test/causal_consistency.rs @@ -89,12 +89,8 @@ fn all_session_ops() -> impl Iterator { .session(s))); ops.push(op!("findAndModify", false, |coll, s| coll - .find_one_and_replace_with_session( - doc! { "x": 1 }, - doc! { "x": 1 }, - None, - s, - ))); + .find_one_and_replace(doc! { "x": 1 }, doc! { "x": 1 },) + .session(s))); ops.push(op!("findAndModify", false, |coll, s| coll .find_one_and_delete(doc! 
{ "x": 1 }) diff --git a/src/coll.rs b/src/coll.rs index 4cae86334..da9485000 100644 --- a/src/coll.rs +++ b/src/coll.rs @@ -3,12 +3,7 @@ pub mod options; use std::{borrow::Borrow, collections::HashSet, fmt, fmt::Debug, str::FromStr, sync::Arc}; -use serde::{ - de::{DeserializeOwned, Error as DeError}, - Deserialize, - Deserializer, - Serialize, -}; +use serde::{de::Error as DeError, Deserialize, Deserializer, Serialize}; use self::options::*; use crate::{ @@ -17,7 +12,7 @@ use crate::{ cmap::conn::PinnedConnectionHandle, concern::{ReadConcern, WriteConcern}, error::{convert_bulk_errors, BulkWriteError, BulkWriteFailure, Error, ErrorKind, Result}, - operation::{FindAndModify, Insert, Update}, + operation::{Insert, Update}, results::{InsertManyResult, InsertOneResult, UpdateResult}, selection_criteria::SelectionCriteria, Client, @@ -227,67 +222,6 @@ where } } -impl Collection -where - T: Serialize + DeserializeOwned + Send + Sync, -{ - async fn find_one_and_replace_common( - &self, - filter: Document, - replacement: impl Borrow, - options: impl Into>, - session: impl Into>, - ) -> Result> { - let mut options = options.into(); - let session = session.into(); - resolve_write_concern_with_session!(self, options, session.as_ref())?; - - let op = FindAndModify::with_replace( - self.namespace(), - filter, - replacement.borrow(), - options, - self.inner.human_readable_serialization, - )?; - self.client().execute_operation(op, session).await - } - - /// Atomically finds up to one document in the collection matching `filter` and replaces it with - /// `replacement`. - /// - /// This operation will retry once upon failure if the connection and encountered error support - /// retryability. See the documentation - /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on - /// retryable writes. - pub async fn find_one_and_replace( - &self, - filter: Document, - replacement: impl Borrow, - options: impl Into>, - ) -> Result> { - self.find_one_and_replace_common(filter, replacement, options, None) - .await - } - - /// Atomically finds up to one document in the collection matching `filter` and replaces it with - /// `replacement` using the provided `ClientSession`. - /// - /// This operation will retry once upon failure if the connection and encountered error support - /// retryability. See the documentation - /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on - /// retryable writes. - pub async fn find_one_and_replace_with_session( - &self, - filter: Document, - replacement: impl Borrow, - options: impl Into>, - session: &mut ClientSession, - ) -> Result> { - self.find_one_and_replace_common(filter, replacement, options, session) - .await - } -} - impl Collection where T: Serialize + Send + Sync, diff --git a/src/concern/test.rs b/src/concern/test.rs index 479160fa0..f37e065ed 100644 --- a/src/concern/test.rs +++ b/src/concern/test.rs @@ -5,7 +5,6 @@ use crate::{ error::ErrorKind, options::{ Acknowledgment, - FindOneAndReplaceOptions, InsertManyOptions, InsertOneOptions, ReadConcern, @@ -570,34 +569,24 @@ async fn command_contains_write_concern_find_one_and_replace() { coll.insert_many(&[doc! { "foo": "bar" }, doc! { "foo": "bar" }], None) .await .unwrap(); - coll.find_one_and_replace( - doc! { "foo": "bar" }, - doc! 
{ "baz": "fun" }, - FindOneAndReplaceOptions::builder() - .write_concern( - WriteConcern::builder() - .w(Acknowledgment::Nodes(1)) - .journal(true) - .build(), - ) - .build(), - ) - .await - .unwrap(); - coll.find_one_and_replace( - doc! { "foo": "bar" }, - doc! { "baz": "fun" }, - FindOneAndReplaceOptions::builder() - .write_concern( - WriteConcern::builder() - .w(Acknowledgment::Nodes(1)) - .journal(false) - .build(), - ) - .build(), - ) - .await - .unwrap(); + coll.find_one_and_replace(doc! { "foo": "bar" }, doc! { "baz": "fun" }) + .write_concern( + WriteConcern::builder() + .w(Acknowledgment::Nodes(1)) + .journal(true) + .build(), + ) + .await + .unwrap(); + coll.find_one_and_replace(doc! { "foo": "bar" }, doc! { "baz": "fun" }) + .write_concern( + WriteConcern::builder() + .w(Acknowledgment::Nodes(1)) + .journal(false) + .build(), + ) + .await + .unwrap(); assert_eq!( command_write_concerns(&client, "findAndModify"), diff --git a/src/operation/find_and_modify.rs b/src/operation/find_and_modify.rs index 2d73af009..9ee6fe09f 100644 --- a/src/operation/find_and_modify.rs +++ b/src/operation/find_and_modify.rs @@ -3,17 +3,14 @@ pub(crate) mod options; use std::{fmt::Debug, marker::PhantomData}; use bson::{from_slice, RawBson}; -use serde::{de::DeserializeOwned, Deserialize, Serialize}; +use serde::{de::DeserializeOwned, Deserialize}; use self::options::FindAndModifyOptions; use crate::{ bson::{doc, rawdoc, Document, RawDocumentBuf}, bson_util, cmap::{Command, RawCommandResponse, StreamDescription}, - coll::{ - options::{FindOneAndReplaceOptions, UpdateModifications}, - Namespace, - }, + coll::{options::UpdateModifications, Namespace}, error::{ErrorKind, Result}, operation::{ append_options_to_raw_document, @@ -58,27 +55,6 @@ impl FindAndModify { } } -impl FindAndModify { - pub fn with_replace( - ns: Namespace, - query: Document, - replacement: &R, - options: Option, - human_readable_serialization: bool, - ) -> Result { - Ok(FindAndModify { - ns, - query, - modification: Modification::Update(UpdateOrReplace::replacement( - replacement, - human_readable_serialization, - )?), - options: options.map(Into::into), - _phantom: Default::default(), - }) - } -} - impl OperationWithDefaults for FindAndModify { type O = Option; type Command = RawDocumentBuf; diff --git a/src/sync/coll.rs b/src/sync/coll.rs index c123aa6bb..524da3f99 100644 --- a/src/sync/coll.rs +++ b/src/sync/coll.rs @@ -1,13 +1,12 @@ use std::{borrow::Borrow, fmt::Debug}; -use serde::{de::DeserializeOwned, Serialize}; +use serde::Serialize; use super::ClientSession; use crate::{ bson::Document, error::Result, options::{ - FindOneAndReplaceOptions, InsertManyOptions, InsertOneOptions, ReadConcern, @@ -108,55 +107,6 @@ where } } -impl Collection -where - T: Serialize + DeserializeOwned + Send + Sync, -{ - /// Atomically finds up to one document in the collection matching `filter` and replaces it with - /// `replacement`. - /// - /// This operation will retry once upon failure if the connection and encountered error support - /// retryability. See the documentation - /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on - /// retryable writes. 
- pub fn find_one_and_replace( - &self, - filter: Document, - replacement: T, - options: impl Into>, - ) -> Result> { - crate::sync::TOKIO_RUNTIME.block_on(self.async_collection.find_one_and_replace( - filter, - replacement, - options.into(), - )) - } - - /// Atomically finds up to one document in the collection matching `filter` and replaces it with - /// `replacement` using the provided `ClientSession`. - /// - /// This operation will retry once upon failure if the connection and encountered error support - /// retryability. See the documentation - /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on - /// retryable writes. - pub fn find_one_and_replace_with_session( - &self, - filter: Document, - replacement: T, - options: impl Into>, - session: &mut ClientSession, - ) -> Result> { - crate::sync::TOKIO_RUNTIME.block_on( - self.async_collection.find_one_and_replace_with_session( - filter, - replacement, - options.into(), - &mut session.async_client_session, - ), - ) - } -} - impl Collection where T: Serialize + Send + Sync, diff --git a/src/test/coll.rs b/src/test/coll.rs index d04cb062f..f80c59f5e 100644 --- a/src/test/coll.rs +++ b/src/test/coll.rs @@ -803,7 +803,7 @@ async fn typed_find_one_and_replace() { str: "b".into(), }; let result = coll - .find_one_and_replace(doc! { "x": 1 }, replacement.clone(), None) + .find_one_and_replace(doc! { "x": 1 }, replacement.clone()) .await .unwrap() .unwrap(); diff --git a/src/test/spec/crud_v1/find_one_and_replace.rs b/src/test/spec/crud_v1/find_one_and_replace.rs index b2501d4a9..12e6770f3 100644 --- a/src/test/spec/crud_v1/find_one_and_replace.rs +++ b/src/test/spec/crud_v1/find_one_and_replace.rs @@ -68,7 +68,8 @@ async fn run_find_one_and_replace_test(test_file: TestFile) { }; let result = coll - .find_one_and_replace(arguments.filter, arguments.replacement, options) + .find_one_and_replace(arguments.filter, arguments.replacement) + .with_options(options) .await .expect(&test_case.description); assert_eq!( diff --git a/src/test/spec/retryable_writes.rs b/src/test/spec/retryable_writes.rs index d50b13774..9fb837aab 100644 --- a/src/test/spec/retryable_writes.rs +++ b/src/test/spec/retryable_writes.rs @@ -261,7 +261,7 @@ async fn transaction_ids_included() { coll.find_one_and_delete(doc! {}).await.unwrap(); assert!(includes_txn_number("findAndModify")); - coll.find_one_and_replace(doc! {}, doc! { "x": 1 }, None) + coll.find_one_and_replace(doc! {}, doc! { "x": 1 }) .await .unwrap(); assert!(includes_txn_number("findAndModify")); diff --git a/src/test/spec/sessions.rs b/src/test/spec/sessions.rs index 1d02387c7..ce0856b0d 100644 --- a/src/test/spec/sessions.rs +++ b/src/test/spec/sessions.rs @@ -150,7 +150,8 @@ async fn implicit_session_after_connection() { .boxed(), ); ops.push( - coll.find_one_and_replace(doc! {}, doc! { "a": 1 }, None) + coll.find_one_and_replace(doc! {}, doc! 
{ "a": 1 }) + .into_future() .map(ignore_val) .boxed(), ); diff --git a/src/test/spec/unified_runner/operation.rs b/src/test/spec/unified_runner/operation.rs index 1750bf8de..76aa4b22c 100644 --- a/src/test/spec/unified_runner/operation.rs +++ b/src/test/spec/unified_runner/operation.rs @@ -1325,11 +1325,8 @@ impl TestOperation for FindOneAndReplace { async move { let collection = test_runner.get_collection(id).await; let result = collection - .find_one_and_replace( - self.filter.clone(), - self.replacement.clone(), - self.options.clone(), - ) + .find_one_and_replace(self.filter.clone(), self.replacement.clone()) + .with_options(self.options.clone()) .await?; let result = to_bson(&result)?; diff --git a/src/test/spec/v2_runner/operation.rs b/src/test/spec/v2_runner/operation.rs index 3437289c6..6548a2243 100644 --- a/src/test/spec/v2_runner/operation.rs +++ b/src/test/spec/v2_runner/operation.rs @@ -847,27 +847,11 @@ impl TestOperation for FindOneAndReplace { session: Option<&'a mut ClientSession>, ) -> BoxFuture<'a, Result>> { async move { - let result = match session { - Some(session) => { - collection - .find_one_and_replace_with_session( - self.filter.clone(), - self.replacement.clone(), - self.options.clone(), - session, - ) - .await? - } - None => { - collection - .find_one_and_replace( - self.filter.clone(), - self.replacement.clone(), - self.options.clone(), - ) - .await? - } - }; + let result = collection + .find_one_and_replace(self.filter.clone(), self.replacement.clone()) + .with_options(self.options.clone()) + .optional(session, |a, s| a.session(s)) + .await?; let result = bson::to_bson(&result)?; Ok(Some(result)) } From 43bfd0b51022cc7c041eece92d36323902305106 Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Thu, 7 Mar 2024 12:08:51 -0500 Subject: [PATCH 18/39] insert wip: move conversion out of insert op --- src/action.rs | 1 + src/action/insert_many.rs | 40 +++++++++++++++++++++++++++++++++++++++ src/coll.rs | 4 ++-- src/operation/insert.rs | 30 +++++++++++++++++++---------- 4 files changed, 63 insertions(+), 12 deletions(-) create mode 100644 src/action/insert_many.rs diff --git a/src/action.rs b/src/action.rs index 608d234b6..d1be2e605 100644 --- a/src/action.rs +++ b/src/action.rs @@ -12,6 +12,7 @@ mod drop; mod drop_index; mod find; mod find_and_modify; +mod insert_many; mod list_collections; mod list_databases; mod list_indexes; diff --git a/src/action/insert_many.rs b/src/action/insert_many.rs new file mode 100644 index 000000000..b75ab8bd2 --- /dev/null +++ b/src/action/insert_many.rs @@ -0,0 +1,40 @@ +use std::borrow::Borrow; + +use bson::RawDocumentBuf; +use serde::Serialize; + +use crate::{coll::options::InsertManyOptions, error::Result, serde_util, Collection}; + +use super::CollRef; + +impl Collection { + /// Inserts the data in `docs` into the collection. + /// + /// Note that this method accepts both owned and borrowed values, so the input documents + /// do not need to be cloned in order to be passed in. + /// + /// This operation will retry once upon failure if the connection and encountered error support + /// retryability. See the documentation + /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on + /// retryable writes. + /// + /// `await` will return `Result`. 
+ pub fn insert_many_2(&self, docs: impl IntoIterator>) -> InsertMany { + let human_readable = self.human_readable_serialization(); + InsertMany { + coll: CollRef::new(self), + docs: docs + .into_iter() + .map(|v| serde_util::to_raw_document_buf_with_options(v.borrow(), human_readable)) + .collect(), + options: None, + } + } +} + +#[must_use] +pub struct InsertMany<'a> { + coll: CollRef<'a>, + docs: Result>, + options: Option, +} diff --git a/src/coll.rs b/src/coll.rs index da9485000..acc0a0211 100644 --- a/src/coll.rs +++ b/src/coll.rs @@ -264,7 +264,7 @@ where options.clone(), encrypted, self.inner.human_readable_serialization, - ); + )?; match self .client() @@ -398,7 +398,7 @@ where options.map(InsertManyOptions::from_insert_one_options), encrypted, self.inner.human_readable_serialization, - ); + )?; self.client() .execute_operation(insert, session) .await diff --git a/src/operation/insert.rs b/src/operation/insert.rs index 8ace66155..15d6718bc 100644 --- a/src/operation/insert.rs +++ b/src/operation/insert.rs @@ -22,22 +22,34 @@ use crate::{ use super::{COMMAND_OVERHEAD_SIZE, MAX_ENCRYPTED_WRITE_SIZE}; #[derive(Debug)] -pub(crate) struct Insert<'a, T> { +pub(crate) struct Insert { ns: Namespace, - documents: Vec<&'a T>, + documents: Vec, inserted_ids: Vec, options: InsertManyOptions, encrypted: bool, - human_readable_serialization: bool, } -impl<'a, T> Insert<'a, T> { - pub(crate) fn new( +impl Insert { + pub(crate) fn new( ns: Namespace, - documents: Vec<&'a T>, + documents: Vec<&T>, options: Option, encrypted: bool, human_readable_serialization: bool, + ) -> Result { + let documents = documents + .into_iter() + .map(|v| serde_util::to_raw_document_buf_with_options(v, human_readable_serialization)) + .collect::>>()?; + Ok(Self::raw(ns, documents, options, encrypted)) + } + + pub(crate) fn raw( + ns: Namespace, + documents: Vec, + options: Option, + encrypted: bool, ) -> Self { let mut options = options.unwrap_or_default(); if options.ordered.is_none() { @@ -50,12 +62,11 @@ impl<'a, T> Insert<'a, T> { documents, inserted_ids: vec![], encrypted, - human_readable_serialization, } } } -impl<'a, T: Serialize> OperationWithDefaults for Insert<'a, T> { +impl OperationWithDefaults for Insert { type O = InsertManyResult; type Command = RawDocumentBuf; @@ -75,8 +86,7 @@ impl<'a, T: Serialize> OperationWithDefaults for Insert<'a, T> { .take(Checked::new(description.max_write_batch_size).try_into()?) .enumerate() { - let mut doc = - serde_util::to_raw_document_buf_with_options(d, self.human_readable_serialization)?; + let mut doc = d.clone(); let id = match doc.get("_id")? 
{ Some(b) => b.try_into()?, None => { From 657b11e14d7bc64d4f478d8bc070b416cf976849 Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Thu, 7 Mar 2024 12:30:16 -0500 Subject: [PATCH 19/39] insert wip: inefficient execute --- src/action/insert_many.rs | 165 +++++++++++++++++++++++++++++++++++++- 1 file changed, 161 insertions(+), 4 deletions(-) diff --git a/src/action/insert_many.rs b/src/action/insert_many.rs index b75ab8bd2..e96e23a34 100644 --- a/src/action/insert_many.rs +++ b/src/action/insert_many.rs @@ -1,11 +1,20 @@ -use std::borrow::Borrow; +use std::{borrow::Borrow, collections::HashSet}; -use bson::RawDocumentBuf; +use bson::{Bson, RawDocumentBuf}; use serde::Serialize; -use crate::{coll::options::InsertManyOptions, error::Result, serde_util, Collection}; +use crate::{ + coll::options::InsertManyOptions, + error::{BulkWriteError, BulkWriteFailure, Error, ErrorKind, Result}, + operation::Insert as Op, + options::WriteConcern, + results::InsertManyResult, + serde_util, + ClientSession, + Collection, +}; -use super::CollRef; +use super::{action_impl, option_setters, CollRef}; impl Collection { /// Inserts the data in `docs` into the collection. @@ -28,13 +37,161 @@ impl Collection { .map(|v| serde_util::to_raw_document_buf_with_options(v.borrow(), human_readable)) .collect(), options: None, + session: None, } } } +#[cfg(feature = "sync")] +impl crate::sync::Collection { + /// Inserts the data in `docs` into the collection. + /// + /// Note that this method accepts both owned and borrowed values, so the input documents + /// do not need to be cloned in order to be passed in. + /// + /// This operation will retry once upon failure if the connection and encountered error support + /// retryability. See the documentation + /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on + /// retryable writes. + /// + /// `await` will return `Result`. + pub fn insert_many_2(&self, docs: impl IntoIterator>) -> InsertMany { + self.async_collection.insert_many_2(docs) + } +} + #[must_use] pub struct InsertMany<'a> { coll: CollRef<'a>, docs: Result>, options: Option, + session: Option<&'a mut ClientSession>, +} + +impl<'a> InsertMany<'a> { + option_setters! { options: InsertManyOptions; + bypass_document_validation: bool, + ordered: bool, + write_concern: WriteConcern, + comment: Bson, + } + + /// Runs the operation using the provided session. + pub fn session(mut self, value: impl Into<&'a mut ClientSession>) -> Self { + self.session = Some(value.into()); + self + } +} + +action_impl! 
{ + impl<'a> Action for InsertMany<'a> { + type Future = InsertManyFuture; + + async fn execute(mut self) -> Result { + resolve_write_concern_with_session!(self.coll, self.options, self.session.as_ref())?; + + let ds = self.docs?; + if ds.is_empty() { + return Err(ErrorKind::InvalidArgument { + message: "No documents provided to insert_many".to_string(), + } + .into()); + } + let ordered = self.options.as_ref().and_then(|o| o.ordered).unwrap_or(true); + #[cfg(feature = "in-use-encryption-unstable")] + let encrypted = self.coll.client().auto_encryption_opts().await.is_some(); + #[cfg(not(feature = "in-use-encryption-unstable"))] + let encrypted = false; + + let mut cumulative_failure: Option = None; + let mut error_labels: HashSet = Default::default(); + let mut cumulative_result: Option = None; + + let mut n_attempted = 0; + + while n_attempted < ds.len() { + let docs: Vec<_> = ds.iter().skip(n_attempted).cloned().collect(); + let insert = Op::raw( + self.coll.namespace(), + docs, + self.options.clone(), + encrypted, + ); + + match self + .coll + .client() + .execute_operation(insert, self.session.as_deref_mut()) + .await + { + Ok(result) => { + let current_batch_size = result.inserted_ids.len(); + + let cumulative_result = + cumulative_result.get_or_insert_with(InsertManyResult::new); + for (index, id) in result.inserted_ids { + cumulative_result + .inserted_ids + .insert(index + n_attempted, id); + } + + n_attempted += current_batch_size; + } + Err(e) => { + let labels = e.labels().clone(); + match *e.kind { + ErrorKind::BulkWrite(bw) => { + // for ordered inserts this size will be incorrect, but knowing the + // batch size isn't needed for ordered + // failures since we return immediately from + // them anyways. + let current_batch_size = bw.inserted_ids.len() + + bw.write_errors.as_ref().map(|we| we.len()).unwrap_or(0); + + let failure_ref = + cumulative_failure.get_or_insert_with(BulkWriteFailure::new); + if let Some(write_errors) = bw.write_errors { + for err in write_errors { + let index = n_attempted + err.index; + + failure_ref + .write_errors + .get_or_insert_with(Default::default) + .push(BulkWriteError { index, ..err }); + } + } + + if let Some(wc_error) = bw.write_concern_error { + failure_ref.write_concern_error = Some(wc_error); + } + + error_labels.extend(labels); + + if ordered { + // this will always be true since we invoked get_or_insert_with + // above. 
+ if let Some(failure) = cumulative_failure { + return Err(Error::new( + ErrorKind::BulkWrite(failure), + Some(error_labels), + )); + } + } + n_attempted += current_batch_size; + } + _ => return Err(e), + } + } + } + } + + match cumulative_failure { + Some(failure) => Err(Error::new( + ErrorKind::BulkWrite(failure), + Some(error_labels), + )), + None => Ok(cumulative_result.unwrap_or_else(InsertManyResult::new)), + } + } + } } From ba3d7e69a60cdba1b712721bbfa23789d2e6a99c Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Thu, 7 Mar 2024 12:51:36 -0500 Subject: [PATCH 20/39] less inefficient --- src/action.rs | 1 + src/action/insert_many.rs | 6 +++--- src/coll.rs | 16 ++-------------- src/operation/insert.rs | 34 +++++++++------------------------- 4 files changed, 15 insertions(+), 42 deletions(-) diff --git a/src/action.rs b/src/action.rs index d1be2e605..3408fe210 100644 --- a/src/action.rs +++ b/src/action.rs @@ -36,6 +36,7 @@ pub use drop::{DropCollection, DropDatabase}; pub use drop_index::DropIndex; pub use find::Find; pub use find_and_modify::FindAndModify; +pub use insert_many::InsertMany; pub use list_collections::ListCollections; pub use list_databases::ListDatabases; pub use list_indexes::ListIndexes; diff --git a/src/action/insert_many.rs b/src/action/insert_many.rs index e96e23a34..7fc6c7fed 100644 --- a/src/action/insert_many.rs +++ b/src/action/insert_many.rs @@ -1,4 +1,4 @@ -use std::{borrow::Borrow, collections::HashSet}; +use std::{borrow::Borrow, collections::HashSet, ops::Deref}; use bson::{Bson, RawDocumentBuf}; use serde::Serialize; @@ -110,8 +110,8 @@ action_impl! { let mut n_attempted = 0; while n_attempted < ds.len() { - let docs: Vec<_> = ds.iter().skip(n_attempted).cloned().collect(); - let insert = Op::raw( + let docs: Vec<_> = ds.iter().skip(n_attempted).map(Deref::deref).collect(); + let insert = Op::new( self.coll.namespace(), docs, self.options.clone(), diff --git a/src/coll.rs b/src/coll.rs index acc0a0211..303ffdff4 100644 --- a/src/coll.rs +++ b/src/coll.rs @@ -258,13 +258,7 @@ where while n_attempted < ds.len() { let docs: Vec<&T> = ds.iter().skip(n_attempted).map(Borrow::borrow).collect(); - let insert = Insert::new( - self.namespace(), - docs, - options.clone(), - encrypted, - self.inner.human_readable_serialization, - )?; + let insert: Insert = unreachable!(); match self .client() @@ -392,13 +386,7 @@ where #[cfg(not(feature = "in-use-encryption-unstable"))] let encrypted = false; - let insert = Insert::new( - self.namespace(), - vec![doc], - options.map(InsertManyOptions::from_insert_one_options), - encrypted, - self.inner.human_readable_serialization, - )?; + let insert: Insert = unreachable!(); self.client() .execute_operation(insert, session) .await diff --git a/src/operation/insert.rs b/src/operation/insert.rs index 15d6718bc..a49fb3e1c 100644 --- a/src/operation/insert.rs +++ b/src/operation/insert.rs @@ -3,8 +3,7 @@ mod test; use std::{collections::HashMap, convert::TryInto}; -use bson::{oid::ObjectId, Bson, RawArrayBuf, RawDocumentBuf}; -use serde::Serialize; +use bson::{oid::ObjectId, Bson, RawArrayBuf, RawDocument, RawDocumentBuf}; use crate::{ bson::rawdoc, @@ -15,39 +14,24 @@ use crate::{ operation::{OperationWithDefaults, Retryability, WriteResponseBody}, options::{InsertManyOptions, WriteConcern}, results::InsertManyResult, - serde_util, Namespace, }; use super::{COMMAND_OVERHEAD_SIZE, MAX_ENCRYPTED_WRITE_SIZE}; #[derive(Debug)] -pub(crate) struct Insert { +pub(crate) struct Insert<'a> { ns: Namespace, - documents: Vec, + documents: 
Vec<&'a RawDocument>, inserted_ids: Vec, options: InsertManyOptions, encrypted: bool, } -impl Insert { - pub(crate) fn new( +impl<'a> Insert<'a> { + pub(crate) fn new( ns: Namespace, - documents: Vec<&T>, - options: Option, - encrypted: bool, - human_readable_serialization: bool, - ) -> Result { - let documents = documents - .into_iter() - .map(|v| serde_util::to_raw_document_buf_with_options(v, human_readable_serialization)) - .collect::>>()?; - Ok(Self::raw(ns, documents, options, encrypted)) - } - - pub(crate) fn raw( - ns: Namespace, - documents: Vec, + documents: Vec<&'a RawDocument>, options: Option, encrypted: bool, ) -> Self { @@ -66,7 +50,7 @@ impl Insert { } } -impl OperationWithDefaults for Insert { +impl<'a> OperationWithDefaults for Insert<'a> { type O = InsertManyResult; type Command = RawDocumentBuf; @@ -80,13 +64,13 @@ impl OperationWithDefaults for Insert { let max_doc_sequence_size = Checked::::try_from(description.max_message_size_bytes)? - COMMAND_OVERHEAD_SIZE; - for (i, d) in self + for (i, &d) in self .documents .iter() .take(Checked::new(description.max_write_batch_size).try_into()?) .enumerate() { - let mut doc = d.clone(); + let mut doc = d.to_owned(); let id = match doc.get("_id")? { Some(b) => b.try_into()?, None => { From 6b289cdd7043015e886ed9785c9fab320698de07 Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Thu, 7 Mar 2024 13:21:24 -0500 Subject: [PATCH 21/39] update insert_many callers --- src/action/insert_many.rs | 9 +- src/client/session/test.rs | 20 +- src/client/session/test/causal_consistency.rs | 2 +- src/coll.rs | 150 +----- src/concern/test.rs | 59 +-- src/gridfs/upload.rs | 2 +- src/operation/insert.rs | 3 - src/operation/insert/test.rs | 153 ------ src/sync/coll.rs | 44 +- src/sync/test.rs | 2 +- src/test/change_stream.rs | 2 +- src/test/client.rs | 4 +- src/test/coll.rs | 41 +- src/test/csfle.rs | 25 +- src/test/cursor.rs | 54 +-- src/test/documentation_examples.rs | 90 ++-- .../aggregation_data.rs | 455 +++++++++--------- src/test/spec/connection_stepdown.rs | 16 +- src/test/spec/crud_v1/aggregate.rs | 2 +- src/test/spec/crud_v1/count.rs | 2 +- src/test/spec/crud_v1/delete_many.rs | 2 +- src/test/spec/crud_v1/delete_one.rs | 2 +- src/test/spec/crud_v1/distinct.rs | 2 +- src/test/spec/crud_v1/find.rs | 2 +- src/test/spec/crud_v1/find_one_and_delete.rs | 2 +- src/test/spec/crud_v1/find_one_and_replace.rs | 2 +- src/test/spec/crud_v1/find_one_and_update.rs | 2 +- src/test/spec/crud_v1/insert_many.rs | 11 +- src/test/spec/crud_v1/insert_one.rs | 2 +- src/test/spec/crud_v1/replace_one.rs | 2 +- src/test/spec/crud_v1/update_many.rs | 2 +- src/test/spec/crud_v1/update_one.rs | 2 +- src/test/spec/retryable_writes.rs | 12 +- src/test/spec/trace.rs | 4 +- src/test/spec/unified_runner/operation.rs | 20 +- src/test/spec/unified_runner/test_runner.rs | 4 +- src/test/spec/v2_runner.rs | 8 +- src/test/spec/v2_runner/csfle.rs | 2 +- src/test/spec/v2_runner/operation.rs | 13 +- tests/readme_examples.rs | 6 +- 40 files changed, 416 insertions(+), 821 deletions(-) delete mode 100644 src/operation/insert/test.rs diff --git a/src/action/insert_many.rs b/src/action/insert_many.rs index 7fc6c7fed..bba473b33 100644 --- a/src/action/insert_many.rs +++ b/src/action/insert_many.rs @@ -28,7 +28,7 @@ impl Collection { /// retryable writes. /// /// `await` will return `Result`. 
- pub fn insert_many_2(&self, docs: impl IntoIterator>) -> InsertMany { + pub fn insert_many(&self, docs: impl IntoIterator>) -> InsertMany { let human_readable = self.human_readable_serialization(); InsertMany { coll: CollRef::new(self), @@ -54,12 +54,13 @@ impl crate::sync::Collection { /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on /// retryable writes. /// - /// `await` will return `Result`. - pub fn insert_many_2(&self, docs: impl IntoIterator>) -> InsertMany { - self.async_collection.insert_many_2(docs) + /// [`run`](InsertMany::run) will return `Result`. + pub fn insert_many(&self, docs: impl IntoIterator>) -> InsertMany { + self.async_collection.insert_many(docs) } } +/// Inserts documents into a collection. Construct with [`Collection::insert_many`]. #[must_use] pub struct InsertMany<'a> { coll: CollRef<'a>, diff --git a/src/client/session/test.rs b/src/client/session/test.rs index 8c8f4d0fc..3687a1a98 100644 --- a/src/client/session/test.rs +++ b/src/client/session/test.rs @@ -7,10 +7,10 @@ use futures::stream::StreamExt; use crate::{ bson::{doc, Bson}, - coll::options::{CountOptions, InsertManyOptions}, + coll::options::CountOptions, error::Result, event::sdam::SdamEvent, - options::{Acknowledgment, FindOptions, ReadConcern, ReadPreference, WriteConcern}, + options::{FindOptions, ReadConcern, ReadPreference, WriteConcern}, sdam::ServerInfo, selection_criteria::SelectionCriteria, test::{get_client_options, log_uncaptured, Event, EventClient, EventHandler, TestClient}, @@ -63,11 +63,7 @@ macro_rules! for_each_op { .await; $test_func( "insert", - collection_op!( - $test_name, - coll, - coll.insert_many(vec![doc! { "x": 1 }], None) - ), + collection_op!($test_name, coll, coll.insert_many(vec![doc! { "x": 1 }])), ) .await; $test_func( @@ -412,7 +408,7 @@ async fn implicit_session_returned_after_immediate_exhaust() { let coll = client .init_db_and_coll(function_name!(), function_name!()) .await; - coll.insert_many(vec![doc! {}, doc! {}], None) + coll.insert_many(vec![doc! {}, doc! {}]) .await .expect("insert should succeed"); @@ -505,10 +501,10 @@ async fn find_and_getmore_share_session() { .init_db_and_coll(function_name!(), function_name!()) .await; - let options = InsertManyOptions::builder() - .write_concern(WriteConcern::builder().w(Acknowledgment::Majority).build()) - .build(); - coll.insert_many(vec![doc! {}; 3], options).await.unwrap(); + coll.insert_many(vec![doc! {}; 3]) + .write_concern(WriteConcern::majority()) + .await + .unwrap(); let read_preferences: Vec = vec![ ReadPreference::Primary, diff --git a/src/client/session/test/causal_consistency.rs b/src/client/session/test/causal_consistency.rs index ef10907b7..78411f3e6 100644 --- a/src/client/session/test/causal_consistency.rs +++ b/src/client/session/test/causal_consistency.rs @@ -49,7 +49,7 @@ fn all_session_ops() -> impl Iterator { })); ops.push(op!("insert", false, |coll, session| { - coll.insert_many_with_session(vec![doc! { "x": 1 }], None, session) + coll.insert_many(vec![doc! 
{ "x": 1 }]).session(session) })); ops.push(op!("find", true, |coll, session| coll diff --git a/src/coll.rs b/src/coll.rs index 303ffdff4..2a959644d 100644 --- a/src/coll.rs +++ b/src/coll.rs @@ -1,7 +1,7 @@ mod action; pub mod options; -use std::{borrow::Borrow, collections::HashSet, fmt, fmt::Debug, str::FromStr, sync::Arc}; +use std::{borrow::Borrow, fmt, fmt::Debug, str::FromStr, sync::Arc}; use serde::{de::Error as DeError, Deserialize, Deserializer, Serialize}; @@ -11,9 +11,9 @@ use crate::{ client::options::ServerAddress, cmap::conn::PinnedConnectionHandle, concern::{ReadConcern, WriteConcern}, - error::{convert_bulk_errors, BulkWriteError, BulkWriteFailure, Error, ErrorKind, Result}, + error::{convert_bulk_errors, Error, Result}, operation::{Insert, Update}, - results::{InsertManyResult, InsertOneResult, UpdateResult}, + results::{InsertOneResult, UpdateResult}, selection_criteria::SelectionCriteria, Client, ClientSession, @@ -226,150 +226,6 @@ impl Collection where T: Serialize + Send + Sync, { - #[allow(clippy::needless_option_as_deref)] - async fn insert_many_common( - &self, - docs: impl IntoIterator>, - options: impl Into>, - mut session: Option<&mut ClientSession>, - ) -> Result { - let ds: Vec<_> = docs.into_iter().collect(); - let mut options = options.into(); - resolve_write_concern_with_session!(self, options, session.as_ref())?; - - if ds.is_empty() { - return Err(ErrorKind::InvalidArgument { - message: "No documents provided to insert_many".to_string(), - } - .into()); - } - - let ordered = options.as_ref().and_then(|o| o.ordered).unwrap_or(true); - #[cfg(feature = "in-use-encryption-unstable")] - let encrypted = self.client().auto_encryption_opts().await.is_some(); - #[cfg(not(feature = "in-use-encryption-unstable"))] - let encrypted = false; - - let mut cumulative_failure: Option = None; - let mut error_labels: HashSet = Default::default(); - let mut cumulative_result: Option = None; - - let mut n_attempted = 0; - - while n_attempted < ds.len() { - let docs: Vec<&T> = ds.iter().skip(n_attempted).map(Borrow::borrow).collect(); - let insert: Insert = unreachable!(); - - match self - .client() - .execute_operation(insert, session.as_deref_mut()) - .await - { - Ok(result) => { - let current_batch_size = result.inserted_ids.len(); - - let cumulative_result = - cumulative_result.get_or_insert_with(InsertManyResult::new); - for (index, id) in result.inserted_ids { - cumulative_result - .inserted_ids - .insert(index + n_attempted, id); - } - - n_attempted += current_batch_size; - } - Err(e) => { - let labels = e.labels().clone(); - match *e.kind { - ErrorKind::BulkWrite(bw) => { - // for ordered inserts this size will be incorrect, but knowing the - // batch size isn't needed for ordered - // failures since we return immediately from - // them anyways. - let current_batch_size = bw.inserted_ids.len() - + bw.write_errors.as_ref().map(|we| we.len()).unwrap_or(0); - - let failure_ref = - cumulative_failure.get_or_insert_with(BulkWriteFailure::new); - if let Some(write_errors) = bw.write_errors { - for err in write_errors { - let index = n_attempted + err.index; - - failure_ref - .write_errors - .get_or_insert_with(Default::default) - .push(BulkWriteError { index, ..err }); - } - } - - if let Some(wc_error) = bw.write_concern_error { - failure_ref.write_concern_error = Some(wc_error); - } - - error_labels.extend(labels); - - if ordered { - // this will always be true since we invoked get_or_insert_with - // above. 
- if let Some(failure) = cumulative_failure { - return Err(Error::new( - ErrorKind::BulkWrite(failure), - Some(error_labels), - )); - } - } - n_attempted += current_batch_size; - } - _ => return Err(e), - } - } - } - } - - match cumulative_failure { - Some(failure) => Err(Error::new( - ErrorKind::BulkWrite(failure), - Some(error_labels), - )), - None => Ok(cumulative_result.unwrap_or_else(InsertManyResult::new)), - } - } - - /// Inserts the data in `docs` into the collection. - /// - /// Note that this method accepts both owned and borrowed values, so the input documents - /// do not need to be cloned in order to be passed in. - /// - /// This operation will retry once upon failure if the connection and encountered error support - /// retryability. See the documentation - /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on - /// retryable writes. - pub async fn insert_many( - &self, - docs: impl IntoIterator>, - options: impl Into>, - ) -> Result { - self.insert_many_common(docs, options, None).await - } - - /// Inserts the data in `docs` into the collection using the provided `ClientSession`. - /// - /// Note that this method accepts both owned and borrowed values, so the input documents - /// do not need to be cloned in order to be passed in. - /// - /// This operation will retry once upon failure if the connection and encountered error support - /// retryability. See the documentation - /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on - /// retryable writes. - pub async fn insert_many_with_session( - &self, - docs: impl IntoIterator>, - options: impl Into>, - session: &mut ClientSession, - ) -> Result { - self.insert_many_common(docs, options, Some(session)).await - } - async fn insert_one_common( &self, doc: &T, diff --git a/src/concern/test.rs b/src/concern/test.rs index f37e065ed..ff8f44d5c 100644 --- a/src/concern/test.rs +++ b/src/concern/test.rs @@ -5,7 +5,6 @@ use crate::{ error::ErrorKind, options::{ Acknowledgment, - InsertManyOptions, InsertOneOptions, ReadConcern, ReplaceOptions, @@ -244,32 +243,24 @@ async fn command_contains_write_concern_insert_many() { let coll: Collection = client.database("test").collection(function_name!()); coll.drop().await.unwrap(); - coll.insert_many( - &[doc! { "foo": "bar" }], - InsertManyOptions::builder() - .write_concern( - WriteConcern::builder() - .w(Acknowledgment::Nodes(1)) - .journal(true) - .build(), - ) - .build(), - ) - .await - .unwrap(); - coll.insert_many( - &[doc! { "foo": "bar" }], - InsertManyOptions::builder() - .write_concern( - WriteConcern::builder() - .w(Acknowledgment::Nodes(1)) - .journal(false) - .build(), - ) - .build(), - ) - .await - .unwrap(); + coll.insert_many(&[doc! { "foo": "bar" }]) + .write_concern( + WriteConcern::builder() + .w(Acknowledgment::Nodes(1)) + .journal(true) + .build(), + ) + .await + .unwrap(); + coll.insert_many(&[doc! { "foo": "bar" }]) + .write_concern( + WriteConcern::builder() + .w(Acknowledgment::Nodes(1)) + .journal(false) + .build(), + ) + .await + .unwrap(); assert_eq!( command_write_concerns(&client, "insert"), @@ -335,7 +326,7 @@ async fn command_contains_write_concern_update_many() { let coll: Collection = client.database("test").collection(function_name!()); coll.drop().await.unwrap(); - coll.insert_many(&[doc! { "foo": "bar" }, doc! { "foo": "bar" }], None) + coll.insert_many(&[doc! { "foo": "bar" }, doc! { "foo": "bar" }]) .await .unwrap(); coll.update_many(doc! { "foo": "bar" }, doc! 
{ "$set": { "foo": "baz" } }) @@ -431,7 +422,7 @@ async fn command_contains_write_concern_delete_one() { let coll: Collection = client.database("test").collection(function_name!()); coll.drop().await.unwrap(); - coll.insert_many(&[doc! { "foo": "bar" }, doc! { "foo": "bar" }], None) + coll.insert_many(&[doc! { "foo": "bar" }, doc! { "foo": "bar" }]) .await .unwrap(); coll.delete_one(doc! { "foo": "bar" }) @@ -475,7 +466,7 @@ async fn command_contains_write_concern_delete_many() { let coll: Collection = client.database("test").collection(function_name!()); coll.drop().await.unwrap(); - coll.insert_many(&[doc! { "foo": "bar" }, doc! { "foo": "bar" }], None) + coll.insert_many(&[doc! { "foo": "bar" }, doc! { "foo": "bar" }]) .await .unwrap(); coll.delete_many(doc! { "foo": "bar" }) @@ -487,7 +478,7 @@ async fn command_contains_write_concern_delete_many() { ) .await .unwrap(); - coll.insert_many(&[doc! { "foo": "bar" }, doc! { "foo": "bar" }], None) + coll.insert_many(&[doc! { "foo": "bar" }, doc! { "foo": "bar" }]) .await .unwrap(); coll.delete_many(doc! { "foo": "bar" }) @@ -522,7 +513,7 @@ async fn command_contains_write_concern_find_one_and_delete() { let coll: Collection = client.database("test").collection(function_name!()); coll.drop().await.unwrap(); - coll.insert_many(&[doc! { "foo": "bar" }, doc! { "foo": "bar" }], None) + coll.insert_many(&[doc! { "foo": "bar" }, doc! { "foo": "bar" }]) .await .unwrap(); coll.find_one_and_delete(doc! { "foo": "bar" }) @@ -566,7 +557,7 @@ async fn command_contains_write_concern_find_one_and_replace() { let coll: Collection = client.database("test").collection(function_name!()); coll.drop().await.unwrap(); - coll.insert_many(&[doc! { "foo": "bar" }, doc! { "foo": "bar" }], None) + coll.insert_many(&[doc! { "foo": "bar" }, doc! { "foo": "bar" }]) .await .unwrap(); coll.find_one_and_replace(doc! { "foo": "bar" }, doc! { "baz": "fun" }) @@ -610,7 +601,7 @@ async fn command_contains_write_concern_find_one_and_update() { let coll: Collection = client.database("test").collection(function_name!()); coll.drop().await.unwrap(); - coll.insert_many(&[doc! { "foo": "bar" }, doc! { "foo": "bar" }], None) + coll.insert_many(&[doc! { "foo": "bar" }, doc! { "foo": "bar" }]) .await .unwrap(); coll.find_one_and_update(doc! { "foo": "bar" }, doc! 
{ "$set": { "foo": "fun" } }) diff --git a/src/gridfs/upload.rs b/src/gridfs/upload.rs index 2adfe2cda..2d66816e0 100644 --- a/src/gridfs/upload.rs +++ b/src/gridfs/upload.rs @@ -530,7 +530,7 @@ async fn write_bytes( chunks.push(chunk); } - match bucket.chunks().insert_many(chunks, None).await { + match bucket.chunks().insert_many(chunks).await { Ok(_) => { buffer.drain(..(n * chunk_size_bytes).get()?); Ok((n.try_into()?, buffer)) diff --git a/src/operation/insert.rs b/src/operation/insert.rs index a49fb3e1c..e2b7ca282 100644 --- a/src/operation/insert.rs +++ b/src/operation/insert.rs @@ -1,6 +1,3 @@ -#[cfg(test)] -mod test; - use std::{collections::HashMap, convert::TryInto}; use bson::{oid::ObjectId, Bson, RawArrayBuf, RawDocument, RawDocumentBuf}; diff --git a/src/operation/insert/test.rs b/src/operation/insert/test.rs deleted file mode 100644 index c8385338f..000000000 --- a/src/operation/insert/test.rs +++ /dev/null @@ -1,153 +0,0 @@ -use once_cell::sync::Lazy; -use serde::{Deserialize, Serialize}; - -use crate::{ - bson::{doc, Document}, - cmap::StreamDescription, - concern::WriteConcern, - error::{BulkWriteError, ErrorKind, WriteConcernError}, - operation::{test::handle_response_test, Insert, Operation}, - options::InsertManyOptions, - Namespace, -}; - -struct TestFixtures { - op: Insert<'static, Document>, - documents: Vec, -} - -/// Get an Insert operation and the documents/options used to construct it. -fn fixtures(opts: Option) -> TestFixtures { - static DOCUMENTS: Lazy> = Lazy::new(|| { - vec![ - Document::new(), - doc! {"_id": 1234, "a": 1}, - doc! {"a": 123, "b": "hello world" }, - ] - }); - - let options = opts.unwrap_or(InsertManyOptions { - ordered: Some(true), - write_concern: Some(WriteConcern::builder().journal(true).build()), - ..Default::default() - }); - - let op = Insert::new( - Namespace { - db: "test_db".to_string(), - coll: "test_coll".to_string(), - }, - DOCUMENTS.iter().collect(), - Some(options.clone()), - false, - false, - ); - - TestFixtures { - op, - documents: DOCUMENTS.clone(), - } -} - -#[derive(Debug, Serialize, Deserialize)] -struct Documents { - documents: Vec, -} - -#[test] -fn handle_success() { - let mut fixtures = fixtures(None); - - // populate _id for documents that don't provide it - fixtures - .op - .build(&StreamDescription::new_testing()) - .unwrap(); - let response = handle_response_test(&fixtures.op, doc! { "ok": 1.0, "n": 3 }).unwrap(); - let inserted_ids = response.inserted_ids; - assert_eq!(inserted_ids.len(), 3); - assert_eq!( - inserted_ids.get(&1).unwrap(), - fixtures.documents[1].get("_id").unwrap() - ); -} - -#[test] -fn handle_invalid_response() { - let fixtures = fixtures(None); - handle_response_test(&fixtures.op, doc! { "ok": 1.0, "asdfadsf": 123123 }).unwrap_err(); -} - -#[test] -fn handle_write_failure() { - let mut fixtures = fixtures(None); - - // generate _id for operations missing it. - let _ = fixtures - .op - .build(&StreamDescription::new_testing()) - .unwrap(); - - let write_error_response = doc! 
{ - "ok": 1.0, - "n": 1, - "writeErrors": [ - { - "index": 1, - "code": 11000, - "errmsg": "duplicate key", - "errInfo": { - "test key": "test value", - } - } - ], - "writeConcernError": { - "code": 123, - "codeName": "woohoo", - "errmsg": "error message", - "errInfo": { - "writeConcern": { - "w": 2, - "wtimeout": 0, - "provenance": "clientSupplied" - } - } - } - }; - - let write_error_response = - handle_response_test(&fixtures.op, write_error_response).unwrap_err(); - match *write_error_response.kind { - ErrorKind::BulkWrite(bwe) => { - let write_errors = bwe.write_errors.expect("write errors should be present"); - assert_eq!(write_errors.len(), 1); - let expected_err = BulkWriteError { - index: 1, - code: 11000, - code_name: None, - message: "duplicate key".to_string(), - details: Some(doc! { "test key": "test value" }), - }; - assert_eq!(write_errors.first().unwrap(), &expected_err); - - let write_concern_error = bwe - .write_concern_error - .expect("write concern error should be present"); - let expected_wc_err = WriteConcernError { - code: 123, - code_name: "woohoo".to_string(), - message: "error message".to_string(), - details: Some(doc! { "writeConcern": { - "w": 2, - "wtimeout": 0, - "provenance": "clientSupplied" - } }), - labels: vec![], - }; - assert_eq!(write_concern_error, expected_wc_err); - - assert_eq!(bwe.inserted_ids.len(), 1); - } - e => panic!("expected bulk write error, got {:?}", e), - }; -} diff --git a/src/sync/coll.rs b/src/sync/coll.rs index 524da3f99..58ace681a 100644 --- a/src/sync/coll.rs +++ b/src/sync/coll.rs @@ -6,15 +6,8 @@ use super::ClientSession; use crate::{ bson::Document, error::Result, - options::{ - InsertManyOptions, - InsertOneOptions, - ReadConcern, - ReplaceOptions, - SelectionCriteria, - WriteConcern, - }, - results::{InsertManyResult, InsertOneResult, UpdateResult}, + options::{InsertOneOptions, ReadConcern, ReplaceOptions, SelectionCriteria, WriteConcern}, + results::{InsertOneResult, UpdateResult}, Collection as AsyncCollection, Namespace, }; @@ -111,39 +104,6 @@ impl Collection where T: Serialize + Send + Sync, { - /// Inserts the documents in `docs` into the collection. - /// - /// This operation will retry once upon failure if the connection and encountered error support - /// retryability. See the documentation - /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on - /// retryable writes. - pub fn insert_many( - &self, - docs: impl IntoIterator>, - options: impl Into>, - ) -> Result { - crate::sync::TOKIO_RUNTIME.block_on(self.async_collection.insert_many(docs, options.into())) - } - - /// Inserts the documents in `docs` into the collection using the provided `ClientSession`. - /// - /// This operation will retry once upon failure if the connection and encountered error support - /// retryability. See the documentation - /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on - /// retryable writes. - pub fn insert_many_with_session( - &self, - docs: impl IntoIterator>, - options: impl Into>, - session: &mut ClientSession, - ) -> Result { - crate::sync::TOKIO_RUNTIME.block_on(self.async_collection.insert_many_with_session( - docs, - options.into(), - &mut session.async_client_session, - )) - } - /// Inserts `doc` into the collection. 
/// /// This operation will retry once upon failure if the connection and encountered error support diff --git a/src/sync/test.rs b/src/sync/test.rs index 88a75887a..786362f48 100644 --- a/src/sync/test.rs +++ b/src/sync/test.rs @@ -378,7 +378,7 @@ fn borrowed_deserialization() { Doc { id: 5, foo: "1" }, ]; - coll.insert_many(&docs, None).unwrap(); + coll.insert_many(&docs).run().unwrap(); let options = FindOptions::builder() .batch_size(2) .sort(doc! { "_id": 1 }) diff --git a/src/test/change_stream.rs b/src/test/change_stream.rs index 53b2f4c5e..c41e20d6c 100644 --- a/src/test/change_stream.rs +++ b/src/test/change_stream.rs @@ -430,7 +430,7 @@ async fn batch_mid_resume_token() -> Result<()> { } // If we're out of events, make some more. None => { - coll.insert_many((0..3).map(|_| doc! {}), None).await?; + coll.insert_many((0..3).map(|_| doc! {})).await?; } }; diff --git a/src/test/client.rs b/src/test/client.rs index 9f79fc931..3c90f9d75 100644 --- a/src/test/client.rs +++ b/src/test/client.rs @@ -810,7 +810,7 @@ async fn manual_shutdown_with_resources() { let db = client.database("shutdown_test"); db.drop().await.unwrap(); let coll = db.collection::("test"); - coll.insert_many([doc! {}, doc! {}], None).await.unwrap(); + coll.insert_many([doc! {}, doc! {}]).await.unwrap(); let bucket = db.gridfs_bucket(None); // Scope to force drop of resources { @@ -870,7 +870,7 @@ async fn manual_shutdown_immediate_with_resources() { let db = client.database("shutdown_test"); db.drop().await.unwrap(); let coll = db.collection::("test"); - coll.insert_many([doc! {}, doc! {}], None).await.unwrap(); + coll.insert_many([doc! {}, doc! {}]).await.unwrap(); let bucket = db.gridfs_bucket(None); // Resources are scoped to past the `shutdown_immediate`. diff --git a/src/test/coll.rs b/src/test/coll.rs index f80c59f5e..629aef803 100644 --- a/src/test/coll.rs +++ b/src/test/coll.rs @@ -24,7 +24,6 @@ use crate::{ FindOptions, Hint, IndexOptions, - InsertManyOptions, ReadConcern, ReadPreference, SelectionCriteria, @@ -109,7 +108,7 @@ async fn count() { assert_eq!(coll.estimated_document_count().await.unwrap(), 1); let result = coll - .insert_many((1..4).map(|i| doc! { "x": i }).collect::>(), None) + .insert_many((1..4).map(|i| doc! { "x": i }).collect::>()) .await .unwrap(); assert_eq!(result.inserted_ids.len(), 3); @@ -125,7 +124,7 @@ async fn find() { .await; let result = coll - .insert_many((0i32..5).map(|i| doc! { "x": i }).collect::>(), None) + .insert_many((0i32..5).map(|i| doc! { "x": i }).collect::>()) .await .unwrap(); assert_eq!(result.inserted_ids.len(), 5); @@ -150,7 +149,7 @@ async fn update() { .await; let result = coll - .insert_many((0i32..5).map(|_| doc! { "x": 3 }).collect::>(), None) + .insert_many((0i32..5).map(|_| doc! { "x": 3 }).collect::>()) .await .unwrap(); assert_eq!(result.inserted_ids.len(), 5); @@ -187,7 +186,7 @@ async fn delete() { .await; let result = coll - .insert_many((0i32..5).map(|_| doc! { "x": 3 }).collect::>(), None) + .insert_many((0i32..5).map(|_| doc! { "x": 3 }).collect::>()) .await .unwrap(); assert_eq!(result.inserted_ids.len(), 5); @@ -211,7 +210,7 @@ async fn aggregate_out() { drop_collection(&coll).await; let result = coll - .insert_many((0i32..5).map(|n| doc! { "x": n }).collect::>(), None) + .insert_many((0i32..5).map(|n| doc! { "x": n }).collect::>()) .await .unwrap(); assert_eq!(result.inserted_ids.len(), 5); @@ -261,7 +260,7 @@ async fn kill_cursors_on_drop() { drop_collection(&coll).await; - coll.insert_many(vec![doc! { "x": 1 }, doc! 
{ "x": 2 }], None) + coll.insert_many(vec![doc! { "x": 1 }, doc! { "x": 2 }]) .await .unwrap(); @@ -293,7 +292,7 @@ async fn no_kill_cursors_on_exhausted() { drop_collection(&coll).await; - coll.insert_many(vec![doc! { "x": 1 }, doc! { "x": 2 }], None) + coll.insert_many(vec![doc! { "x": 1 }, doc! { "x": 2 }]) .await .unwrap(); @@ -381,11 +380,7 @@ async fn large_insert() { .init_db_and_coll(function_name!(), function_name!()) .await; assert_eq!( - coll.insert_many(docs, None) - .await - .unwrap() - .inserted_ids - .len(), + coll.insert_many(docs).await.unwrap().inserted_ids.len(), 35000 ); } @@ -427,10 +422,10 @@ async fn large_insert_unordered_with_errors() { let coll = client .init_db_and_coll(function_name!(), function_name!()) .await; - let options = InsertManyOptions::builder().ordered(false).build(); match *coll - .insert_many(docs, options) + .insert_many(docs) + .ordered(false) .await .expect_err("should get error") .kind @@ -465,10 +460,10 @@ async fn large_insert_ordered_with_errors() { let coll = client .init_db_and_coll(function_name!(), function_name!()) .await; - let options = InsertManyOptions::builder().ordered(true).build(); match *coll - .insert_many(docs, options) + .insert_many(docs) + .ordered(true) .await .expect_err("should get error") .kind @@ -499,7 +494,7 @@ async fn empty_insert() { .database(function_name!()) .collection::(function_name!()); match *coll - .insert_many(Vec::::new(), None) + .insert_many(Vec::::new()) .await .expect_err("should get error") .kind @@ -771,7 +766,7 @@ async fn typed_insert_many() { str: "b".into(), }, ]; - coll.insert_many(insert_data.clone(), None).await.unwrap(); + coll.insert_many(insert_data.clone()).await.unwrap(); let actual: Vec = coll .find(doc! { "x": 2 }) @@ -978,7 +973,7 @@ async fn cursor_batch_size() { .await; let doc = Document::new(); - coll.insert_many(vec![&doc; 10], None).await.unwrap(); + coll.insert_many(vec![&doc; 10]).await.unwrap(); let opts = FindOptions::builder().batch_size(3).build(); let cursor_no_session = coll.find(doc! {}).with_options(opts.clone()).await.unwrap(); @@ -1047,7 +1042,7 @@ async fn invalid_utf8_response() { // test triggering an invalid error message via an insert_many. let insert_err = coll - .insert_many([&long_unicode_str_doc], None) + .insert_many([&long_unicode_str_doc]) .await .expect_err("second insert of document should fail") .kind; @@ -1274,7 +1269,7 @@ async fn insert_many_document_sequences() { rawdoc! { "s": "a".repeat((max_object_size / 2) as usize) }, rawdoc! { "s": "b".repeat((max_object_size / 2) as usize) }, ]; - collection.insert_many(docs, None).await.unwrap(); + collection.insert_many(docs).await.unwrap(); let event = subscriber .filter_map_event(Duration::from_millis(500), |e| match e { @@ -1302,7 +1297,7 @@ async fn insert_many_document_sequences() { docs.push(doc); } let total_docs = docs.len(); - collection.insert_many(docs, None).await.unwrap(); + collection.insert_many(docs).await.unwrap(); let first_event = subscriber .filter_map_event(Duration::from_millis(500), |e| match e { diff --git a/src/test/csfle.rs b/src/test/csfle.rs index 22261ae03..77e1275f3 100644 --- a/src/test/csfle.rs +++ b/src/test/csfle.rs @@ -581,19 +581,16 @@ async fn bson_size_limits() -> Result<()> { // Test operation 3 let value = "a".repeat(2_097_152); events.clear_events(Duration::from_millis(500)).await; - coll.insert_many( - vec![ - doc! { - "_id": "over_2mib_1", - "unencrypted": value.clone(), - }, - doc! 
{ - "_id": "over_2mib_2", - "unencrypted": value, - }, - ], - None, - ) + coll.insert_many(vec![ + doc! { + "_id": "over_2mib_1", + "unencrypted": value.clone(), + }, + doc! { + "_id": "over_2mib_2", + "unencrypted": value, + }, + ]) .await?; let inserts = events .collect_events(Duration::from_millis(500), |ev| { @@ -613,7 +610,7 @@ async fn bson_size_limits() -> Result<()> { let mut doc2 = doc.clone(); doc2.insert("_id", "encryption_exceeds_2mib_2"); events.clear_events(Duration::from_millis(500)).await; - coll.insert_many(vec![doc, doc2], None).await?; + coll.insert_many(vec![doc, doc2]).await?; let inserts = events .collect_events(Duration::from_millis(500), |ev| { let ev = match ev.as_command_started_event() { diff --git a/src/test/cursor.rs b/src/test/cursor.rs index a7140157b..011db74b4 100644 --- a/src/test/cursor.rs +++ b/src/test/cursor.rs @@ -33,7 +33,7 @@ async fn tailable_cursor() { ) .await; - coll.insert_many((0..5).map(|i| doc! { "_id": i }), None) + coll.insert_many((0..5).map(|i| doc! { "_id": i })) .await .unwrap(); @@ -89,7 +89,8 @@ async fn session_cursor_next() { .create_fresh_collection(function_name!(), function_name!(), None) .await; - coll.insert_many_with_session((0..5).map(|i| doc! { "_id": i }), None, &mut session) + coll.insert_many((0..5).map(|i| doc! { "_id": i })) + .session(&mut session) .await .unwrap(); @@ -119,17 +120,14 @@ async fn batch_exhaustion() { None, ) .await; - coll.insert_many( - vec![ - doc! { "foo": 1 }, - doc! { "foo": 2 }, - doc! { "foo": 3 }, - doc! { "foo": 4 }, - doc! { "foo": 5 }, - doc! { "foo": 6 }, - ], - None, - ) + coll.insert_many(vec![ + doc! { "foo": 1 }, + doc! { "foo": 2 }, + doc! { "foo": 3 }, + doc! { "foo": 4 }, + doc! { "foo": 5 }, + doc! { "foo": 6 }, + ]) .await .unwrap(); @@ -189,7 +187,7 @@ async fn borrowed_deserialization() { Doc { id: 5, foo: "1" }, ]; - coll.insert_many(&docs, None).await.unwrap(); + coll.insert_many(&docs).await.unwrap(); let options = FindOptions::builder() .batch_size(2) @@ -233,13 +231,10 @@ async fn session_cursor_with_type() { let coll = client.database("db").collection("coll"); coll.drop().session(&mut session).await.unwrap(); - coll.insert_many_with_session( - vec![doc! { "x": 1 }, doc! { "x": 2 }, doc! { "x": 3 }], - None, - &mut session, - ) - .await - .unwrap(); + coll.insert_many(vec![doc! { "x": 1 }, doc! { "x": 2 }, doc! { "x": 3 }]) + .session(&mut session) + .await + .unwrap(); let mut cursor: crate::SessionCursor = coll.find(doc! {}).session(&mut session).await.unwrap(); @@ -257,16 +252,13 @@ async fn cursor_final_batch() { let coll = client .create_fresh_collection("test_cursor_final_batch", "test", None) .await; - coll.insert_many( - vec![ - doc! { "foo": 1 }, - doc! { "foo": 2 }, - doc! { "foo": 3 }, - doc! { "foo": 4 }, - doc! { "foo": 5 }, - ], - None, - ) + coll.insert_many(vec![ + doc! { "foo": 1 }, + doc! { "foo": 2 }, + doc! { "foo": 3 }, + doc! { "foo": 4 }, + doc! 
{ "foo": 5 }, + ]) .await .unwrap(); diff --git a/src/test/documentation_examples.rs b/src/test/documentation_examples.rs index 08ca2ec99..cfbab61f6 100644 --- a/src/test/documentation_examples.rs +++ b/src/test/documentation_examples.rs @@ -97,7 +97,7 @@ async fn insert_examples(collection: &Collection) -> Result<()> { }, ]; - collection.insert_many(docs, None).await?; + collection.insert_many(docs).await?; // End Example 3 assert_coll_count!(collection, 4); @@ -162,7 +162,7 @@ async fn query_top_level_fields_examples(collection: &Collection) -> R }, ]; - collection.insert_many(docs, None).await?; + collection.insert_many(docs).await?; // End Example 6 assert_coll_count!(collection, 5); @@ -295,7 +295,7 @@ async fn query_embedded_documents_examples(collection: &Collection) -> }, ]; - collection.insert_many(docs, None).await?; + collection.insert_many(docs).await?; // End Example 14 assert_coll_count!(collection, 5); @@ -396,7 +396,7 @@ async fn query_arrays_examples(collection: &Collection) -> Result<()> }, ]; - collection.insert_many(docs, None).await?; + collection.insert_many(docs).await?; // End Example 20 assert_coll_count!(collection, 5); @@ -533,7 +533,7 @@ async fn query_array_embedded_documents_examples(collection: &Collection }, ]; - collection.insert_many(docs, None).await?; + collection.insert_many(docs).await?; // End Example 38 assert_coll_count!(collection, 2); @@ -782,7 +782,7 @@ async fn projection_examples(collection: &Collection) -> Result<()> { }, ]; - collection.insert_many(docs, None).await?; + collection.insert_many(docs).await?; // End Example 42 assert_coll_count!(collection, 5); @@ -1045,7 +1045,7 @@ async fn update_examples(collection: &Collection) -> Result<()> { }, ]; - collection.insert_many(docs, None).await?; + collection.insert_many(docs).await?; // End Example 51 assert_coll_count!(collection, 10); @@ -1211,7 +1211,7 @@ async fn delete_examples(collection: &Collection) -> Result<()> { }, ]; - collection.insert_many(docs, None).await?; + collection.insert_many(docs).await?; // End Example 55 assert_coll_count!(collection, 5); @@ -1324,7 +1324,7 @@ async fn stable_api_examples() -> GenericResult<()> { doc! { "_id" : 6, "item" : "xyz", "price" : 5, "quantity" : 5, "date" : iso_date("2021-02-15T12:05:10Z")? }, doc! { "_id" : 7, "item" : "xyz", "price" : 5, "quantity" : 10, "date" : iso_date("2021-02-15T14:12:12Z")? }, doc! { "_id" : 8, "item" : "abc", "price" : 10, "quantity" : 5, "date" : iso_date("2021-03-16T20:20:13Z")? } - ], None).await?; + ]).await?; // End Versioned API Example 5 // Start Versioned API Example 6 @@ -1536,46 +1536,40 @@ async fn index_examples() -> Result<()> { let db = client.database("index_examples"); db.drop().await?; db.collection::("records") - .insert_many( - vec![ - doc! { - "student": "Marty McFly", - "classYear": 1986, - "school": "Hill Valley High", - "score": 56.5, - }, - doc! { - "student": "Ferris F. Bueller", - "classYear": 1987, - "school": "Glenbrook North High", - "status": "Suspended", - "score": 76.0, - }, - ], - None, - ) + .insert_many(vec![ + doc! { + "student": "Marty McFly", + "classYear": 1986, + "school": "Hill Valley High", + "score": 56.5, + }, + doc! { + "student": "Ferris F. Bueller", + "classYear": 1987, + "school": "Glenbrook North High", + "status": "Suspended", + "score": 76.0, + }, + ]) .await?; db.collection::("restaurants") - .insert_many( - vec![ - doc! { - "name": "Chez Panisse", - "city": "Oakland", - "state": "California", - "country": "United States", - "rating": 4.4, - }, - doc! 
{ - "name": "Eleven Madison Park", - "cuisine": "French", - "city": "New York City", - "state": "New York", - "country": "United States", - "rating": 7.1, - }, - ], - None, - ) + .insert_many(vec![ + doc! { + "name": "Chez Panisse", + "city": "Oakland", + "state": "California", + "country": "United States", + "rating": 4.4, + }, + doc! { + "name": "Eleven Madison Park", + "cuisine": "French", + "city": "New York City", + "state": "New York", + "country": "United States", + "rating": 7.1, + }, + ]) .await?; use crate::IndexModel; diff --git a/src/test/documentation_examples/aggregation_data.rs b/src/test/documentation_examples/aggregation_data.rs index d32476c0d..b4fd5d449 100644 --- a/src/test/documentation_examples/aggregation_data.rs +++ b/src/test/documentation_examples/aggregation_data.rs @@ -13,242 +13,233 @@ pub(crate) async fn populate(db: &Database) -> GenericResult<()> { let date_20180111 = DateTime::parse_rfc3339_str("2018-01-11T07:15:00.000Z")?; db.collection("sales") - .insert_many( - vec![ - doc! { - "date": date_20180208, - "items": [ - doc! { - "fruit": "kiwi", - "quantity": 2, - "price": 0.5, - }, - doc! { - "fruit": "apple", - "quantity": 1, - "price": 1.0, - }, - ], - }, - doc! { - "date": date_20180109, - "items": [ - doc! { - "fruit": "banana", - "quantity": 8, - "price": 1.0, - }, - doc! { - "fruit": "apple", - "quantity": 1, - "price": 1.0, - }, - doc! { - "fruit": "papaya", - "quantity": 1, - "price": 4.0, - }, - ], - }, - doc! { - "date": date_20180127, - "items": [ - doc! { - "fruit": "banana", - "quantity": 1, - "price": 1.0, - }, - ], - }, - doc! { - "date": date_20180203, - "items": [ - doc! { - "fruit": "banana", - "quantity": 1, - "price": 1.0, - }, - ], - }, - doc! { - "date": date_20180205, - "items": [ - doc! { - "fruit": "banana", - "quantity": 1, - "price": 1.0, - }, - doc! { - "fruit": "mango", - "quantity": 2, - "price": 2.0, - }, - doc! { - "fruit": "apple", - "quantity": 1, - "price": 1.0, - }, - ], - }, - doc! { - "date": date_20180111, - "items": [ - doc! { - "fruit": "banana", - "quantity": 1, - "price": 1.0, - }, - doc! { - "fruit": "apple", - "quantity": 1, - "price": 1.0, - }, - doc! { - "fruit": "papaya", - "quantity": 3, - "price": 4.0, - }, - ], - }, - ], - None, - ) + .insert_many(vec![ + doc! { + "date": date_20180208, + "items": [ + doc! { + "fruit": "kiwi", + "quantity": 2, + "price": 0.5, + }, + doc! { + "fruit": "apple", + "quantity": 1, + "price": 1.0, + }, + ], + }, + doc! { + "date": date_20180109, + "items": [ + doc! { + "fruit": "banana", + "quantity": 8, + "price": 1.0, + }, + doc! { + "fruit": "apple", + "quantity": 1, + "price": 1.0, + }, + doc! { + "fruit": "papaya", + "quantity": 1, + "price": 4.0, + }, + ], + }, + doc! { + "date": date_20180127, + "items": [ + doc! { + "fruit": "banana", + "quantity": 1, + "price": 1.0, + }, + ], + }, + doc! { + "date": date_20180203, + "items": [ + doc! { + "fruit": "banana", + "quantity": 1, + "price": 1.0, + }, + ], + }, + doc! { + "date": date_20180205, + "items": [ + doc! { + "fruit": "banana", + "quantity": 1, + "price": 1.0, + }, + doc! { + "fruit": "mango", + "quantity": 2, + "price": 2.0, + }, + doc! { + "fruit": "apple", + "quantity": 1, + "price": 1.0, + }, + ], + }, + doc! { + "date": date_20180111, + "items": [ + doc! { + "fruit": "banana", + "quantity": 1, + "price": 1.0, + }, + doc! { + "fruit": "apple", + "quantity": 1, + "price": 1.0, + }, + doc! 
{ + "fruit": "papaya", + "quantity": 3, + "price": 4.0, + }, + ], + }, + ]) .await?; db.collection("airlines") - .insert_many( - vec![ - doc! { - "airline": 17, - "name": "Air Canada", - "alias": "AC", - "iata": "ACA", - "icao": "AIR CANADA", - "active": "Y", - "country": "Canada", - "base": "TAL", - }, - doc! { - "airline": 18, - "name": "Turkish Airlines", - "alias": "YK", - "iata": "TRK", - "icao": "TURKISH", - "active": "Y", - "country": "Turkey", - "base": "AET", - }, - doc! { - "airline": 22, - "name": "Saudia", - "alias": "SV", - "iata": "SVA", - "icao": "SAUDIA", - "active": "Y", - "country": "Saudi Arabia", - "base": "JSU", - }, - doc! { - "airline": 29, - "name": "Finnair", - "alias": "AY", - "iata": "FIN", - "icao": "FINNAIR", - "active": "Y", - "country": "Finland", - "base": "JMZ", - }, - doc! { - "airline": 34, - "name": "Afric'air Express", - "alias": "", - "iata": "AAX", - "icao": "AFREX", - "active": "N", - "country": "Ivory Coast", - "base": "LOK", - }, - doc! { - "airline": 37, - "name": "Artem-Avia", - "alias": "", - "iata": "ABA", - "icao": "ARTEM-AVIA", - "active": "N", - "country": "Ukraine", - "base": "JBR", - }, - doc! { - "airline": 38, - "name": "Lufthansa", - "alias": "LH", - "iata": "DLH", - "icao": "LUFTHANSA", - "active": "Y", - "country": "Germany", - "base": "CYS", - }, - ], - None, - ) + .insert_many(vec![ + doc! { + "airline": 17, + "name": "Air Canada", + "alias": "AC", + "iata": "ACA", + "icao": "AIR CANADA", + "active": "Y", + "country": "Canada", + "base": "TAL", + }, + doc! { + "airline": 18, + "name": "Turkish Airlines", + "alias": "YK", + "iata": "TRK", + "icao": "TURKISH", + "active": "Y", + "country": "Turkey", + "base": "AET", + }, + doc! { + "airline": 22, + "name": "Saudia", + "alias": "SV", + "iata": "SVA", + "icao": "SAUDIA", + "active": "Y", + "country": "Saudi Arabia", + "base": "JSU", + }, + doc! { + "airline": 29, + "name": "Finnair", + "alias": "AY", + "iata": "FIN", + "icao": "FINNAIR", + "active": "Y", + "country": "Finland", + "base": "JMZ", + }, + doc! { + "airline": 34, + "name": "Afric'air Express", + "alias": "", + "iata": "AAX", + "icao": "AFREX", + "active": "N", + "country": "Ivory Coast", + "base": "LOK", + }, + doc! { + "airline": 37, + "name": "Artem-Avia", + "alias": "", + "iata": "ABA", + "icao": "ARTEM-AVIA", + "active": "N", + "country": "Ukraine", + "base": "JBR", + }, + doc! { + "airline": 38, + "name": "Lufthansa", + "alias": "LH", + "iata": "DLH", + "icao": "LUFTHANSA", + "active": "Y", + "country": "Germany", + "base": "CYS", + }, + ]) .await?; db.collection("air_alliances") - .insert_many( - vec![ - doc! { - "name": "Star Alliance", - "airlines": [ - "Air Canada", - "Avianca", - "Air China", - "Air New Zealand", - "Asiana Airlines", - "Brussels Airlines", - "Copa Airlines", - "Croatia Airlines", - "EgyptAir", - "TAP Portugal", - "United Airlines", - "Turkish Airlines", - "Swiss International Air Lines", - "Lufthansa", - ], - }, - doc! { - "name": "SkyTeam", - "airlines": [ - "Aerolinias Argentinas", - "Aeromexico", - "Air Europa", - "Air France", - "Alitalia", - "Delta Air Lines", - "Garuda Indonesia", - "Kenya Airways", - "KLM", - "Korean Air", - "Middle East Airlines", - "Saudia", - ], - }, - doc! { - "name": "OneWorld", - "airlines": [ - "Air Berlin", - "American Airlines", - "British Airways", - "Cathay Pacific", - "Finnair", - "Iberia Airlines", - "Japan Airlines", - "LATAM Chile", - "LATAM Brasil", - "Malasya Airlines", - "Canadian Airlines", - ], - }, - ], - None, - ) + .insert_many(vec![ + doc! 
{ + "name": "Star Alliance", + "airlines": [ + "Air Canada", + "Avianca", + "Air China", + "Air New Zealand", + "Asiana Airlines", + "Brussels Airlines", + "Copa Airlines", + "Croatia Airlines", + "EgyptAir", + "TAP Portugal", + "United Airlines", + "Turkish Airlines", + "Swiss International Air Lines", + "Lufthansa", + ], + }, + doc! { + "name": "SkyTeam", + "airlines": [ + "Aerolinias Argentinas", + "Aeromexico", + "Air Europa", + "Air France", + "Alitalia", + "Delta Air Lines", + "Garuda Indonesia", + "Kenya Airways", + "KLM", + "Korean Air", + "Middle East Airlines", + "Saudia", + ], + }, + doc! { + "name": "OneWorld", + "airlines": [ + "Air Berlin", + "American Airlines", + "British Airways", + "Cathay Pacific", + "Finnair", + "Iberia Airlines", + "Japan Airlines", + "LATAM Chile", + "LATAM Brasil", + "Malasya Airlines", + "Canadian Airlines", + ], + }, + ]) .await?; Ok(()) diff --git a/src/test/spec/connection_stepdown.rs b/src/test/spec/connection_stepdown.rs index 16c47f5f0..5eb3802e4 100644 --- a/src/test/spec/connection_stepdown.rs +++ b/src/test/spec/connection_stepdown.rs @@ -5,7 +5,7 @@ use futures::stream::StreamExt; use crate::{ bson::{doc, Document}, error::{CommandError, ErrorKind}, - options::{Acknowledgment, ClientOptions, InsertManyOptions, WriteConcern}, + options::{Acknowledgment, ClientOptions, WriteConcern}, selection_criteria::SelectionCriteria, test::{get_client_options, log_uncaptured, util::EventClient}, Collection, @@ -57,16 +57,10 @@ async fn get_more() { } let docs = vec![doc! { "x": 1 }; 5]; - coll.insert_many( - docs, - Some( - InsertManyOptions::builder() - .write_concern(WriteConcern::builder().w(Acknowledgment::Majority).build()) - .build(), - ), - ) - .await - .unwrap(); + coll.insert_many(docs) + .write_concern(WriteConcern::majority()) + .await + .unwrap(); let mut cursor = coll.find(doc! 
{}).batch_size(2).await.unwrap(); diff --git a/src/test/spec/crud_v1/aggregate.rs b/src/test/spec/crud_v1/aggregate.rs index c6ed4acfb..b126b895e 100644 --- a/src/test/spec/crud_v1/aggregate.rs +++ b/src/test/spec/crud_v1/aggregate.rs @@ -34,7 +34,7 @@ async fn run_aggregate_test(test_file: TestFile) { &test_case.description.replace('$', "%").replace(' ', "_"), ) .await; - coll.insert_many(data.clone(), None) + coll.insert_many(data.clone()) .await .expect(&test_case.description); diff --git a/src/test/spec/crud_v1/count.rs b/src/test/spec/crud_v1/count.rs index 5fca5a8d3..9ea6c9790 100644 --- a/src/test/spec/crud_v1/count.rs +++ b/src/test/spec/crud_v1/count.rs @@ -35,7 +35,7 @@ async fn run_count_test(test_file: TestFile) { .await; if !data.is_empty() { - coll.insert_many(data.clone(), None) + coll.insert_many(data.clone()) .await .expect(&test_case.description); } diff --git a/src/test/spec/crud_v1/delete_many.rs b/src/test/spec/crud_v1/delete_many.rs index adf8a372d..aade4c8c0 100644 --- a/src/test/spec/crud_v1/delete_many.rs +++ b/src/test/spec/crud_v1/delete_many.rs @@ -36,7 +36,7 @@ async fn run_delete_many_test(test_file: TestFile) { let coll = client .init_db_and_coll(function_name!(), &test_case.description) .await; - coll.insert_many(data.clone(), None) + coll.insert_many(data.clone()) .await .expect(&test_case.description); diff --git a/src/test/spec/crud_v1/delete_one.rs b/src/test/spec/crud_v1/delete_one.rs index 48e1b6f7a..84fb209a8 100644 --- a/src/test/spec/crud_v1/delete_one.rs +++ b/src/test/spec/crud_v1/delete_one.rs @@ -36,7 +36,7 @@ async fn run_delete_one_test(test_file: TestFile) { let coll = client .init_db_and_coll(function_name!(), &test_case.description) .await; - coll.insert_many(data.clone(), None) + coll.insert_many(data.clone()) .await .expect(&test_case.description); diff --git a/src/test/spec/crud_v1/distinct.rs b/src/test/spec/crud_v1/distinct.rs index 28dc84fe4..112f527d6 100644 --- a/src/test/spec/crud_v1/distinct.rs +++ b/src/test/spec/crud_v1/distinct.rs @@ -30,7 +30,7 @@ async fn run_distinct_test(test_file: TestFile) { let coll = client .init_db_and_coll(function_name!(), &test_case.description) .await; - coll.insert_many(data.clone(), None) + coll.insert_many(data.clone()) .await .expect(&test_case.description); diff --git a/src/test/spec/crud_v1/find.rs b/src/test/spec/crud_v1/find.rs index eaeafd6f5..2db5a522b 100644 --- a/src/test/spec/crud_v1/find.rs +++ b/src/test/spec/crud_v1/find.rs @@ -33,7 +33,7 @@ async fn run_find_test(test_file: TestFile) { let coll = client .init_db_and_coll(function_name!(), &test_case.description) .await; - coll.insert_many(data.clone(), None) + coll.insert_many(data.clone()) .await .expect(&test_case.description); diff --git a/src/test/spec/crud_v1/find_one_and_delete.rs b/src/test/spec/crud_v1/find_one_and_delete.rs index 3f85aa9c1..23aca2c7e 100644 --- a/src/test/spec/crud_v1/find_one_and_delete.rs +++ b/src/test/spec/crud_v1/find_one_and_delete.rs @@ -31,7 +31,7 @@ async fn run_find_one_and_delete_test(test_file: TestFile) { let coll = client .init_db_and_coll(function_name!(), &test_case.description) .await; - coll.insert_many(data.clone(), None) + coll.insert_many(data.clone()) .await .expect(&test_case.description); diff --git a/src/test/spec/crud_v1/find_one_and_replace.rs b/src/test/spec/crud_v1/find_one_and_replace.rs index 12e6770f3..fd0728bfb 100644 --- a/src/test/spec/crud_v1/find_one_and_replace.rs +++ b/src/test/spec/crud_v1/find_one_and_replace.rs @@ -37,7 +37,7 @@ async fn 
run_find_one_and_replace_test(test_file: TestFile) { let coll = client .init_db_and_coll(function_name!(), &test_case.description[..sub]) .await; - coll.insert_many(data.clone(), None) + coll.insert_many(data.clone()) .await .expect(&test_case.description); diff --git a/src/test/spec/crud_v1/find_one_and_update.rs b/src/test/spec/crud_v1/find_one_and_update.rs index d87f3acb6..db91c4c87 100644 --- a/src/test/spec/crud_v1/find_one_and_update.rs +++ b/src/test/spec/crud_v1/find_one_and_update.rs @@ -38,7 +38,7 @@ async fn run_find_one_and_update_test(test_file: TestFile) { let coll = client .init_db_and_coll(function_name!(), &test_case.description[..sub]) .await; - coll.insert_many(data.clone(), None) + coll.insert_many(data.clone()) .await .expect(&test_case.description); diff --git a/src/test/spec/crud_v1/insert_many.rs b/src/test/spec/crud_v1/insert_many.rs index 7a59c4725..5a3b46ae7 100644 --- a/src/test/spec/crud_v1/insert_many.rs +++ b/src/test/spec/crud_v1/insert_many.rs @@ -3,7 +3,6 @@ use serde::Deserialize; use super::{run_crud_v1_test, Outcome, TestFile}; use crate::{ bson::{Bson, Document}, - options::InsertManyOptions, test::util::TestClient, }; @@ -37,7 +36,7 @@ async fn run_insert_many_test(test_file: TestFile) { let coll = client .init_db_and_coll(function_name!(), &test_case.description) .await; - coll.insert_many(data.clone(), None) + coll.insert_many(data.clone()) .await .expect(&test_case.description); @@ -46,11 +45,11 @@ async fn run_insert_many_test(test_file: TestFile) { let outcome: Outcome = bson::from_bson(Bson::Document(test_case.outcome)).expect(&test_case.description); - let options = InsertManyOptions::builder() + let result = match coll + .insert_many(arguments.documents) .ordered(arguments.options.ordered) - .build(); - - let result = match coll.insert_many(arguments.documents, options).await { + .await + { Ok(result) => { assert_ne!(outcome.error, Some(true), "{}", test_case.description); result.inserted_ids diff --git a/src/test/spec/crud_v1/insert_one.rs b/src/test/spec/crud_v1/insert_one.rs index 8ed8ec5f8..b341bc28e 100644 --- a/src/test/spec/crud_v1/insert_one.rs +++ b/src/test/spec/crud_v1/insert_one.rs @@ -32,7 +32,7 @@ async fn run_insert_one_test(test_file: TestFile) { let coll = client .init_db_and_coll(function_name!(), &test_case.description) .await; - coll.insert_many(data.clone(), None) + coll.insert_many(data.clone()) .await .expect(&test_case.description); diff --git a/src/test/spec/crud_v1/replace_one.rs b/src/test/spec/crud_v1/replace_one.rs index f5cfb1609..e1dda8e6f 100644 --- a/src/test/spec/crud_v1/replace_one.rs +++ b/src/test/spec/crud_v1/replace_one.rs @@ -41,7 +41,7 @@ async fn run_replace_one_test(test_file: TestFile) { &test_case.description.replace('$', "%").replace(' ', "_"), ) .await; - coll.insert_many(data.clone(), None) + coll.insert_many(data.clone()) .await .expect(&test_case.description); diff --git a/src/test/spec/crud_v1/update_many.rs b/src/test/spec/crud_v1/update_many.rs index 1f4e33c0e..7b53a92c8 100644 --- a/src/test/spec/crud_v1/update_many.rs +++ b/src/test/spec/crud_v1/update_many.rs @@ -41,7 +41,7 @@ async fn run_update_many_test(test_file: TestFile) { let coll = client .init_db_and_coll(function_name!(), &test_case.description) .await; - coll.insert_many(data.clone(), None) + coll.insert_many(data.clone()) .await .expect(&test_case.description); diff --git a/src/test/spec/crud_v1/update_one.rs b/src/test/spec/crud_v1/update_one.rs index cfe08e259..3b7288231 100644 --- a/src/test/spec/crud_v1/update_one.rs +++ 
b/src/test/spec/crud_v1/update_one.rs @@ -41,7 +41,7 @@ async fn run_update_one_test(test_file: TestFile) { let coll = client .init_db_and_coll(function_name!(), &test_case.description) .await; - coll.insert_many(data.clone(), None) + coll.insert_many(data.clone()) .await .expect(&test_case.description); diff --git a/src/test/spec/retryable_writes.rs b/src/test/spec/retryable_writes.rs index 9fb837aab..7e7bfbe07 100644 --- a/src/test/spec/retryable_writes.rs +++ b/src/test/spec/retryable_writes.rs @@ -16,7 +16,7 @@ use crate::{ cmap::{CmapEvent, ConnectionCheckoutFailedReason}, command::CommandEvent, }, - options::{ClientOptions, InsertManyOptions}, + options::ClientOptions, runtime, runtime::{spawn, AcknowledgedMessage, AsyncJoinHandle}, sdam::MIN_HEARTBEAT_FREQUENCY, @@ -78,7 +78,7 @@ async fn run_legacy() { let coll = client.init_db_and_coll(&db_name, coll_name).await; if !test_file.data.is_empty() { - coll.insert_many(test_file.data.clone(), None) + coll.insert_many(test_file.data.clone()) .await .expect(&test_case.description); } @@ -271,14 +271,14 @@ async fn transaction_ids_included() { .unwrap(); assert!(includes_txn_number("findAndModify")); - let options = InsertManyOptions::builder().ordered(true).build(); - coll.insert_many(vec![doc! { "x": 1 }], options) + coll.insert_many(vec![doc! { "x": 1 }]) + .ordered(true) .await .unwrap(); assert!(includes_txn_number("insert")); - let options = InsertManyOptions::builder().ordered(false).build(); - coll.insert_many(vec![doc! { "x": 1 }], options) + coll.insert_many(vec![doc! { "x": 1 }]) + .ordered(false) .await .unwrap(); assert!(includes_txn_number("insert")); diff --git a/src/test/spec/trace.rs b/src/test/spec/trace.rs index 745756311..015168bd0 100644 --- a/src/test/spec/trace.rs +++ b/src/test/spec/trace.rs @@ -87,7 +87,7 @@ async fn command_logging_truncation_default_limit() { let mut tracing_subscriber = DEFAULT_GLOBAL_TRACING_HANDLER.subscribe(); let docs = iter::repeat(doc! { "x": "y" }).take(100); - coll.insert_many(docs, None) + coll.insert_many(docs) .await .expect("insert many should succeed"); @@ -178,7 +178,7 @@ async fn command_logging_truncation_mid_codepoint() { let mut tracing_subscriber = DEFAULT_GLOBAL_TRACING_HANDLER.subscribe(); let docs = iter::repeat(doc! { "🤔": "🤔🤔🤔🤔🤔🤔" }).take(10); - coll.insert_many(docs, None) + coll.insert_many(docs) .await .expect("insert many should succeed"); diff --git a/src/test/spec/unified_runner/operation.rs b/src/test/spec/unified_runner/operation.rs index 76aa4b22c..4b992078c 100644 --- a/src/test/spec/unified_runner/operation.rs +++ b/src/test/spec/unified_runner/operation.rs @@ -727,27 +727,17 @@ impl TestOperation for InsertMany { ) -> BoxFuture<'a, Result>> { async move { let collection = test_runner.get_collection(id).await; + let action = collection + .insert_many(&self.documents) + .with_options(self.options.clone()); let result = match &self.session { Some(session_id) => { with_mut_session!(test_runner, session_id, |session| { - async move { - collection - .insert_many_with_session( - self.documents.clone(), - self.options.clone(), - session, - ) - .await - } - .boxed() + async move { action.session(session.deref_mut()).await }.boxed() }) .await? } - None => { - collection - .insert_many(self.documents.clone(), self.options.clone()) - .await? 
- } + None => action.await?, }; let ids: HashMap = result .inserted_ids diff --git a/src/test/spec/unified_runner/test_runner.rs b/src/test/spec/unified_runner/test_runner.rs index 45dc44671..5f1a3f927 100644 --- a/src/test/spec/unified_runner/test_runner.rs +++ b/src/test/spec/unified_runner/test_runner.rs @@ -386,9 +386,7 @@ impl TestRunner { collection_options, ) .await; - coll.insert_many(data.documents.clone(), None) - .await - .unwrap(); + coll.insert_many(data.documents.clone()).await.unwrap(); } else { let collection_options = CreateCollectionOptions::builder() .write_concern(write_concern) diff --git a/src/test/spec/v2_runner.rs b/src/test/spec/v2_runner.rs index 88066d181..059719923 100644 --- a/src/test/spec/v2_runner.rs +++ b/src/test/spec/v2_runner.rs @@ -13,7 +13,7 @@ use crate::{ bson::{doc, from_bson}, coll::options::DropCollectionOptions, concern::WriteConcern, - options::{ClientOptions, CreateCollectionOptions, InsertManyOptions}, + options::{ClientOptions, CreateCollectionOptions}, sdam::{ServerInfo, MIN_HEARTBEAT_FREQUENCY}, selection_criteria::SelectionCriteria, test::{ @@ -180,10 +180,10 @@ impl TestContext { match data { TestData::Single(data) => { if !data.is_empty() { - let options = InsertManyOptions::builder() + coll.insert_many(data.clone()) .write_concern(WriteConcern::majority()) - .build(); - coll.insert_many(data.clone(), options).await.unwrap(); + .await + .unwrap(); } } TestData::Many(_) => panic!("{}: invalid data format", &test.description), diff --git a/src/test/spec/v2_runner/csfle.rs b/src/test/spec/v2_runner/csfle.rs index 310eb57ad..f80e23172 100644 --- a/src/test/spec/v2_runner/csfle.rs +++ b/src/test/spec/v2_runner/csfle.rs @@ -22,7 +22,7 @@ pub(crate) async fn populate_key_vault(client: &Client, kv_data: Option<&Vec { - collection - .insert_many_with_session(documents, options, session) - .await? - } - None => collection.insert_many(documents, options).await?, - }; + let result = collection + .insert_many(documents) + .with_options(options) + .optional(session, |a, s| a.session(s)) + .await?; let ids: HashMap = result .inserted_ids .into_iter() diff --git a/tests/readme_examples.rs b/tests/readme_examples.rs index 2310e40ca..1b291c54d 100644 --- a/tests/readme_examples.rs +++ b/tests/readme_examples.rs @@ -56,7 +56,7 @@ async fn _inserting_documents_into_a_collection(db: mongodb::Database) -> Result ]; // Insert some documents into the "mydb.books" collection. - collection.insert_many(docs, None).await?; + collection.insert_many(docs).await?; Ok(()) } @@ -84,7 +84,7 @@ async fn _inserting_documents_into_a_typed_collection(db: mongodb::Database) -> ]; // Insert the books into "mydb.books" collection, no manual conversion to BSON necessary. - typed_collection.insert_many(books, None).await?; + typed_collection.insert_many(books).await?; Ok(()) } @@ -132,7 +132,7 @@ async fn _using_the_sync_api() -> Result<()> { ]; // Insert some books into the "mydb.books" collection. - collection.insert_many(docs, None)?; + collection.insert_many(docs).run()?; let cursor = collection.find(doc! 
{ "author": "George Orwell" }).run()?; for result in cursor { From e87ccbf73ec5ffb325ab001cd0cdd0caafc06757 Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Thu, 7 Mar 2024 13:37:43 -0500 Subject: [PATCH 22/39] convert insert_one --- src/action.rs | 2 + src/action/insert_one.rs | 112 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 114 insertions(+) create mode 100644 src/action/insert_one.rs diff --git a/src/action.rs b/src/action.rs index 3408fe210..95849c594 100644 --- a/src/action.rs +++ b/src/action.rs @@ -13,6 +13,7 @@ mod drop_index; mod find; mod find_and_modify; mod insert_many; +mod insert_one; mod list_collections; mod list_databases; mod list_indexes; @@ -37,6 +38,7 @@ pub use drop_index::DropIndex; pub use find::Find; pub use find_and_modify::FindAndModify; pub use insert_many::InsertMany; +pub use insert_one::InsertOne; pub use list_collections::ListCollections; pub use list_databases::ListDatabases; pub use list_indexes::ListIndexes; diff --git a/src/action/insert_one.rs b/src/action/insert_one.rs new file mode 100644 index 000000000..fd2e09e88 --- /dev/null +++ b/src/action/insert_one.rs @@ -0,0 +1,112 @@ +use std::{borrow::Borrow, ops::Deref}; + +use bson::{Bson, RawDocumentBuf}; +use serde::Serialize; + +use crate::{ + coll::options::{InsertManyOptions, InsertOneOptions}, + error::{convert_bulk_errors, Result}, + operation::Insert as Op, + options::WriteConcern, + results::InsertOneResult, + serde_util, + ClientSession, + Collection, +}; + +use super::{action_impl, option_setters, CollRef}; + +impl Collection { + /// Inserts `doc` into the collection. + /// + /// Note that either an owned or borrowed value can be inserted here, so the input document + /// does not need to be cloned to be passed in. + /// + /// This operation will retry once upon failure if the connection and encountered error support + /// retryability. See the documentation + /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on + /// retryable writes. + /// + /// `await` will return `Result`. + pub fn insert_one_2(&self, doc: impl Borrow) -> InsertOne { + InsertOne { + coll: CollRef::new(self), + doc: serde_util::to_raw_document_buf_with_options( + doc.borrow(), + self.human_readable_serialization(), + ), + options: None, + session: None, + } + } +} + +#[cfg(feature = "sync")] +impl crate::sync::Collection { + /// Inserts `doc` into the collection. + /// + /// Note that either an owned or borrowed value can be inserted here, so the input document + /// does not need to be cloned to be passed in. + /// + /// This operation will retry once upon failure if the connection and encountered error support + /// retryability. See the documentation + /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on + /// retryable writes. + /// + /// [`run`](InsertOne::run) will return `Result`. + pub fn insert_one_2(&self, doc: impl Borrow) -> InsertOne { + self.async_collection.insert_one_2(doc) + } +} + +/// Inserts a document into a collection. Construct with ['Collection::insert_one`]. +#[must_use] +pub struct InsertOne<'a> { + coll: CollRef<'a>, + doc: Result, + options: Option, + session: Option<&'a mut ClientSession>, +} + +impl<'a> InsertOne<'a> { + option_setters! { options: InsertOneOptions; + bypass_document_validation: bool, + write_concern: WriteConcern, + comment: Bson, + } + + /// Runs the operation using the provided session. 
+ pub fn session(mut self, value: impl Into<&'a mut ClientSession>) -> Self { + self.session = Some(value.into()); + self + } +} + +action_impl! { + impl<'a> Action for InsertOne<'a> { + type Future = InsertOneFuture; + + async fn execute(mut self) -> Result { + resolve_write_concern_with_session!(self.coll, self.options, self.session.as_ref())?; + + #[cfg(feature = "in-use-encryption-unstable")] + let encrypted = self.coll.client().auto_encryption_opts().await.is_some(); + #[cfg(not(feature = "in-use-encryption-unstable"))] + let encrypted = false; + + let doc = self.doc?; + + let insert = Op::new( + self.coll.namespace(), + vec![doc.deref()], + self.options.map(InsertManyOptions::from_insert_one_options), + encrypted, + ); + self.coll.client() + .execute_operation(insert, self.session) + .await + .map(InsertOneResult::from_insert_many_result) + .map_err(convert_bulk_errors) + } + } +} From c7dd86e424be2b46993e5fd8f3a98ad132912427 Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Thu, 7 Mar 2024 13:59:42 -0500 Subject: [PATCH 23/39] convert insert_one callers --- src/action/insert_one.rs | 6 +- .../client_encryption/create_data_key.rs | 2 +- src/client/session/test.rs | 6 +- src/client/session/test/causal_consistency.rs | 2 +- src/coll.rs | 65 +--------- src/concern/test.rs | 71 +++++------ src/gridfs/upload.rs | 8 +- .../server_selection/test/in_window.rs | 2 +- src/sdam/description/topology/test/sdam.rs | 8 +- src/sync/coll.rs | 40 +----- src/sync/test.rs | 20 +-- src/test/change_stream.rs | 36 +++--- src/test/client.rs | 19 +-- src/test/coll.rs | 44 +++---- src/test/csfle.rs | 119 +++++++----------- src/test/cursor.rs | 2 +- src/test/db.rs | 6 +- src/test/documentation_examples.rs | 54 ++++---- src/test/spec/connection_stepdown.rs | 16 +-- src/test/spec/crud_v1/insert_one.rs | 2 +- src/test/spec/retryable_reads.rs | 4 +- src/test/spec/retryable_writes.rs | 12 +- src/test/spec/sessions.rs | 15 ++- src/test/spec/transactions.rs | 2 +- src/test/spec/unified_runner/operation.rs | 17 +-- src/test/spec/v2_runner/operation.rs | 13 +- src/test/spec/write_error.rs | 2 +- src/test/util/event.rs | 2 +- tests/transactions_example.rs | 7 +- 29 files changed, 228 insertions(+), 374 deletions(-) diff --git a/src/action/insert_one.rs b/src/action/insert_one.rs index fd2e09e88..a89525a44 100644 --- a/src/action/insert_one.rs +++ b/src/action/insert_one.rs @@ -28,7 +28,7 @@ impl Collection { /// retryable writes. /// /// `await` will return `Result`. - pub fn insert_one_2(&self, doc: impl Borrow) -> InsertOne { + pub fn insert_one(&self, doc: impl Borrow) -> InsertOne { InsertOne { coll: CollRef::new(self), doc: serde_util::to_raw_document_buf_with_options( @@ -54,8 +54,8 @@ impl crate::sync::Collection { /// retryable writes. /// /// [`run`](InsertOne::run) will return `Result`. - pub fn insert_one_2(&self, doc: impl Borrow) -> InsertOne { - self.async_collection.insert_one_2(doc) + pub fn insert_one(&self, doc: impl Borrow) -> InsertOne { + self.async_collection.insert_one(doc) } } diff --git a/src/client/csfle/client_encryption/create_data_key.rs b/src/client/csfle/client_encryption/create_data_key.rs index b4307bee2..358e1826e 100644 --- a/src/client/csfle/client_encryption/create_data_key.rs +++ b/src/client/csfle/client_encryption/create_data_key.rs @@ -24,7 +24,7 @@ action_impl! 
{ } let ctx = self.client_enc.create_data_key_ctx(provider, self.master_key, self.options)?; let data_key = self.client_enc.exec.run_ctx(ctx, None).await?; - self.client_enc.key_vault.insert_one(&data_key, None).await?; + self.client_enc.key_vault.insert_one(&data_key).await?; let bin_ref = data_key .get_binary("_id") .map_err(|e| Error::internal(format!("invalid data key id: {}", e)))?; diff --git a/src/client/session/test.rs b/src/client/session/test.rs index 3687a1a98..da40e6b05 100644 --- a/src/client/session/test.rs +++ b/src/client/session/test.rs @@ -58,7 +58,7 @@ macro_rules! for_each_op { // collection operations $test_func( "insert", - collection_op!($test_name, coll, coll.insert_one(doc! { "x": 1 }, None)), + collection_op!($test_name, coll, coll.insert_one(doc! { "x": 1 })), ) .await; $test_func( @@ -363,7 +363,7 @@ async fn cluster_time_in_commands() { client .database(function_name!()) .collection::(function_name!()) - .insert_one(doc! {}, None) + .insert_one(doc! {}) .await }) .await; @@ -449,7 +449,7 @@ async fn implicit_session_returned_after_exhaust_by_get_more() { .init_db_and_coll(function_name!(), function_name!()) .await; for _ in 0..5 { - coll.insert_one(doc! {}, None) + coll.insert_one(doc! {}) .await .expect("insert should succeed"); } diff --git a/src/client/session/test/causal_consistency.rs b/src/client/session/test/causal_consistency.rs index 78411f3e6..0238cc228 100644 --- a/src/client/session/test/causal_consistency.rs +++ b/src/client/session/test/causal_consistency.rs @@ -45,7 +45,7 @@ fn all_session_ops() -> impl Iterator { let mut ops = vec![]; ops.push(op!("insert", false, |coll, session| { - coll.insert_one_with_session(doc! { "x": 1 }, None, session) + coll.insert_one(doc! { "x": 1 }).session(session) })); ops.push(op!("insert", false, |coll, session| { diff --git a/src/coll.rs b/src/coll.rs index 2a959644d..e1b0f7de7 100644 --- a/src/coll.rs +++ b/src/coll.rs @@ -11,9 +11,9 @@ use crate::{ client::options::ServerAddress, cmap::conn::PinnedConnectionHandle, concern::{ReadConcern, WriteConcern}, - error::{convert_bulk_errors, Error, Result}, - operation::{Insert, Update}, - results::{InsertOneResult, UpdateResult}, + error::{Error, Result}, + operation::Update, + results::UpdateResult, selection_criteria::SelectionCriteria, Client, ClientSession, @@ -226,65 +226,6 @@ impl Collection where T: Serialize + Send + Sync, { - async fn insert_one_common( - &self, - doc: &T, - options: impl Into>, - session: impl Into>, - ) -> Result { - let session = session.into(); - - let mut options = options.into(); - resolve_write_concern_with_session!(self, options, session.as_ref())?; - - #[cfg(feature = "in-use-encryption-unstable")] - let encrypted = self.client().auto_encryption_opts().await.is_some(); - #[cfg(not(feature = "in-use-encryption-unstable"))] - let encrypted = false; - - let insert: Insert = unreachable!(); - self.client() - .execute_operation(insert, session) - .await - .map(InsertOneResult::from_insert_many_result) - .map_err(convert_bulk_errors) - } - - /// Inserts `doc` into the collection. - /// - /// Note that either an owned or borrowed value can be inserted here, so the input document - /// does not need to be cloned to be passed in. - /// - /// This operation will retry once upon failure if the connection and encountered error support - /// retryability. See the documentation - /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on - /// retryable writes. 
- pub async fn insert_one( - &self, - doc: impl Borrow, - options: impl Into>, - ) -> Result { - self.insert_one_common(doc.borrow(), options, None).await - } - - /// Inserts `doc` into the collection using the provided `ClientSession`. - /// - /// Note that either an owned or borrowed value can be inserted here, so the input document - /// does not need to be cloned to be passed in. - /// - /// This operation will retry once upon failure if the connection and encountered error support - /// retryability. See the documentation - /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on - /// retryable writes. - pub async fn insert_one_with_session( - &self, - doc: impl Borrow, - options: impl Into>, - session: &mut ClientSession, - ) -> Result { - self.insert_one_common(doc.borrow(), options, session).await - } - async fn replace_one_common( &self, query: Document, diff --git a/src/concern/test.rs b/src/concern/test.rs index ff8f44d5c..1bf6cdb5d 100644 --- a/src/concern/test.rs +++ b/src/concern/test.rs @@ -3,14 +3,7 @@ use std::time::Duration; use crate::{ bson::{doc, Bson, Document}, error::ErrorKind, - options::{ - Acknowledgment, - InsertOneOptions, - ReadConcern, - ReplaceOptions, - TransactionOptions, - WriteConcern, - }, + options::{Acknowledgment, ReadConcern, ReplaceOptions, TransactionOptions, WriteConcern}, test::{EventClient, TestClient}, Collection, }; @@ -107,9 +100,9 @@ async fn inconsistent_write_concern_rejected() { journal: true.into(), w_timeout: None, }; - let options = InsertOneOptions::builder().write_concern(wc).build(); let error = coll - .insert_one(doc! {}, options) + .insert_one(doc! {}) + .write_concern(wc) .await .expect_err("insert should fail"); assert!(matches!(*error.kind, ErrorKind::InvalidArgument { .. })); @@ -126,9 +119,9 @@ async fn unacknowledged_write_concern_rejected() { journal: false.into(), w_timeout: None, }; - let options = InsertOneOptions::builder().write_concern(wc).build(); let error = coll - .insert_one(doc! {}, options) + .insert_one(doc! {}) + .write_concern(wc) .await .expect_err("insert should fail"); assert!(matches!(*error.kind, ErrorKind::InvalidArgument { .. })); @@ -194,32 +187,24 @@ async fn command_contains_write_concern_insert_one() { let coll: Collection = client.database("test").collection(function_name!()); coll.drop().await.unwrap(); - coll.insert_one( - doc! { "foo": "bar" }, - InsertOneOptions::builder() - .write_concern( - WriteConcern::builder() - .w(Acknowledgment::Nodes(1)) - .journal(true) - .build(), - ) - .build(), - ) - .await - .unwrap(); - coll.insert_one( - doc! { "foo": "bar" }, - InsertOneOptions::builder() - .write_concern( - WriteConcern::builder() - .w(Acknowledgment::Nodes(1)) - .journal(false) - .build(), - ) - .build(), - ) - .await - .unwrap(); + coll.insert_one(doc! { "foo": "bar" }) + .write_concern( + WriteConcern::builder() + .w(Acknowledgment::Nodes(1)) + .journal(true) + .build(), + ) + .await + .unwrap(); + coll.insert_one(doc! { "foo": "bar" }) + .write_concern( + WriteConcern::builder() + .w(Acknowledgment::Nodes(1)) + .journal(false) + .build(), + ) + .await + .unwrap(); assert_eq!( command_write_concerns(&client, "insert"), @@ -284,7 +269,7 @@ async fn command_contains_write_concern_update_one() { let coll: Collection = client.database("test").collection(function_name!()); coll.drop().await.unwrap(); - coll.insert_one(doc! { "foo": "bar" }, None).await.unwrap(); + coll.insert_one(doc! { "foo": "bar" }).await.unwrap(); coll.update_one(doc! 
{ "foo": "bar" }, doc! { "$set": { "foo": "baz" } }) .write_concern( WriteConcern::builder() @@ -370,7 +355,7 @@ async fn command_contains_write_concern_replace_one() { let coll: Collection = client.database("test").collection(function_name!()); coll.drop().await.unwrap(); - coll.insert_one(doc! { "foo": "bar" }, None).await.unwrap(); + coll.insert_one(doc! { "foo": "bar" }).await.unwrap(); coll.replace_one( doc! { "foo": "bar" }, doc! { "baz": "fun" }, @@ -645,7 +630,7 @@ async fn command_contains_write_concern_aggregate() { let coll: Collection = client.database("test").collection(function_name!()); coll.drop().await.unwrap(); - coll.insert_one(doc! { "foo": "bar" }, None).await.unwrap(); + coll.insert_one(doc! { "foo": "bar" }).await.unwrap(); coll.aggregate(vec![ doc! { "$match": { "foo": "bar" } }, doc! { "$addFields": { "foo": "baz" } }, @@ -696,7 +681,7 @@ async fn command_contains_write_concern_drop() { coll.drop().await.unwrap(); client.clear_cached_events(); - coll.insert_one(doc! { "foo": "bar" }, None).await.unwrap(); + coll.insert_one(doc! { "foo": "bar" }).await.unwrap(); coll.drop() .write_concern( WriteConcern::builder() @@ -706,7 +691,7 @@ async fn command_contains_write_concern_drop() { ) .await .unwrap(); - coll.insert_one(doc! { "foo": "bar" }, None).await.unwrap(); + coll.insert_one(doc! { "foo": "bar" }).await.unwrap(); coll.drop() .write_concern( WriteConcern::builder() diff --git a/src/gridfs/upload.rs b/src/gridfs/upload.rs index 2d66816e0..2b978f671 100644 --- a/src/gridfs/upload.rs +++ b/src/gridfs/upload.rs @@ -132,7 +132,7 @@ impl GridFsBucket { bytes: &buf[..bytes_read], }, }; - self.chunks().insert_one(chunk, None).await?; + self.chunks().insert_one(chunk).await?; length += bytes_read as u64; n += 1; @@ -146,7 +146,7 @@ impl GridFsBucket { filename: Some(filename.as_ref().to_string()), metadata: options.and_then(|opts| opts.metadata), }; - self.files().insert_one(file, None).await?; + self.files().insert_one(file).await?; Ok(()) } @@ -557,9 +557,9 @@ async fn close(bucket: GridFsBucket, buffer: Vec, file: FilesCollectionDocum bytes: &buffer[..], }, }; - bucket.chunks().insert_one(final_chunk, None).await?; + bucket.chunks().insert_one(final_chunk).await?; } - bucket.files().insert_one(&file, None).await?; + bucket.files().insert_one(&file).await?; Ok(()) } .await; diff --git a/src/sdam/description/topology/server_selection/test/in_window.rs b/src/sdam/description/topology/server_selection/test/in_window.rs index c278b45be..1e81cefa5 100644 --- a/src/sdam/description/topology/server_selection/test/in_window.rs +++ b/src/sdam/description/topology/server_selection/test/in_window.rs @@ -155,7 +155,7 @@ async fn load_balancing_test() { setup_client .database("load_balancing_test") .collection("load_balancing_test") - .insert_one(doc! {}, None) + .insert_one(doc! {}) .await .unwrap(); diff --git a/src/sdam/description/topology/test/sdam.rs b/src/sdam/description/topology/test/sdam.rs index cd45346b7..58750785f 100644 --- a/src/sdam/description/topology/test/sdam.rs +++ b/src/sdam/description/topology/test/sdam.rs @@ -603,7 +603,7 @@ async fn topology_closed_event_last() { client .database(function_name!()) .collection(function_name!()) - .insert_one(doc! { "x": 1 }, None) + .insert_one(doc! { "x": 1 }) .await .unwrap(); drop(client); @@ -722,7 +722,7 @@ async fn direct_connection() { direct_false_client .database(function_name!()) .collection(function_name!()) - .insert_one(doc! {}, None) + .insert_one(doc! 
{}) .await .expect("write should succeed with directConnection=false on secondary"); @@ -733,7 +733,7 @@ async fn direct_connection() { let error = direct_true_client .database(function_name!()) .collection(function_name!()) - .insert_one(doc! {}, None) + .insert_one(doc! {}) .await .expect_err("write should fail with directConnection=true on secondary"); assert!(error.is_notwritableprimary()); @@ -743,7 +743,7 @@ async fn direct_connection() { client .database(function_name!()) .collection(function_name!()) - .insert_one(doc! {}, None) + .insert_one(doc! {}) .await .expect("write should succeed with directConnection unspecified"); } diff --git a/src/sync/coll.rs b/src/sync/coll.rs index 58ace681a..7e7a136fb 100644 --- a/src/sync/coll.rs +++ b/src/sync/coll.rs @@ -6,8 +6,8 @@ use super::ClientSession; use crate::{ bson::Document, error::Result, - options::{InsertOneOptions, ReadConcern, ReplaceOptions, SelectionCriteria, WriteConcern}, - results::{InsertOneResult, UpdateResult}, + options::{ReadConcern, ReplaceOptions, SelectionCriteria, WriteConcern}, + results::UpdateResult, Collection as AsyncCollection, Namespace, }; @@ -104,42 +104,6 @@ impl Collection where T: Serialize + Send + Sync, { - /// Inserts `doc` into the collection. - /// - /// This operation will retry once upon failure if the connection and encountered error support - /// retryability. See the documentation - /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on - /// retryable writes. - pub fn insert_one( - &self, - doc: impl Borrow, - options: impl Into>, - ) -> Result { - crate::sync::TOKIO_RUNTIME.block_on( - self.async_collection - .insert_one(doc.borrow(), options.into()), - ) - } - - /// Inserts `doc` into the collection using the provided `ClientSession`. - /// - /// This operation will retry once upon failure if the connection and encountered error support - /// retryability. See the documentation - /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on - /// retryable writes. - pub fn insert_one_with_session( - &self, - doc: impl Borrow, - options: impl Into>, - session: &mut ClientSession, - ) -> Result { - crate::sync::TOKIO_RUNTIME.block_on(self.async_collection.insert_one_with_session( - doc.borrow(), - options.into(), - &mut session.async_client_session, - )) - } - /// Replaces up to one document matching `query` in the collection with `replacement`. /// /// This operation will retry once upon failure if the connection and encountered error support diff --git a/src/sync/test.rs b/src/sync/test.rs index 786362f48..8df3df920 100644 --- a/src/sync/test.rs +++ b/src/sync/test.rs @@ -69,7 +69,8 @@ fn client() { client .database(function_name!()) .collection(function_name!()) - .insert_one(Document::new(), None) + .insert_one(Document::new()) + .run() .expect("insert should succeed"); let db_names = client @@ -116,7 +117,8 @@ fn database() { let coll = init_db_and_coll(&client, function_name!(), function_name!()); - coll.insert_one(doc! { "x": 1 }, None) + coll.insert_one(doc! { "x": 1 }) + .run() .expect("insert should succeed"); let coll_names = db @@ -159,7 +161,8 @@ fn collection() { let client = Client::with_options(options).expect("client creation should succeed"); let coll = init_db_and_coll(&client, function_name!(), function_name!()); - coll.insert_one(doc! { "x": 1 }, None) + coll.insert_one(doc! 
{ "x": 1 }) + .run() .expect("insert should succeed"); let cursor = coll @@ -222,7 +225,7 @@ fn typed_collection() { str: "hello".into(), }; - assert!(coll.insert_one(my_type, None).is_ok()); + assert!(coll.insert_one(my_type).run().is_ok()); } #[test] @@ -276,7 +279,7 @@ fn transactions() { .expect("start transaction should succeed"); run_transaction_with_retry(&mut session, |s| { - coll.insert_one_with_session(doc! { "x": 1 }, None, s)?; + coll.insert_one(doc! { "x": 1 }).session(s).run()?; Ok(()) }) .unwrap(); @@ -300,7 +303,7 @@ fn transactions() { .start_transaction(None) .expect("start transaction should succeed"); run_transaction_with_retry(&mut session, |s| { - coll.insert_one_with_session(doc! { "x": 1 }, None, s)?; + coll.insert_one(doc! { "x": 1 }).session(s).run()?; Ok(()) }) .unwrap(); @@ -334,7 +337,7 @@ fn collection_generic_bounds() { let coll: Collection = client .database(function_name!()) .collection(function_name!()); - let _result = coll.insert_one(Bar {}, None); + let _result = coll.insert_one(Bar {}); } #[test] @@ -424,7 +427,8 @@ fn mixed_sync_and_async() -> Result<()> { sync_db.drop().run()?; sync_db .collection::(COLL_NAME) - .insert_one(doc! { "a": 1 }, None)?; + .insert_one(doc! { "a": 1 }) + .run()?; let mut found = crate::sync::TOKIO_RUNTIME .block_on(async { async_client diff --git a/src/test/change_stream.rs b/src/test/change_stream.rs index c41e20d6c..05f409114 100644 --- a/src/test/change_stream.rs +++ b/src/test/change_stream.rs @@ -73,7 +73,7 @@ async fn tracks_resume_token() -> Result<()> { tokens.push(token.parsed()?); } for _ in 0..3 { - coll.insert_one(doc! {}, None).await?; + coll.insert_one(doc! {}).await?; stream.next().await.transpose()?; tokens.push(stream.resume_token().unwrap().parsed()?); } @@ -140,7 +140,7 @@ async fn errors_on_missing_token() -> Result<()> { .watch() .pipeline(vec![doc! { "$project": { "_id": 0 } }]) .await?; - coll.insert_one(doc! {}, None).await?; + coll.insert_one(doc! {}).await?; assert!(stream.next().await.transpose().is_err()); Ok(()) @@ -155,7 +155,7 @@ async fn resumes_on_error() -> Result<()> { None => return Ok(()), }; - coll.insert_one(doc! { "_id": 1 }, None).await?; + coll.insert_one(doc! { "_id": 1 }).await?; assert!(matches!(stream.next().await.transpose()?, Some(ChangeStreamEvent { operation_type: OperationType::Insert, @@ -172,7 +172,7 @@ async fn resumes_on_error() -> Result<()> { .enable(&client, None) .await?; - coll.insert_one(doc! { "_id": 2 }, None).await?; + coll.insert_one(doc! { "_id": 2 }).await?; assert!(matches!(stream.next().await.transpose()?, Some(ChangeStreamEvent { operation_type: OperationType::Insert, @@ -227,7 +227,7 @@ async fn empty_batch_not_closed() -> Result<()> { assert!(stream.next_if_any().await?.is_none()); - coll.insert_one(doc! {}, None).await?; + coll.insert_one(doc! {}).await?; stream.next().await.transpose()?; let events = client.get_command_events(&["aggregate", "getMore"]); @@ -256,7 +256,7 @@ async fn resume_kill_cursor_error_suppressed() -> Result<()> { None => return Ok(()), }; - coll.insert_one(doc! { "_id": 1 }, None).await?; + coll.insert_one(doc! { "_id": 1 }).await?; assert!(matches!(stream.next().await.transpose()?, Some(ChangeStreamEvent { operation_type: OperationType::Insert, @@ -273,7 +273,7 @@ async fn resume_kill_cursor_error_suppressed() -> Result<()> { .enable(&client, None) .await?; - coll.insert_one(doc! { "_id": 2 }, None).await?; + coll.insert_one(doc! 
{ "_id": 2 }).await?; assert!(matches!(stream.next().await.transpose()?, Some(ChangeStreamEvent { operation_type: OperationType::Insert, @@ -318,7 +318,7 @@ async fn resume_start_at_operation_time() -> Result<()> { .enable(&client, None) .await?; - coll.insert_one(doc! { "_id": 2 }, None).await?; + coll.insert_one(doc! { "_id": 2 }).await?; stream.next().await.transpose()?; let events = client.get_command_events(&["aggregate"]); @@ -399,7 +399,7 @@ async fn batch_end_resume_token_legacy() -> Result<()> { assert_eq!(stream.resume_token(), None); // Case: end of batch - coll.insert_one(doc! {}, None).await?; + coll.insert_one(doc! {}).await?; let expected_id = stream.next_if_any().await?.unwrap().id; assert_eq!(stream.next_if_any().await?, None); assert_eq!(stream.resume_token().as_ref(), Some(&expected_id)); @@ -470,13 +470,13 @@ async fn aggregate_batch() -> Result<()> { } // Synthesize a resume token for the new stream to start at. - coll.insert_one(doc! {}, None).await?; + coll.insert_one(doc! {}).await?; stream.next().await; let token = stream.resume_token().unwrap(); // Populate the initial batch of the new stream. - coll.insert_one(doc! {}, None).await?; - coll.insert_one(doc! {}, None).await?; + coll.insert_one(doc! {}).await?; + coll.insert_one(doc! {}).await?; // Case: `start_after` is given let stream = coll.watch().start_after(token.clone()).await?; @@ -510,14 +510,14 @@ async fn resume_uses_start_after() -> Result<()> { return Ok(()); } - coll.insert_one(doc! {}, None).await?; + coll.insert_one(doc! {}).await?; stream.next().await.transpose()?; let token = stream.resume_token().unwrap(); let mut stream = coll.watch().start_after(token.clone()).await?; // Create an event, and synthesize a resumable error when calling `getMore` for that event. - coll.insert_one(doc! {}, None).await?; + coll.insert_one(doc! {}).await?; let _guard = FailPoint::fail_command( &["getMore"], FailPointMode::Times(1), @@ -566,18 +566,18 @@ async fn resume_uses_resume_after() -> Result<()> { return Ok(()); } - coll.insert_one(doc! {}, None).await?; + coll.insert_one(doc! {}).await?; stream.next().await.transpose()?; let token = stream.resume_token().unwrap(); let mut stream = coll.watch().start_after(token.clone()).await?; // Create an event and read it. - coll.insert_one(doc! {}, None).await?; + coll.insert_one(doc! {}).await?; stream.next().await.transpose()?; // Create an event, and synthesize a resumable error when calling `getMore` for that event. - coll.insert_one(doc! {}, None).await?; + coll.insert_one(doc! {}).await?; let _guard = FailPoint::fail_command( &["getMore"], FailPointMode::Times(1), @@ -658,7 +658,7 @@ async fn split_large_event() -> Result<()> { .await?; let coll = db.collection::("split_large_event"); - coll.insert_one(doc! { "value": "q".repeat(10 * 1024 * 1024) }, None) + coll.insert_one(doc! { "value": "q".repeat(10 * 1024 * 1024) }) .await?; let stream = coll .watch() diff --git a/src/test/client.rs b/src/test/client.rs index 3c90f9d75..369160f23 100644 --- a/src/test/client.rs +++ b/src/test/client.rs @@ -96,7 +96,7 @@ async fn connection_drop_during_read() { let db = client.database("test"); db.collection(function_name!()) - .insert_one(doc! { "x": 1 }, None) + .insert_one(doc! { "x": 1 }) .await .unwrap(); @@ -178,7 +178,7 @@ async fn list_databases() { let db = client.database(name); db.collection("foo") - .insert_one(doc! { "x": 1 }, None) + .insert_one(doc! 
{ "x": 1 }) .await .unwrap(); } @@ -223,7 +223,7 @@ async fn list_database_names() { let db = client.database(name); db.collection("foo") - .insert_one(doc! { "x": 1 }, None) + .insert_one(doc! { "x": 1 }) .await .unwrap(); } @@ -682,7 +682,7 @@ async fn retry_commit_txn_check_out() { setup_client .database("retry_commit_txn_check_out") .collection("retry_commit_txn_check_out") - .insert_one(doc! {}, None) + .insert_one(doc! {}) .await .unwrap(); @@ -701,7 +701,8 @@ async fn retry_commit_txn_check_out() { client .database("retry_commit_txn_check_out") .collection("retry_commit_txn_check_out") - .insert_one_with_session(doc! {}, None, &mut session) + .insert_one(doc! {}) + .session(&mut session) .await .unwrap(); @@ -824,7 +825,8 @@ async fn manual_shutdown_with_resources() { return; } if coll - .insert_one_with_session(doc! {}, None, &mut session) + .insert_one(doc! {}) + .session(&mut session) .await .is_err() { @@ -880,7 +882,8 @@ async fn manual_shutdown_immediate_with_resources() { // Similarly, sessions need an in-progress transaction to have cleanup. let mut session = client.start_session().await.unwrap(); session.start_transaction(None).await.unwrap(); - coll.insert_one_with_session(doc! {}, None, &mut session) + coll.insert_one(doc! {}) + .session(&mut session) .await .unwrap(); let _stream = bucket.open_upload_stream("test", None); @@ -914,7 +917,7 @@ async fn find_one_and_delete_serde_consistency() { problematic: vec![0, 1, 2, 3, 4, 5, 6, 7], }; - coll.insert_one(&doc, None).await.unwrap(); + coll.insert_one(&doc).await.unwrap(); let rec: Foo = coll.find_one(doc! {}).await.unwrap().unwrap(); assert_eq!(doc.problematic, rec.problematic); let rec: Foo = coll.find_one_and_delete(doc! {}).await.unwrap().unwrap(); diff --git a/src/test/coll.rs b/src/test/coll.rs index 629aef803..a6ca90d44 100644 --- a/src/test/coll.rs +++ b/src/test/coll.rs @@ -74,7 +74,7 @@ async fn insert_err_details() { .await .unwrap(); - let wc_error_result = coll.insert_one(doc! { "test": 1 }, None).await; + let wc_error_result = coll.insert_one(doc! { "test": 1 }).await; match *wc_error_result.unwrap_err().kind { ErrorKind::Write(WriteFailure::WriteConcernError(ref wc_error)) => { match &wc_error.details { @@ -104,7 +104,7 @@ async fn count() { assert_eq!(coll.estimated_document_count().await.unwrap(), 0); - let _ = coll.insert_one(doc! { "x": 1 }, None).await.unwrap(); + let _ = coll.insert_one(doc! { "x": 1 }).await.unwrap(); assert_eq!(coll.estimated_document_count().await.unwrap(), 1); let result = coll @@ -735,7 +735,7 @@ async fn insert_one_and_find(coll: &Collection, insert_data: T) where T: Serialize + DeserializeOwned + Clone + PartialEq + Debug + Unpin + Send + Sync, { - coll.insert_one(insert_data.clone(), None).await.unwrap(); + coll.insert_one(insert_data.clone()).await.unwrap(); let result = coll .find_one(to_document(&insert_data).unwrap()) .await @@ -791,7 +791,7 @@ async fn typed_find_one_and_replace() { x: 1, str: "a".into(), }; - coll.insert_one(insert_data.clone(), None).await.unwrap(); + coll.insert_one(insert_data.clone()).await.unwrap(); let replacement = UserType { x: 2, @@ -824,7 +824,7 @@ async fn typed_replace_one() { x: 2, str: "b".into(), }; - coll.insert_one(insert_data, None).await.unwrap(); + coll.insert_one(insert_data).await.unwrap(); coll.replace_one(doc! 
{ "x": 1 }, replacement.clone(), None) .await .unwrap(); @@ -845,7 +845,7 @@ async fn typed_returns() { x: 1, str: "a".into(), }; - coll.insert_one(insert_data.clone(), None).await.unwrap(); + coll.insert_one(insert_data.clone()).await.unwrap(); let result = coll .find_one_and_update(doc! { "x": 1 }, doc! { "$inc": { "x": 1 } }) @@ -883,7 +883,7 @@ async fn count_documents_with_wc() { .database(function_name!()) .collection(function_name!()); - coll.insert_one(doc! {}, None).await.unwrap(); + coll.insert_one(doc! {}).await.unwrap(); coll.count_documents(doc! {}) .await @@ -960,7 +960,7 @@ async fn collection_generic_bounds() { let coll: Collection = client .database(function_name!()) .collection(function_name!()); - let _result = coll.insert_one(Bar {}, None).await; + let _result = coll.insert_one(Bar {}).await; } /// Verify that a cursor with multiple batches whose last batch isn't full @@ -1028,13 +1028,13 @@ async fn invalid_utf8_response() { // a document containing a long string with multi-byte unicode characters. taken from a user // repro in RUBY-2560. let long_unicode_str_doc = doc! {"name": "(╯°□°)╯︵ ┻━┻(╯°□°)╯︵ ┻━┻(╯°□°)╯︵ ┻━┻(╯°□°)╯︵ ┻━┻(╯°□°)╯︵ ┻━┻(╯°□°)╯︵ ┻━┻"}; - coll.insert_one(&long_unicode_str_doc, None) + coll.insert_one(&long_unicode_str_doc) .await .expect("first insert of document should succeed"); // test triggering an invalid error message via an insert_one. let insert_err = coll - .insert_one(&long_unicode_str_doc, None) + .insert_one(&long_unicode_str_doc) .await .expect_err("second insert of document should fail") .kind; @@ -1049,7 +1049,7 @@ async fn invalid_utf8_response() { assert_duplicate_key_error_with_utf8_replacement(&insert_err); // test triggering an invalid error message via an update_one. - coll.insert_one(doc! {"x": 1}, None) + coll.insert_one(doc! {"x": 1}) .await .expect("inserting new document should succeed"); @@ -1154,13 +1154,10 @@ async fn configure_human_readable_serialization() { non_human_readable_collection.drop().await.unwrap(); non_human_readable_collection - .insert_one( - Data { - id: 0, - s: StringOrBytes("non human readable!".into()), - }, - None, - ) + .insert_one(Data { + id: 0, + s: StringOrBytes("non human readable!".into()), + }) .await .unwrap(); @@ -1203,13 +1200,10 @@ async fn configure_human_readable_serialization() { human_readable_collection.drop().await.unwrap(); human_readable_collection - .insert_one( - Data { - id: 0, - s: StringOrBytes("human readable!".into()), - }, - None, - ) + .insert_one(Data { + id: 0, + s: StringOrBytes("human readable!".into()), + }) .await .unwrap(); diff --git a/src/test/csfle.rs b/src/test/csfle.rs index 77e1275f3..d7bf6e55d 100644 --- a/src/test/csfle.rs +++ b/src/test/csfle.rs @@ -39,7 +39,6 @@ use crate::{ Credential, FindOptions, IndexOptions, - InsertOneOptions, ReadConcern, TlsOptions, WriteConcern, @@ -218,7 +217,7 @@ async fn custom_key_material() -> Result<()> { datakeys.delete_one(doc! { "_id": id}).await?; let new_key_id = bson::Binary::from_uuid(bson::Uuid::from_bytes([0; 16])); key_doc.insert("_id", new_key_id.clone()); - datakeys.insert_one(key_doc, None).await?; + datakeys.insert_one(key_doc).await?; let encrypted = enc .encrypt( @@ -378,11 +377,8 @@ async fn data_key_double_encryption() -> Result<()> { let coll = client_encrypted .database("db") .collection::("coll"); - coll.insert_one( - doc! { "_id": provider.name(), "value": encrypted.clone() }, - None, - ) - .await?; + coll.insert_one(doc! 
{ "_id": provider.name(), "value": encrypted.clone() }) + .await?; let found = coll.find_one(doc! { "_id": provider.name() }).await?; assert_eq!( found.as_ref().and_then(|doc| doc.get("value")), @@ -402,7 +398,7 @@ async fn data_key_double_encryption() -> Result<()> { // Attempt to auto-encrypt an already encrypted field. let result = coll - .insert_one(doc! { "encrypted_placeholder": encrypted }, None) + .insert_one(doc! { "encrypted_placeholder": encrypted }) .await; let err = result.unwrap_err(); assert!( @@ -438,7 +434,7 @@ async fn external_key_vault() -> Result<()> { // Setup: initialize db. let (client, datakeys) = init_client().await?; datakeys - .insert_one(load_testdata("external/external-key.json")?, None) + .insert_one(load_testdata("external/external-key.json")?) .await?; // Setup: test options. @@ -478,7 +474,7 @@ async fn external_key_vault() -> Result<()> { let result = client_encrypted .database("db") .collection::("coll") - .insert_one(doc! { "encrypted": "test" }, None) + .insert_one(doc! { "encrypted": "test" }) .await; if with_external_key_vault { let err = result.unwrap_err(); @@ -543,7 +539,7 @@ async fn bson_size_limits() -> Result<()> { .validator(doc! { "$jsonSchema": load_testdata("limits/limits-schema.json")? }) .await?; datakeys - .insert_one(load_testdata("limits/limits-key.json")?, None) + .insert_one(load_testdata("limits/limits-key.json")?) .await?; // Setup: encrypted client. @@ -563,20 +559,17 @@ async fn bson_size_limits() -> Result<()> { // Tests // Test operation 1 - coll.insert_one( - doc! { - "_id": "over_2mib_under_16mib", - "unencrypted": "a".repeat(2097152), - }, - None, - ) + coll.insert_one(doc! { + "_id": "over_2mib_under_16mib", + "unencrypted": "a".repeat(2097152), + }) .await?; // Test operation 2 let mut doc: Document = load_testdata("limits/limits-doc.json")?; doc.insert("_id", "encryption_exceeds_2mib"); doc.insert("unencrypted", "a".repeat(2_097_152 - 2_000)); - coll.insert_one(doc, None).await?; + coll.insert_one(doc).await?; // Test operation 3 let value = "a".repeat(2_097_152); @@ -627,13 +620,13 @@ async fn bson_size_limits() -> Result<()> { "_id": "under_16mib", "unencrypted": "a".repeat(16_777_216 - 2_000), }; - coll.insert_one(doc, None).await?; + coll.insert_one(doc).await?; // Test operation 6 let mut doc: Document = load_testdata("limits/limits-doc.json")?; doc.insert("_id", "encryption_exceeds_16mib"); doc.insert("unencrypted", "a".repeat(16_777_216 - 2_000)); - let result = coll.insert_one(doc, None).await; + let result = coll.insert_one(doc).await; let err = result.unwrap_err(); assert!( matches!(*err.kind, ErrorKind::Write(_)), @@ -679,7 +672,7 @@ async fn views_prohibited() -> Result<()> { let result = client_encrypted .database("db") .collection::("view") - .insert_one(doc! {}, None) + .insert_one(doc! {}) .await; let err = result.unwrap_err(); assert!( @@ -756,7 +749,7 @@ async fn run_corpus_test(local_schema: bool) -> Result<()> { "corpus/corpus-key-gcp.json", "corpus/corpus-key-kmip.json", ] { - datakeys.insert_one(load_testdata(f)?, None).await?; + datakeys.insert_one(load_testdata(f)?).await?; } // Setup: encrypted client and manual encryption. @@ -853,7 +846,7 @@ async fn run_corpus_test(local_schema: bool) -> Result<()> { let coll = client_encrypted .database("db") .collection::("coll"); - let id = coll.insert_one(corpus_copied, None).await?.inserted_id; + let id = coll.insert_one(corpus_copied).await?.inserted_id; let corpus_decrypted = coll .find_one(doc! { "_id": id.clone() }) .await? 
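// A minimal sketch (not from the patch) of how the call sites converted above read: the
// trailing `None` options argument is gone, and `insert_one`/`find_one` take only the
// document/filter. The helper name and the `coll` parameter are illustrative placeholders;
// the API calls themselves are the ones shown in the hunks above.
async fn converted_call_site(coll: &mongodb::Collection<mongodb::bson::Document>) -> mongodb::error::Result<()> {
    use mongodb::bson::doc;
    // insert_one now takes just the document; the result still carries `inserted_id`.
    let id = coll.insert_one(doc! { "x": 1 }).await?.inserted_id;
    // find_one now takes just the filter and returns `Option<T>` as before.
    let _found = coll.find_one(doc! { "_id": id }).await?;
    Ok(())
}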
@@ -1278,7 +1271,7 @@ async fn bypass_mongocryptd_via_shared_library() -> Result<()> { client_encrypted .database("db") .collection::("coll") - .insert_one(doc! { "unencrypted": "test" }, None) + .insert_one(doc! { "unencrypted": "test" }) .await?; // Test: mongocryptd not spawned. assert!(!client_encrypted.mongocryptd_spawned().await); @@ -1319,7 +1312,7 @@ async fn bypass_mongocryptd_via_bypass_spawn() -> Result<()> { let err = client_encrypted .database("db") .collection::("coll") - .insert_one(doc! { "encrypted": "test" }, None) + .insert_one(doc! { "encrypted": "test" }) .await .unwrap_err(); assert!(err.is_server_selection_error(), "unexpected error: {}", err); @@ -1354,7 +1347,7 @@ async fn bypass_mongocryptd_unencrypted_insert(bypass: Bypass) -> Result<()> { client_encrypted .database("db") .collection::("coll") - .insert_one(doc! { "unencrypted": "test" }, None) + .insert_one(doc! { "unencrypted": "test" }) .await?; // Test: mongocryptd not spawned. assert!(!client_encrypted.mongocryptd_spawned().await); @@ -1525,12 +1518,8 @@ impl DeadlockTestCase { client_keyvault .database("keyvault") .collection::("datakeys") - .insert_one( - load_testdata("external/external-key.json")?, - InsertOneOptions::builder() - .write_concern(WriteConcern::majority()) - .build(), - ) + .insert_one(load_testdata("external/external-key.json")?) + .write_concern(WriteConcern::majority()) .await?; client_test .database("db") @@ -1576,13 +1565,13 @@ impl DeadlockTestCase { client_test .database("db") .collection::("coll") - .insert_one(doc! { "_id": 0, "encrypted": ciphertext }, None) + .insert_one(doc! { "_id": 0, "encrypted": ciphertext }) .await?; } else { client_encrypted .database("db") .collection::("coll") - .insert_one(doc! { "_id": 0, "encrypted": "string0" }, None) + .insert_one(doc! { "_id": 0, "encrypted": "string0" }) .await?; } @@ -2002,7 +1991,7 @@ async fn explicit_encryption_case_1() -> Result<()> { .contention_factor(0) .await?; enc_coll - .insert_one(doc! { "encryptedIndexed": insert_payload }, None) + .insert_one(doc! { "encryptedIndexed": insert_payload }) .await?; let find_payload = testdata @@ -2060,7 +2049,7 @@ async fn explicit_encryption_case_2() -> Result<()> { .contention_factor(10) .await?; enc_coll - .insert_one(doc! { "encryptedIndexed": insert_payload }, None) + .insert_one(doc! { "encryptedIndexed": insert_payload }) .await?; } @@ -2135,10 +2124,7 @@ async fn explicit_encryption_case_3() -> Result<()> { ) .await?; enc_coll - .insert_one( - doc! { "_id": 1, "encryptedUnindexed": insert_payload }, - None, - ) + .insert_one(doc! { "_id": 1, "encryptedUnindexed": insert_payload }) .await?; let found: Vec<_> = enc_coll @@ -2259,12 +2245,8 @@ async fn explicit_encryption_setup() -> Result("datakeys") - .insert_one( - key1_document, - InsertOneOptions::builder() - .write_concern(WriteConcern::majority()) - .build(), - ) + .insert_one(key1_document) + .write_concern(WriteConcern::majority()) .await?; let client_encryption = ClientEncryption::new( @@ -2486,7 +2468,7 @@ async fn decryption_events_decrypt_error() -> Result<()> { None => return Ok(()), }; td.decryption_events - .insert_one(doc! { "encrypted": td.malformed_ciphertext }, None) + .insert_one(doc! { "encrypted": td.malformed_ciphertext }) .await?; let err = td.decryption_events.aggregate(vec![]).await.unwrap_err(); assert!(err.is_csfle_error()); @@ -2517,7 +2499,7 @@ async fn decryption_events_decrypt_success() -> Result<()> { None => return Ok(()), }; td.decryption_events - .insert_one(doc! 
{ "encrypted": td.ciphertext }, None) + .insert_one(doc! { "encrypted": td.ciphertext }) .await?; td.decryption_events.aggregate(vec![]).await?; let guard = td.ev_handler.succeeded.lock().unwrap(); @@ -2859,7 +2841,7 @@ async fn bypass_mongocryptd_client() -> Result<()> { client_encrypted .database("db") .collection::("coll") - .insert_one(doc! { "unencrypted": "test" }, None) + .insert_one(doc! { "unencrypted": "test" }) .await?; assert!(!client_encrypted.has_mongocryptd_client().await); @@ -2926,7 +2908,7 @@ async fn auto_encryption_keys(master_key: MasterKey) -> Result<()> { .await .1?; let coll = db.collection::("case_1"); - let result = coll.insert_one(doc! { "ssn": "123-45-6789" }, None).await; + let result = coll.insert_one(doc! { "ssn": "123-45-6789" }).await; assert!( result.as_ref().unwrap_err().code() == Some(121), "Expected error 121 (failed validation), got {:?}", @@ -2985,8 +2967,7 @@ async fn auto_encryption_keys(master_key: MasterKey) -> Result<()> { }; let encrypted_payload = ce.encrypt("123-45-6789", key, Algorithm::Unindexed).await?; let coll = db.collection::("case_1"); - coll.insert_one(doc! { "ssn": encrypted_payload }, None) - .await?; + coll.insert_one(doc! { "ssn": encrypted_payload }).await?; Ok(()) } @@ -3103,12 +3084,8 @@ async fn range_explicit_encryption_test( .await?; datakeys_collection - .insert_one( - key1_document, - InsertOneOptions::builder() - .write_concern(WriteConcern::majority()) - .build(), - ) + .insert_one(key1_document) + .write_concern(WriteConcern::majority()) .await?; let key_vault_client = TestClient::new().await; @@ -3149,13 +3126,10 @@ async fn range_explicit_encryption_test( .await?; explicit_encryption_collection - .insert_one( - doc! { - &key: encrypted_value, - "_id": id as i32, - }, - None, - ) + .insert_one(doc! { + &key: encrypted_value, + "_id": id as i32, + }) .await?; } @@ -3427,14 +3401,11 @@ async fn fle2_example() -> Result<()> { // Encrypt an insert. encrypted_coll - .insert_one( - doc! { - "_id": 1, - "encryptedIndexed": "indexedValue", - "encryptedUnindexed": "unindexedValue", - }, - None, - ) + .insert_one(doc! { + "_id": 1, + "encryptedIndexed": "indexedValue", + "encryptedUnindexed": "unindexedValue", + }) .await?; // Encrypt a find. diff --git a/src/test/cursor.rs b/src/test/cursor.rs index 011db74b4..94891e0e6 100644 --- a/src/test/cursor.rs +++ b/src/test/cursor.rs @@ -66,7 +66,7 @@ async fn tailable_cursor() { }; runtime::spawn(async move { - coll.insert_one(doc! { "_id": 5 }, None).await.unwrap(); + coll.insert_one(doc! { "_id": 5 }).await.unwrap(); }); let delay = tokio::time::sleep(await_time); diff --git a/src/test/db.rs b/src/test/db.rs index b699294d3..c8e82ce24 100644 --- a/src/test/db.rs +++ b/src/test/db.rs @@ -52,7 +52,7 @@ async fn list_collections() { for coll_name in coll_names { db.collection(coll_name) - .insert_one(doc! { "x": 1 }, None) + .insert_one(doc! { "x": 1 }) .await .unwrap(); } @@ -80,7 +80,7 @@ async fn list_collections_filter() { let coll_names = &["bar", "baz", "foo"]; for coll_name in coll_names { db.collection(coll_name) - .insert_one(doc! { "x": 1 }, None) + .insert_one(doc! { "x": 1 }) .await .unwrap(); } @@ -119,7 +119,7 @@ async fn list_collection_names() { for coll in expected_colls { db.collection(coll) - .insert_one(doc! { "x": 1 }, None) + .insert_one(doc! 
{ "x": 1 }) .await .unwrap(); } diff --git a/src/test/documentation_examples.rs b/src/test/documentation_examples.rs index cfbab61f6..47b6a11d4 100644 --- a/src/test/documentation_examples.rs +++ b/src/test/documentation_examples.rs @@ -39,19 +39,16 @@ async fn insert_examples(collection: &Collection) -> Result<()> { // Start Example 1 collection - .insert_one( - doc! { - "item": "canvas", - "qty": 100, - "tags": ["cotton"], - "size": { - "h": 28, - "w": 35.5, - "uom": "cm", - } - }, - None, - ) + .insert_one(doc! { + "item": "canvas", + "qty": 100, + "tags": ["cotton"], + "size": { + "h": 28, + "w": 35.5, + "uom": "cm", + } + }) .await?; // End Example 1 @@ -1506,16 +1503,13 @@ async fn run_command_examples() -> Result<()> { let db = client.database("run_command_examples"); db.drop().await?; db.collection::("restaurants") - .insert_one( - doc! { - "name": "Chez Panisse", - "city": "Oakland", - "state": "California", - "country": "United States", - "rating": 4.4, - }, - None, - ) + .insert_one(doc! { + "name": "Chez Panisse", + "city": "Oakland", + "state": "California", + "country": "United States", + "rating": 4.4, + }) .await?; #[allow(unused)] @@ -1612,7 +1606,7 @@ async fn change_streams_examples() -> Result<()> { db.drop().await?; let inventory = db.collection::("inventory"); // Populate an item so the collection exists for the change stream to watch. - inventory.insert_one(doc! {}, None).await?; + inventory.insert_one(doc! {}).await?; // Background writer thread so that the `stream.next()` calls return something. let (tx, mut rx) = tokio::sync::oneshot::channel(); @@ -1622,7 +1616,7 @@ async fn change_streams_examples() -> Result<()> { loop { tokio::select! { _ = interval.tick() => { - writer_inventory.insert_one(doc! {}, None).await?; + writer_inventory.insert_one(doc! {}).await?; } _ = &mut rx => break, } @@ -1697,12 +1691,12 @@ async fn convenient_transaction_examples() -> Result<()> { client .database("mydb1") .collection::("foo") - .insert_one(doc! { "abc": 0}, None) + .insert_one(doc! { "abc": 0}) .await?; client .database("mydb2") .collection::("bar") - .insert_one(doc! { "xyz": 0}, None) + .insert_one(doc! { "xyz": 0}) .await?; // Step 1: Define the callback that specifies the sequence of operations to perform inside the @@ -1719,10 +1713,12 @@ async fn convenient_transaction_examples() -> Result<()> { // Important: You must pass the session to the operations. collection_one - .insert_one_with_session(doc! { "abc": 1 }, None, session) + .insert_one(doc! { "abc": 1 }) + .session(&mut *session) .await?; collection_two - .insert_one_with_session(doc! { "xyz": 999 }, None, session) + .insert_one(doc! { "xyz": 999 }) + .session(session) .await?; Ok(()) diff --git a/src/test/spec/connection_stepdown.rs b/src/test/spec/connection_stepdown.rs index 5eb3802e4..79fb9cc00 100644 --- a/src/test/spec/connection_stepdown.rs +++ b/src/test/spec/connection_stepdown.rs @@ -122,7 +122,7 @@ async fn notwritableprimary_keep_pool() { .await .unwrap(); - let result = coll.insert_one(doc! { "test": 1 }, None).await; + let result = coll.insert_one(doc! { "test": 1 }).await; assert!( matches!( result.map_err(|e| *e.kind), @@ -131,7 +131,7 @@ async fn notwritableprimary_keep_pool() { "insert should have failed" ); - coll.insert_one(doc! { "test": 1 }, None) + coll.insert_one(doc! { "test": 1 }) .await .expect("insert should have succeeded"); @@ -174,7 +174,7 @@ async fn notwritableprimary_reset_pool() { .await .unwrap(); - let result = coll.insert_one(doc! 
{ "test": 1 }, None).await; + let result = coll.insert_one(doc! { "test": 1 }).await; assert!( matches!( result.map_err(|e| *e.kind), @@ -186,7 +186,7 @@ async fn notwritableprimary_reset_pool() { tokio::time::sleep(Duration::from_millis(250)).await; assert_eq!(client.count_pool_cleared_events(), 1); - coll.insert_one(doc! { "test": 1 }, None) + coll.insert_one(doc! { "test": 1 }) .await .expect("insert should have succeeded"); } @@ -223,7 +223,7 @@ async fn shutdown_in_progress() { .await .unwrap(); - let result = coll.insert_one(doc! { "test": 1 }, None).await; + let result = coll.insert_one(doc! { "test": 1 }).await; assert!( matches!( result.map_err(|e| *e.kind), @@ -235,7 +235,7 @@ async fn shutdown_in_progress() { tokio::time::sleep(Duration::from_millis(250)).await; assert_eq!(client.count_pool_cleared_events(), 1); - coll.insert_one(doc! { "test": 1 }, None) + coll.insert_one(doc! { "test": 1 }) .await .expect("insert should have succeeded"); } @@ -268,7 +268,7 @@ async fn interrupted_at_shutdown() { .await .unwrap(); - let result = coll.insert_one(doc! { "test": 1 }, None).await; + let result = coll.insert_one(doc! { "test": 1 }).await; assert!( matches!( result.map_err(|e| *e.kind), @@ -280,7 +280,7 @@ async fn interrupted_at_shutdown() { tokio::time::sleep(Duration::from_millis(250)).await; assert_eq!(client.count_pool_cleared_events(), 1); - coll.insert_one(doc! { "test": 1 }, None) + coll.insert_one(doc! { "test": 1 }) .await .expect("insert should have succeeded"); diff --git a/src/test/spec/crud_v1/insert_one.rs b/src/test/spec/crud_v1/insert_one.rs index b341bc28e..5f159b1f1 100644 --- a/src/test/spec/crud_v1/insert_one.rs +++ b/src/test/spec/crud_v1/insert_one.rs @@ -48,7 +48,7 @@ async fn run_insert_one_test(test_file: TestFile) { } let result = coll - .insert_one(arguments.document, None) + .insert_one(arguments.document) .await .expect(&test_case.description); assert_eq!( diff --git a/src/test/spec/retryable_reads.rs b/src/test/spec/retryable_reads.rs index b6e14a5ae..e29b98c2e 100644 --- a/src/test/spec/retryable_reads.rs +++ b/src/test/spec/retryable_reads.rs @@ -52,7 +52,7 @@ async fn retry_releases_connection() { let collection = client .database("retry_releases_connection") .collection("retry_releases_connection"); - collection.insert_one(doc! { "x": 1 }, None).await.unwrap(); + collection.insert_one(doc! { "x": 1 }).await.unwrap(); // Use a connection error to ensure streaming monitor checks get cancelled. Otherwise, we'd have // to wait for the entire heartbeatFrequencyMS before the find succeeds. @@ -99,7 +99,7 @@ async fn retry_read_pool_cleared() { let collection = client .database("retry_read_pool_cleared") .collection("retry_read_pool_cleared"); - collection.insert_one(doc! { "x": 1 }, None).await.unwrap(); + collection.insert_one(doc! { "x": 1 }).await.unwrap(); let options = FailCommandOptions::builder() .error_code(91) diff --git a/src/test/spec/retryable_writes.rs b/src/test/spec/retryable_writes.rs index 7e7bfbe07..fb635d05f 100644 --- a/src/test/spec/retryable_writes.rs +++ b/src/test/spec/retryable_writes.rs @@ -242,7 +242,7 @@ async fn transaction_ids_included() { started.command.contains_key("txnNumber") }; - coll.insert_one(doc! { "x": 1 }, None).await.unwrap(); + coll.insert_one(doc! { "x": 1 }).await.unwrap(); assert!(includes_txn_number("insert")); coll.update_one(doc! {}, doc! { "$set": doc! { "x": 1 } }) @@ -312,7 +312,7 @@ async fn mmapv1_error_raised() { return; } - let err = coll.insert_one(doc! 
{ "x": 1 }, None).await.unwrap_err(); + let err = coll.insert_one(doc! { "x": 1 }).await.unwrap_err(); match *err.kind { ErrorKind::Command(err) => { assert_eq!( @@ -428,7 +428,7 @@ async fn retry_write_pool_cleared() { let mut tasks: Vec> = Vec::new(); for _ in 0..2 { let coll = collection.clone(); - let task = runtime::spawn(async move { coll.insert_one(doc! {}, None).await }); + let task = runtime::spawn(async move { coll.insert_one(doc! {}).await }); tasks.push(task); } @@ -556,7 +556,7 @@ async fn retry_write_retryable_write_error() { let result = client .database("test") .collection::("test") - .insert_one(doc! { "hello": "there" }, None) + .insert_one(doc! { "hello": "there" }) .await; assert_eq!(result.unwrap_err().code(), Some(91)); @@ -605,7 +605,7 @@ async fn retry_write_different_mongos() { let result = client .database("test") .collection::("retry_write_different_mongos") - .insert_one(doc! {}, None) + .insert_one(doc! {}) .await; assert!(result.is_err()); let events = client.get_command_events(&["insert"]); @@ -663,7 +663,7 @@ async fn retry_write_same_mongos() { let result = client .database("test") .collection::("retry_write_same_mongos") - .insert_one(doc! {}, None) + .insert_one(doc! {}) .await; assert!(result.is_ok(), "{:?}", result); let events = client.get_command_events(&["insert"]); diff --git a/src/test/spec/sessions.rs b/src/test/spec/sessions.rs index ce0856b0d..2a36c722e 100644 --- a/src/test/spec/sessions.rs +++ b/src/test/spec/sessions.rs @@ -74,7 +74,8 @@ async fn explicit_session_created_on_same_client() { .database(function_name!()) .collection(function_name!()); let err = coll - .insert_one_with_session(doc! {}, None, &mut session0) + .insert_one(doc! {}) + .session(&mut session0) .await .unwrap_err(); match *err.kind { @@ -124,7 +125,12 @@ async fn implicit_session_after_connection() { fn ignore_val(r: Result) -> Result<()> { r.map(|_| ()) } - ops.push(coll.insert_one(doc! {}, None).map(ignore_val).boxed()); + ops.push( + coll.insert_one(doc! {}) + .into_future() + .map(ignore_val) + .boxed(), + ); ops.push( coll.delete_one(doc! {}) .into_future() @@ -258,7 +264,7 @@ async fn sessions_not_supported_implicit_session_ignored() { .expect("Did not observe a command started event for find operation"); assert!(!event.command.contains_key("lsid")); - let _ = coll.insert_one(doc! { "x": 1 }, None).await; + let _ = coll.insert_one(doc! { "x": 1 }).await; let event = subscriber .filter_map_event(Duration::from_millis(500), |event| match event { Event::Command(CommandEvent::Started(command_started_event)) @@ -295,7 +301,8 @@ async fn sessions_not_supported_explicit_session_error() { assert!(matches!(*error.kind, ErrorKind::SessionsNotSupported)); let error = coll - .insert_one_with_session(doc! { "x": 1 }, None, &mut session) + .insert_one(doc! { "x": 1 }) + .session(&mut session) .await .unwrap_err(); assert!(matches!(*error.kind, ErrorKind::SessionsNotSupported)); diff --git a/src/test/spec/transactions.rs b/src/test/spec/transactions.rs index c7336d8a7..1820de454 100644 --- a/src/test/spec/transactions.rs +++ b/src/test/spec/transactions.rs @@ -78,7 +78,7 @@ async fn deserialize_recovery_token() { let coll = client .database(function_name!()) .collection(function_name!()); - coll.insert_one(A { num: 4 }, None).await.unwrap(); + coll.insert_one(A { num: 4 }).await.unwrap(); // Attempt to execute Find on a document with schema B. 
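// A minimal sketch (not from the patch) of the explicit-session pattern the tests above were
// converted to: the removed `*_with_session` methods are replaced by chaining `.session(...)`
// on the pending action before awaiting it. The helper name and parameters are placeholders.
async fn session_call_site(
    client: &mongodb::Client,
    coll: &mongodb::Collection<mongodb::bson::Document>,
) -> mongodb::error::Result<()> {
    use mongodb::bson::doc;
    let mut session = client.start_session().await?;
    // The session is borrowed mutably for the duration of each operation.
    coll.insert_one(doc! { "x": 1 }).session(&mut session).await?;
    coll.delete_one(doc! { "x": 1 }).session(&mut session).await?;
    Ok(())
}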
let coll: Collection = client diff --git a/src/test/spec/unified_runner/operation.rs b/src/test/spec/unified_runner/operation.rs index 4b992078c..45e02308f 100644 --- a/src/test/spec/unified_runner/operation.rs +++ b/src/test/spec/unified_runner/operation.rs @@ -768,24 +768,17 @@ impl TestOperation for InsertOne { ) -> BoxFuture<'a, Result>> { async move { let collection = test_runner.get_collection(id).await; + let action = collection + .insert_one(self.document.clone()) + .with_options(self.options.clone()); let result = match &self.session { Some(session_id) => { with_mut_session!(test_runner, session_id, |session| async { - collection - .insert_one_with_session( - self.document.clone(), - self.options.clone(), - session, - ) - .await + action.session(session.deref_mut()).await }) .await? } - None => { - collection - .insert_one(self.document.clone(), self.options.clone()) - .await? - } + None => action.await?, }; let result = to_bson(&result)?; Ok(Some(result.into())) diff --git a/src/test/spec/v2_runner/operation.rs b/src/test/spec/v2_runner/operation.rs index 45149cca8..c84ce58a3 100644 --- a/src/test/spec/v2_runner/operation.rs +++ b/src/test/spec/v2_runner/operation.rs @@ -458,14 +458,11 @@ impl TestOperation for InsertOne { let document = self.document.clone(); let options = self.options.clone(); async move { - let result = match session { - Some(session) => { - collection - .insert_one_with_session(document, options, session) - .await? - } - None => collection.insert_one(document, options).await?, - }; + let result = collection + .insert_one(document) + .with_options(options) + .optional(session, |a, s| a.session(s)) + .await?; let result = bson::to_bson(&result)?; Ok(Some(result)) } diff --git a/src/test/spec/write_error.rs b/src/test/spec/write_error.rs index 0efeb47fb..7600f991d 100644 --- a/src/test/spec/write_error.rs +++ b/src/test/spec/write_error.rs @@ -24,7 +24,7 @@ async fn details() { .await .unwrap(); let coll: Collection = db.collection("test"); - let err = coll.insert_one(doc! { "x": 1 }, None).await.unwrap_err(); + let err = coll.insert_one(doc! { "x": 1 }).await.unwrap_err(); let write_err = match *err.kind { ErrorKind::Write(WriteFailure::WriteError(e)) => e, _ => panic!("expected WriteError, got {:?}", err.kind), diff --git a/src/test/util/event.rs b/src/test/util/event.rs index 7478ad83c..7d0d83990 100644 --- a/src/test/util/event.rs +++ b/src/test/util/event.rs @@ -728,7 +728,7 @@ async fn command_started_event_count() { let coll = client.database("foo").collection("bar"); for i in 0..10 { - coll.insert_one(doc! { "x": i }, None).await.unwrap(); + coll.insert_one(doc! { "x": i }).await.unwrap(); } assert_eq!(client.get_command_started_events(&["insert"]).len(), 10); diff --git a/tests/transactions_example.rs b/tests/transactions_example.rs index b9d4ca8cf..602bb7967 100644 --- a/tests/transactions_example.rs +++ b/tests/transactions_example.rs @@ -64,11 +64,10 @@ async fn execute_transaction(session: &mut ClientSession) -> Result<()> { .await?; events - .insert_one_with_session( - doc! { "employee": 3, "status": { "new": "Inactive", "old": "Active" } }, - None, - session, + .insert_one( + doc! 
{ "employee": 3, "status": { "new": "Inactive", "old": "Active" } } ) + .session(&mut *session) .await?; commit_with_retry(session).await From 0a526df17ea388a6c35016466c8908cc83355e24 Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Mon, 11 Mar 2024 10:54:03 -0400 Subject: [PATCH 24/39] post-merge fixes --- src/action/find.rs | 24 ++++++++++++------------ src/action/find_and_modify.rs | 22 +++++++++++----------- src/action/insert_many.rs | 4 ++-- src/action/insert_one.rs | 4 ++-- src/test/spec/oidc.rs | 2 +- 5 files changed, 28 insertions(+), 28 deletions(-) diff --git a/src/action/find.rs b/src/action/find.rs index 4a4f6c322..be7354e5f 100644 --- a/src/action/find.rs +++ b/src/action/find.rs @@ -18,7 +18,7 @@ use crate::{ use super::{action_impl, option_setters, ExplicitSession, ImplicitSession, Multiple, Single}; -impl Collection { +impl Collection { /// Finds the documents in the collection matching `filter`. /// /// `await` will return `Result>` (or `Result>` if a session is @@ -34,7 +34,7 @@ impl Collection { } } -impl Collection { +impl Collection { /// Finds a single document in the collection matching `filter`. /// /// `await` will return `Result>`. @@ -50,7 +50,7 @@ impl Collection { } #[cfg(feature = "sync")] -impl crate::sync::Collection { +impl crate::sync::Collection { /// Finds the documents in the collection matching `filter`. /// /// [`run`](Find::run) will return `Result>` (or `Result>` if a @@ -61,7 +61,7 @@ impl crate::sync::Collection { } #[cfg(feature = "sync")] -impl crate::sync::Collection { +impl crate::sync::Collection { /// Finds a single document in the collection matching `filter`. /// /// [`run`](Find::run) will return `Result>`. @@ -72,7 +72,7 @@ impl crate::sync::Collection { /// Finds the documents in a collection matching a filter. Construct with [`Collection::find`]. #[must_use] -pub struct Find<'a, T, Mode = Multiple, Session = ImplicitSession> { +pub struct Find<'a, T: Send + Sync, Mode = Multiple, Session = ImplicitSession> { coll: &'a Collection, filter: Document, options: Option, @@ -80,7 +80,7 @@ pub struct Find<'a, T, Mode = Multiple, Session = ImplicitSession> { _mode: PhantomData, } -impl<'a, T, Mode, Session> Find<'a, T, Mode, Session> { +impl<'a, T: Send + Sync, Mode, Session> Find<'a, T, Mode, Session> { option_setters!(options: FindOptions; allow_partial_results: bool, comment: String, @@ -103,7 +103,7 @@ impl<'a, T, Mode, Session> Find<'a, T, Mode, Session> { } // Some options don't make sense for `find_one`. -impl<'a, T, Session> Find<'a, T, Multiple, Session> { +impl<'a, T: Send + Sync, Session> Find<'a, T, Multiple, Session> { option_setters!(FindOptions; allow_disk_use: bool, batch_size: u32, @@ -114,7 +114,7 @@ impl<'a, T, Session> Find<'a, T, Multiple, Session> { ); } -impl<'a, T, Mode> Find<'a, T, Mode, ImplicitSession> { +impl<'a, T: Send + Sync, Mode> Find<'a, T, Mode, ImplicitSession> { /// Runs the query using the provided session. pub fn session<'s>( self, @@ -131,7 +131,7 @@ impl<'a, T, Mode> Find<'a, T, Mode, ImplicitSession> { } action_impl! { - impl<'a, T> Action for Find<'a, T, Multiple, ImplicitSession> { + impl<'a, T: Send + Sync> Action for Find<'a, T, Multiple, ImplicitSession> { type Future = FindFuture; async fn execute(mut self) -> Result> { @@ -148,7 +148,7 @@ action_impl! { } action_impl! 
{ - impl<'a, T> Action for Find<'a, T, Multiple, ExplicitSession<'a>> { + impl<'a, T: Send + Sync> Action for Find<'a, T, Multiple, ExplicitSession<'a>> { type Future = FindSessionFuture; async fn execute(mut self) -> Result> { @@ -166,7 +166,7 @@ action_impl! { } action_impl! { - impl<'a, T: DeserializeOwned> Action for Find<'a, T, Single, ImplicitSession> + impl<'a, T: DeserializeOwned + Send + Sync> Action for Find<'a, T, Single, ImplicitSession> { type Future = FindOneFuture; @@ -181,7 +181,7 @@ action_impl! { } action_impl! { - impl<'a, T: DeserializeOwned + Send> Action for Find<'a, T, Single, ExplicitSession<'a>> { + impl<'a, T: DeserializeOwned + Send + Sync> Action for Find<'a, T, Single, ExplicitSession<'a>> { type Future = FindOneSessionFuture; async fn execute(self) -> Result> { diff --git a/src/action/find_and_modify.rs b/src/action/find_and_modify.rs index e685a3b10..1c131d2b7 100644 --- a/src/action/find_and_modify.rs +++ b/src/action/find_and_modify.rs @@ -26,7 +26,7 @@ use crate::{ use super::{action_impl, option_setters}; -impl Collection { +impl Collection { /// Atomically finds up to one document in the collection matching `filter` and deletes it. /// /// This operation will retry once upon failure if the connection and encountered error support @@ -74,7 +74,7 @@ impl Collection { } } -impl Collection { +impl Collection { /// Atomically finds up to one document in the collection matching `filter` and replaces it with /// `replacement`. /// @@ -104,7 +104,7 @@ impl Collection { } #[cfg(feature = "sync")] -impl crate::sync::Collection { +impl crate::sync::Collection { /// Atomically finds up to one document in the collection matching `filter` and deletes it. /// /// This operation will retry once upon failure if the connection and encountered error support @@ -138,7 +138,7 @@ impl crate::sync::Collection { } #[cfg(feature = "sync")] -impl crate::sync::Collection { +impl crate::sync::Collection { /// Atomically finds up to one document in the collection matching `filter` and replaces it with /// `replacement`. /// @@ -159,7 +159,7 @@ impl crate::sync::Collection { /// Atomically find up to one document in the collection matching a filter and modify it. Construct /// with [`Collection::find_one_and_delete`]. #[must_use] -pub struct FindAndModify<'a, T, Mode> { +pub struct FindAndModify<'a, T: Send + Sync, Mode> { coll: &'a Collection, filter: Document, modification: Result, @@ -172,7 +172,7 @@ pub struct Delete; pub struct Update; pub struct Replace; -impl<'a, T, Mode> FindAndModify<'a, T, Mode> { +impl<'a, T: Send + Sync, Mode> FindAndModify<'a, T, Mode> { fn options(&mut self) -> &mut FindAndModifyOptions { self.options .get_or_insert_with(::default) @@ -185,7 +185,7 @@ impl<'a, T, Mode> FindAndModify<'a, T, Mode> { } } -impl<'a, T> FindAndModify<'a, T, Delete> { +impl<'a, T: Send + Sync> FindAndModify<'a, T, Delete> { /// Set all options. Note that this will replace all previous values set. pub fn with_options(mut self, value: impl Into>) -> Self { self.options = value.into().map(FindAndModifyOptions::from); @@ -204,7 +204,7 @@ impl<'a, T> FindAndModify<'a, T, Delete> { } } -impl<'a, T> FindAndModify<'a, T, Update> { +impl<'a, T: Send + Sync> FindAndModify<'a, T, Update> { /// Set all options. Note that this will replace all previous values set. 
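// A minimal sketch (not from the patch) of the find_one_and_* builders these impls back,
// matching the converted test call sites earlier in the series. The helper name and the
// `coll` parameter are placeholders.
async fn find_and_modify_call_site(
    coll: &mongodb::Collection<mongodb::bson::Document>,
) -> mongodb::error::Result<()> {
    use mongodb::bson::doc;
    // Atomically update one matching document; by default the pre-update document is returned.
    let _previous = coll
        .find_one_and_update(doc! { "x": 1 }, doc! { "$inc": { "x": 1 } })
        .await?;
    // Atomically delete one matching document, returning it if one matched.
    let _deleted = coll.find_one_and_delete(doc! { "x": 1 }).await?;
    Ok(())
}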
pub fn with_options(mut self, value: impl Into>) -> Self { self.options = value.into().map(FindAndModifyOptions::from); @@ -232,7 +232,7 @@ impl<'a, T> FindAndModify<'a, T, Update> { } } -impl<'a, T> FindAndModify<'a, T, Replace> { +impl<'a, T: Send + Sync> FindAndModify<'a, T, Replace> { /// Set all options. Note that this will replace all previous values set. pub fn with_options(mut self, value: impl Into>) -> Self { self.options = value.into().map(FindAndModifyOptions::from); @@ -260,8 +260,8 @@ impl<'a, T> FindAndModify<'a, T, Replace> { } action_impl! { - impl<'a, T: DeserializeOwned + Send, Mode> Action for FindAndModify<'a, T, Mode> { - type Future = FindAndDeleteFuture<'a, T: DeserializeOwned + Send>; + impl<'a, T: DeserializeOwned + Send + Sync, Mode> Action for FindAndModify<'a, T, Mode> { + type Future = FindAndDeleteFuture<'a, T: DeserializeOwned + Send + Sync>; async fn execute(mut self) -> Result> { resolve_write_concern_with_session!(self.coll, self.options, self.session.as_ref())?; diff --git a/src/action/insert_many.rs b/src/action/insert_many.rs index bba473b33..1a80c4b82 100644 --- a/src/action/insert_many.rs +++ b/src/action/insert_many.rs @@ -16,7 +16,7 @@ use crate::{ use super::{action_impl, option_setters, CollRef}; -impl Collection { +impl Collection { /// Inserts the data in `docs` into the collection. /// /// Note that this method accepts both owned and borrowed values, so the input documents @@ -43,7 +43,7 @@ impl Collection { } #[cfg(feature = "sync")] -impl crate::sync::Collection { +impl crate::sync::Collection { /// Inserts the data in `docs` into the collection. /// /// Note that this method accepts both owned and borrowed values, so the input documents diff --git a/src/action/insert_one.rs b/src/action/insert_one.rs index a89525a44..49d7bdc88 100644 --- a/src/action/insert_one.rs +++ b/src/action/insert_one.rs @@ -16,7 +16,7 @@ use crate::{ use super::{action_impl, option_setters, CollRef}; -impl Collection { +impl Collection { /// Inserts `doc` into the collection. /// /// Note that either an owned or borrowed value can be inserted here, so the input document @@ -42,7 +42,7 @@ impl Collection { } #[cfg(feature = "sync")] -impl crate::sync::Collection { +impl crate::sync::Collection { /// Inserts `doc` into the collection. /// /// Note that either an owned or borrowed value can be inserted here, so the input document diff --git a/src/test/spec/oidc.rs b/src/test/spec/oidc.rs index 90f526d44..09d028b9d 100644 --- a/src/test/spec/oidc.rs +++ b/src/test/spec/oidc.rs @@ -45,7 +45,7 @@ async fn machine_single_principal_implicit_username() -> anyhow::Result<()> { client .database("test") .collection::("test") - .find_one(None, None) + .find_one(doc! 
{}) .await?; assert_eq!(1, *(*call_count).lock().unwrap()); Ok(()) From 602ae3a6dda73ade75626e3f73107eaa157651c4 Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Mon, 11 Mar 2024 11:17:53 -0400 Subject: [PATCH 25/39] convert replace_one --- src/action.rs | 2 + src/action/replace_one.rs | 103 ++++++++++++++++++++++++++++++++++++++ src/operation/update.rs | 18 ++++++- 3 files changed, 121 insertions(+), 2 deletions(-) create mode 100644 src/action/replace_one.rs diff --git a/src/action.rs b/src/action.rs index 95849c594..2f5e5f543 100644 --- a/src/action.rs +++ b/src/action.rs @@ -18,6 +18,7 @@ mod list_collections; mod list_databases; mod list_indexes; mod perf; +mod replace_one; mod run_command; mod session; mod shutdown; @@ -43,6 +44,7 @@ pub use list_collections::ListCollections; pub use list_databases::ListDatabases; pub use list_indexes::ListIndexes; pub use perf::WarmConnectionPool; +pub use replace_one::ReplaceOne; pub use run_command::{RunCommand, RunCursorCommand}; pub use session::StartSession; pub use shutdown::Shutdown; diff --git a/src/action/replace_one.rs b/src/action/replace_one.rs new file mode 100644 index 000000000..b78f937e6 --- /dev/null +++ b/src/action/replace_one.rs @@ -0,0 +1,103 @@ +use std::borrow::Borrow; + +use bson::{Bson, Document, RawDocumentBuf}; +use serde::Serialize; + +use crate::{ + coll::options::{Hint, ReplaceOptions, UpdateOptions}, + collation::Collation, + error::Result, + operation::Update as Op, + options::WriteConcern, + results::UpdateResult, + serde_util, + ClientSession, + Collection, +}; + +use super::{action_impl, option_setters, CollRef}; + +impl Collection { + /// Replaces up to one document matching `query` in the collection with `replacement`. + /// + /// This operation will retry once upon failure if the connection and encountered error support + /// retryability. See the documentation + /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on + /// retryable writes. + /// + /// `await` will return `Result`. + pub fn replace_one_2(&self, query: Document, replacement: impl Borrow) -> ReplaceOne { + ReplaceOne { + coll: CollRef::new(self), + query, + replacement: serde_util::to_raw_document_buf_with_options( + replacement.borrow(), + self.human_readable_serialization(), + ), + options: None, + session: None, + } + } +} + +#[cfg(feature = "sync")] +impl crate::sync::Collection { + /// Replaces up to one document matching `query` in the collection with `replacement`. + /// + /// This operation will retry once upon failure if the connection and encountered error support + /// retryability. See the documentation + /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on + /// retryable writes. + /// + /// [`run`](ReplaceOne::run) will return `Result`. + pub fn replace_one_2(&self, query: Document, replacement: impl Borrow) -> ReplaceOne { + self.async_collection.replace_one_2(query, replacement) + } +} + +/// Replace up to one document matching a query. Construct with [`Collection::replace_one`]. +#[must_use] +pub struct ReplaceOne<'a> { + coll: CollRef<'a>, + query: Document, + replacement: Result, + options: Option, + session: Option<&'a mut ClientSession>, +} + +impl<'a> ReplaceOne<'a> { + option_setters! { options: ReplaceOptions; + bypass_document_validation: bool, + upsert: bool, + collation: Collation, + hint: Hint, + write_concern: WriteConcern, + let_vars: Document, + comment: Bson, + } + + /// Runs the operation using the provided session. 
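// A minimal sketch (not from the patch) of the new `ReplaceOne` action defined in this file,
// using the setters declared above. It uses the method's final name `replace_one` (introduced
// here as `replace_one_2` and renamed later in this series); the helper name and the `coll`
// parameter are placeholders.
async fn replace_one_call_site(
    coll: &mongodb::Collection<mongodb::bson::Document>,
) -> mongodb::error::Result<()> {
    use mongodb::bson::doc;
    let result = coll
        .replace_one(doc! { "name": "old" }, doc! { "name": "new" })
        .upsert(true)
        .await?;
    // `result` is the usual UpdateResult; `upserted_id` is set when the upsert path inserted.
    let _maybe_id = result.upserted_id;
    Ok(())
}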
+ pub fn session(mut self, value: impl Into<&'a mut ClientSession>) -> Self { + self.session = Some(value.into()); + self + } +} + +action_impl! { + impl<'a> Action for ReplaceOne<'a> { + type Future = ReplaceOneFuture; + + async fn execute(mut self) -> Result { + resolve_write_concern_with_session!(self.coll, self.options, self.session.as_ref())?; + + let update = Op::with_replace_raw( + self.coll.namespace(), + self.query, + self.replacement?, + false, + self.options.map(UpdateOptions::from_replace_options), + )?; + self.coll.client().execute_operation(update, self.session).await + } + } +} diff --git a/src/operation/update.rs b/src/operation/update.rs index 66046a9ee..735d9499b 100644 --- a/src/operation/update.rs +++ b/src/operation/update.rs @@ -90,9 +90,7 @@ impl Update { options, } } -} -impl Update { pub(crate) fn with_replace( ns: Namespace, filter: Document, @@ -109,6 +107,22 @@ impl Update { options, }) } + + pub(crate) fn with_replace_raw( + ns: Namespace, + filter: Document, + update: RawDocumentBuf, + multi: bool, + options: Option, + ) -> Result { + Ok(Self { + ns, + filter, + update: UpdateOrReplace::Replacement(update), + multi: multi.then_some(true), + options, + }) + } } impl OperationWithDefaults for Update { From 1c500769bb472403d68f3a3fda7f7024943a1624 Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Mon, 11 Mar 2024 11:46:35 -0400 Subject: [PATCH 26/39] tweak action --- action_macro/src/lib.rs | 7 +++---- src/action.rs | 7 ++----- 2 files changed, 5 insertions(+), 9 deletions(-) diff --git a/action_macro/src/lib.rs b/action_macro/src/lib.rs index ed529042a..2ef750fc7 100644 --- a/action_macro/src/lib.rs +++ b/action_macro/src/lib.rs @@ -56,10 +56,9 @@ pub fn action_impl(input: proc_macro::TokenStream) -> proc_macro::TokenStream { }); quote! { - impl #generics crate::action::private::Sealed for #action {} - impl #generics crate::action::Action for #action { - type Output = #exec_output; - } + impl #generics crate::action::private::Sealed for #action { } + + impl #generics crate::action::Action for #action { } impl #generics std::future::IntoFuture for #action { type Output = #exec_output; diff --git a/src/action.rs b/src/action.rs index 2f5e5f543..f2b5b5ccb 100644 --- a/src/action.rs +++ b/src/action.rs @@ -25,7 +25,7 @@ mod shutdown; mod update; mod watch; -use std::{marker::PhantomData, ops::Deref}; +use std::{future::IntoFuture, marker::PhantomData, ops::Deref}; pub use aggregate::Aggregate; use bson::Document; @@ -118,10 +118,7 @@ pub(crate) mod private { /// A pending action to execute on the server. The action can be configured via chained methods and /// executed via `await` (or `run` if using the sync client). -pub trait Action: private::Sealed { - /// The type of the value produced by execution. - type Output; - +pub trait Action: private::Sealed + IntoFuture { /// If the value is `Some`, call the provided function on `self`. Convenient for chained /// updates with values that need to be set conditionally. 
For example: /// ```rust From 5881f8fde15d187e45e6863f51d0412feb5446e6 Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Mon, 11 Mar 2024 12:03:45 -0400 Subject: [PATCH 27/39] tweak documentation --- src/action/aggregate.rs | 2 +- src/action/count.rs | 2 +- src/action/create_collection.rs | 2 +- src/action/create_index.rs | 2 +- src/action/delete.rs | 2 +- src/action/distinct.rs | 2 +- src/action/drop_index.rs | 2 +- src/action/find_and_modify.rs | 4 +- src/action/insert_many.rs | 2 +- src/action/insert_one.rs | 2 +- src/action/list_indexes.rs | 2 +- src/action/replace_one.rs | 8 +-- src/action/update.rs | 2 +- src/client/session/test.rs | 2 +- src/client/session/test/causal_consistency.rs | 8 +-- src/coll.rs | 70 +------------------ src/concern/test.rs | 48 +++++-------- src/operation/update.rs | 17 ----- src/sync/coll.rs | 56 +-------------- src/test/coll.rs | 6 +- src/test/documentation_examples.rs | 1 - src/test/spec/crud_v1/replace_one.rs | 3 +- src/test/spec/retryable_writes.rs | 4 +- src/test/spec/unified_runner/operation.rs | 7 +- src/test/spec/v2_runner/operation.rs | 26 ++----- 25 files changed, 54 insertions(+), 228 deletions(-) diff --git a/src/action/aggregate.rs b/src/action/aggregate.rs index b4272f4f7..108a68d12 100644 --- a/src/action/aggregate.rs +++ b/src/action/aggregate.rs @@ -116,7 +116,7 @@ impl<'a, Session> Aggregate<'a, Session> { } impl<'a> Aggregate<'a, ImplicitSession> { - /// Runs the operation using the provided session. + /// Use the provided session when running the operation. pub fn session( self, value: impl Into<&'a mut ClientSession>, diff --git a/src/action/count.rs b/src/action/count.rs index 533dfefc2..6e2ad40da 100644 --- a/src/action/count.rs +++ b/src/action/count.rs @@ -129,7 +129,7 @@ impl<'a> CountDocuments<'a> { comment: bson::Bson, ); - /// Runs the operation using the provided session. + /// Use the provided session when running the operation. pub fn session(mut self, value: impl Into<&'a mut ClientSession>) -> Self { self.session = Some(value.into()); self diff --git a/src/action/create_collection.rs b/src/action/create_collection.rs index 3d8827be4..f68cf334b 100644 --- a/src/action/create_collection.rs +++ b/src/action/create_collection.rs @@ -66,7 +66,7 @@ impl<'a> CreateCollection<'a> { encrypted_fields: Document, ); - /// Runs the operation using the provided session. + /// Use the provided session when running the operation. pub fn session(mut self, value: impl Into<&'a mut ClientSession>) -> Self { self.session = Some(value.into()); self diff --git a/src/action/create_index.rs b/src/action/create_index.rs index 83b06cc11..ba9a39b64 100644 --- a/src/action/create_index.rs +++ b/src/action/create_index.rs @@ -91,7 +91,7 @@ impl<'a, M> CreateIndex<'a, M> { comment: Bson, ); - /// Runs the operation using the provided session. + /// Use the provided session when running the operation. pub fn session(mut self, value: impl Into<&'a mut ClientSession>) -> Self { self.session = Some(value.into()); self diff --git a/src/action/delete.rs b/src/action/delete.rs index 97067deb1..200e642a9 100644 --- a/src/action/delete.rs +++ b/src/action/delete.rs @@ -94,7 +94,7 @@ impl<'a> Delete<'a> { comment: Bson, ); - /// Runs the operation using the provided session. + /// Use the provided session when running the operation. 
pub fn session(mut self, value: impl Into<&'a mut ClientSession>) -> Self { self.session = Some(value.into()); self diff --git a/src/action/distinct.rs b/src/action/distinct.rs index bc1280342..939feca33 100644 --- a/src/action/distinct.rs +++ b/src/action/distinct.rs @@ -65,7 +65,7 @@ impl<'a> Distinct<'a> { comment: Bson, ); - /// Runs the operation using the provided session. + /// Use the provided session when running the operation. pub fn session(mut self, value: impl Into<&'a mut ClientSession>) -> Self { self.session = Some(value.into()); self diff --git a/src/action/drop_index.rs b/src/action/drop_index.rs index 6ed3c0642..725cf9193 100644 --- a/src/action/drop_index.rs +++ b/src/action/drop_index.rs @@ -78,7 +78,7 @@ impl<'a> DropIndex<'a> { comment: Bson, ); - /// Runs the operation using the provided session. + /// Use the provided session when running the operation. pub fn session(mut self, value: impl Into<&'a mut ClientSession>) -> Self { self.session = Some(value.into()); self diff --git a/src/action/find_and_modify.rs b/src/action/find_and_modify.rs index 1c131d2b7..edcf7c970 100644 --- a/src/action/find_and_modify.rs +++ b/src/action/find_and_modify.rs @@ -178,7 +178,7 @@ impl<'a, T: Send + Sync, Mode> FindAndModify<'a, T, Mode> { .get_or_insert_with(::default) } - /// Runs the operation using the provided session. + /// Use the provided session when running the operation. pub fn session(mut self, value: impl Into<&'a mut ClientSession>) -> Self { self.session = Some(value.into()); self @@ -261,7 +261,7 @@ impl<'a, T: Send + Sync> FindAndModify<'a, T, Replace> { action_impl! { impl<'a, T: DeserializeOwned + Send + Sync, Mode> Action for FindAndModify<'a, T, Mode> { - type Future = FindAndDeleteFuture<'a, T: DeserializeOwned + Send + Sync>; + type Future = FindAndModifyFuture<'a, T: DeserializeOwned + Send + Sync>; async fn execute(mut self) -> Result> { resolve_write_concern_with_session!(self.coll, self.options, self.session.as_ref())?; diff --git a/src/action/insert_many.rs b/src/action/insert_many.rs index 1a80c4b82..4bd5f5447 100644 --- a/src/action/insert_many.rs +++ b/src/action/insert_many.rs @@ -77,7 +77,7 @@ impl<'a> InsertMany<'a> { comment: Bson, } - /// Runs the operation using the provided session. + /// Use the provided session when running the operation. pub fn session(mut self, value: impl Into<&'a mut ClientSession>) -> Self { self.session = Some(value.into()); self diff --git a/src/action/insert_one.rs b/src/action/insert_one.rs index 49d7bdc88..0f4296241 100644 --- a/src/action/insert_one.rs +++ b/src/action/insert_one.rs @@ -75,7 +75,7 @@ impl<'a> InsertOne<'a> { comment: Bson, } - /// Runs the operation using the provided session. + /// Use the provided session when running the operation. pub fn session(mut self, value: impl Into<&'a mut ClientSession>) -> Self { self.session = Some(value.into()); self diff --git a/src/action/list_indexes.rs b/src/action/list_indexes.rs index ac05c5566..0e2e23bd9 100644 --- a/src/action/list_indexes.rs +++ b/src/action/list_indexes.rs @@ -94,7 +94,7 @@ impl<'a, Mode, Session> ListIndexes<'a, Mode, Session> { } impl<'a, Mode> ListIndexes<'a, Mode, ImplicitSession> { - /// Runs the operation using the provided session. + /// Use the provided session when running the operation. 
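// A minimal sketch (not from the patch) of setting an individual option on an action, matching
// the insert_one conversions earlier in the series; the helper name and the `coll` parameter
// are placeholders. A prebuilt options struct can still be applied in one call with
// `.with_options(...)`, as the spec-test runner conversions later in the series do.
async fn option_setter_call_site(
    coll: &mongodb::Collection<mongodb::bson::Document>,
) -> mongodb::error::Result<()> {
    use mongodb::bson::doc;
    use mongodb::options::WriteConcern;
    coll.insert_one(doc! { "x": 1 })
        .write_concern(WriteConcern::majority())
        .await?;
    Ok(())
}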
pub fn session( self, value: impl Into<&'a mut ClientSession>, diff --git a/src/action/replace_one.rs b/src/action/replace_one.rs index b78f937e6..50cd190e4 100644 --- a/src/action/replace_one.rs +++ b/src/action/replace_one.rs @@ -26,7 +26,7 @@ impl Collection { /// retryable writes. /// /// `await` will return `Result`. - pub fn replace_one_2(&self, query: Document, replacement: impl Borrow) -> ReplaceOne { + pub fn replace_one(&self, query: Document, replacement: impl Borrow) -> ReplaceOne { ReplaceOne { coll: CollRef::new(self), query, @@ -50,8 +50,8 @@ impl crate::sync::Collection { /// retryable writes. /// /// [`run`](ReplaceOne::run) will return `Result`. - pub fn replace_one_2(&self, query: Document, replacement: impl Borrow) -> ReplaceOne { - self.async_collection.replace_one_2(query, replacement) + pub fn replace_one(&self, query: Document, replacement: impl Borrow) -> ReplaceOne { + self.async_collection.replace_one(query, replacement) } } @@ -76,7 +76,7 @@ impl<'a> ReplaceOne<'a> { comment: Bson, } - /// Runs the operation using the provided session. + /// Use the provided session when running the operation. pub fn session(mut self, value: impl Into<&'a mut ClientSession>) -> Self { self.session = Some(value.into()); self diff --git a/src/action/update.rs b/src/action/update.rs index 466c5f09c..ca877f9fa 100644 --- a/src/action/update.rs +++ b/src/action/update.rs @@ -120,7 +120,7 @@ impl<'a> Update<'a> { comment: Bson, ); - /// Runs the operation using the provided session. + /// Use the provided session when running the operation. pub fn session(mut self, value: impl Into<&'a mut ClientSession>) -> Self { self.session = Some(value.into()); self diff --git a/src/client/session/test.rs b/src/client/session/test.rs index da40e6b05..1649e0de5 100644 --- a/src/client/session/test.rs +++ b/src/client/session/test.rs @@ -71,7 +71,7 @@ macro_rules! for_each_op { collection_op!( $test_name, coll, - coll.replace_one(doc! { "x": 1 }, doc! { "x": 2 }, None) + coll.replace_one(doc! { "x": 1 }, doc! { "x": 2 }) ), ) .await; diff --git a/src/client/session/test/causal_consistency.rs b/src/client/session/test/causal_consistency.rs index 0238cc228..12caec724 100644 --- a/src/client/session/test/causal_consistency.rs +++ b/src/client/session/test/causal_consistency.rs @@ -69,12 +69,8 @@ fn all_session_ops() -> impl Iterator { .session(s))); ops.push(op!("update", false, |coll, s| coll - .replace_one_with_session( - doc! { "x": 1 }, - doc! { "x": 2 }, - None, - s, - ))); + .replace_one(doc! { "x": 1 }, doc! { "x": 2 },) + .session(s))); ops.push(op!("delete", false, |coll, s| coll .delete_one(doc! 
{ "x": 1 }) diff --git a/src/coll.rs b/src/coll.rs index e1b0f7de7..569986cf7 100644 --- a/src/coll.rs +++ b/src/coll.rs @@ -1,22 +1,19 @@ mod action; pub mod options; -use std::{borrow::Borrow, fmt, fmt::Debug, str::FromStr, sync::Arc}; +use std::{fmt, fmt::Debug, str::FromStr, sync::Arc}; use serde::{de::Error as DeError, Deserialize, Deserializer, Serialize}; use self::options::*; use crate::{ - bson::{doc, Document}, + bson::doc, client::options::ServerAddress, cmap::conn::PinnedConnectionHandle, concern::{ReadConcern, WriteConcern}, error::{Error, Result}, - operation::Update, - results::UpdateResult, selection_criteria::SelectionCriteria, Client, - ClientSession, Database, }; @@ -222,69 +219,6 @@ where } } -impl Collection -where - T: Serialize + Send + Sync, -{ - async fn replace_one_common( - &self, - query: Document, - replacement: impl Borrow, - options: impl Into>, - session: impl Into>, - ) -> Result { - let mut options = options.into(); - - let session = session.into(); - - resolve_write_concern_with_session!(self, options, session.as_ref())?; - - let update = Update::with_replace( - self.namespace(), - query, - replacement.borrow(), - false, - options.map(UpdateOptions::from_replace_options), - self.inner.human_readable_serialization, - )?; - self.client().execute_operation(update, session).await - } - - /// Replaces up to one document matching `query` in the collection with `replacement`. - /// - /// This operation will retry once upon failure if the connection and encountered error support - /// retryability. See the documentation - /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on - /// retryable writes. - pub async fn replace_one( - &self, - query: Document, - replacement: impl Borrow, - options: impl Into>, - ) -> Result { - self.replace_one_common(query, replacement, options, None) - .await - } - - /// Replaces up to one document matching `query` in the collection with `replacement` using the - /// provided `ClientSession`. - /// - /// This operation will retry once upon failure if the connection and encountered error support - /// retryability. See the documentation - /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on - /// retryable writes. - pub async fn replace_one_with_session( - &self, - query: Document, - replacement: impl Borrow, - options: impl Into>, - session: &mut ClientSession, - ) -> Result { - self.replace_one_common(query, replacement, options, session) - .await - } -} - /// A struct modeling the canonical name for a collection in MongoDB. #[derive(Debug, Clone, PartialEq, Eq)] pub struct Namespace { diff --git a/src/concern/test.rs b/src/concern/test.rs index 1bf6cdb5d..d14a7bb1a 100644 --- a/src/concern/test.rs +++ b/src/concern/test.rs @@ -3,7 +3,7 @@ use std::time::Duration; use crate::{ bson::{doc, Bson, Document}, error::ErrorKind, - options::{Acknowledgment, ReadConcern, ReplaceOptions, TransactionOptions, WriteConcern}, + options::{Acknowledgment, ReadConcern, TransactionOptions, WriteConcern}, test::{EventClient, TestClient}, Collection, }; @@ -356,34 +356,24 @@ async fn command_contains_write_concern_replace_one() { coll.drop().await.unwrap(); coll.insert_one(doc! { "foo": "bar" }).await.unwrap(); - coll.replace_one( - doc! { "foo": "bar" }, - doc! 
{ "baz": "fun" }, - ReplaceOptions::builder() - .write_concern( - WriteConcern::builder() - .w(Acknowledgment::Nodes(1)) - .journal(true) - .build(), - ) - .build(), - ) - .await - .unwrap(); - coll.replace_one( - doc! { "foo": "bar" }, - doc! { "baz": "fun" }, - ReplaceOptions::builder() - .write_concern( - WriteConcern::builder() - .w(Acknowledgment::Nodes(1)) - .journal(false) - .build(), - ) - .build(), - ) - .await - .unwrap(); + coll.replace_one(doc! { "foo": "bar" }, doc! { "baz": "fun" }) + .write_concern( + WriteConcern::builder() + .w(Acknowledgment::Nodes(1)) + .journal(true) + .build(), + ) + .await + .unwrap(); + coll.replace_one(doc! { "foo": "bar" }, doc! { "baz": "fun" }) + .write_concern( + WriteConcern::builder() + .w(Acknowledgment::Nodes(1)) + .journal(false) + .build(), + ) + .await + .unwrap(); assert_eq!( command_write_concerns(&client, "update"), diff --git a/src/operation/update.rs b/src/operation/update.rs index 735d9499b..a398690de 100644 --- a/src/operation/update.rs +++ b/src/operation/update.rs @@ -91,23 +91,6 @@ impl Update { } } - pub(crate) fn with_replace( - ns: Namespace, - filter: Document, - update: &T, - multi: bool, - options: Option, - human_readable_serialization: bool, - ) -> Result { - Ok(Self { - ns, - filter, - update: UpdateOrReplace::replacement(update, human_readable_serialization)?, - multi: multi.then_some(true), - options, - }) - } - pub(crate) fn with_replace_raw( ns: Namespace, filter: Document, diff --git a/src/sync/coll.rs b/src/sync/coll.rs index 7e7a136fb..7ae9924fb 100644 --- a/src/sync/coll.rs +++ b/src/sync/coll.rs @@ -1,13 +1,5 @@ -use std::{borrow::Borrow, fmt::Debug}; - -use serde::Serialize; - -use super::ClientSession; use crate::{ - bson::Document, - error::Result, - options::{ReadConcern, ReplaceOptions, SelectionCriteria, WriteConcern}, - results::UpdateResult, + options::{ReadConcern, SelectionCriteria, WriteConcern}, Collection as AsyncCollection, Namespace, }; @@ -99,49 +91,3 @@ where self.async_collection.write_concern() } } - -impl Collection -where - T: Serialize + Send + Sync, -{ - /// Replaces up to one document matching `query` in the collection with `replacement`. - /// - /// This operation will retry once upon failure if the connection and encountered error support - /// retryability. See the documentation - /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on - /// retryable writes. - pub fn replace_one( - &self, - query: Document, - replacement: impl Borrow, - options: impl Into>, - ) -> Result { - crate::sync::TOKIO_RUNTIME.block_on(self.async_collection.replace_one( - query, - replacement.borrow(), - options.into(), - )) - } - - /// Replaces up to one document matching `query` in the collection with `replacement` using the - /// provided `ClientSession`. - /// - /// This operation will retry once upon failure if the connection and encountered error support - /// retryability. See the documentation - /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on - /// retryable writes. 
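// A minimal sketch (not from the patch) of the equivalent sync call after this removal: with
// the driver's `sync` feature, the sync collection returns the same `ReplaceOne` action, which
// is executed with `.run()` instead of `.await`. The helper name and the `coll` parameter are
// placeholders.
fn sync_replace_call_site(
    coll: &mongodb::sync::Collection<mongodb::bson::Document>,
) -> mongodb::error::Result<()> {
    use mongodb::bson::doc;
    coll.replace_one(doc! { "foo": "bar" }, doc! { "baz": "fun" })
        .upsert(true)
        .run()?;
    Ok(())
}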
- pub fn replace_one_with_session( - &self, - query: Document, - replacement: impl Borrow, - options: impl Into>, - session: &mut ClientSession, - ) -> Result { - crate::sync::TOKIO_RUNTIME.block_on(self.async_collection.replace_one_with_session( - query, - replacement.borrow(), - options.into(), - &mut session.async_client_session, - )) - } -} diff --git a/src/test/coll.rs b/src/test/coll.rs index a6ca90d44..a12bbf030 100644 --- a/src/test/coll.rs +++ b/src/test/coll.rs @@ -825,7 +825,7 @@ async fn typed_replace_one() { str: "b".into(), }; coll.insert_one(insert_data).await.unwrap(); - coll.replace_one(doc! { "x": 1 }, replacement.clone(), None) + coll.replace_one(doc! { "x": 1 }, replacement.clone()) .await .unwrap(); @@ -1070,7 +1070,7 @@ async fn invalid_utf8_response() { // test triggering an invalid error message via a replace_one. let replace_err = coll - .replace_one(doc! {"x": 1}, &long_unicode_str_doc, None) + .replace_one(doc! {"x": 1}, &long_unicode_str_doc) .await .expect_err("replacement with duplicate key should fail") .kind; @@ -1178,7 +1178,6 @@ async fn configure_human_readable_serialization() { id: 1, s: StringOrBytes("non human readable!".into()), }, - None, ) .await .unwrap(); @@ -1221,7 +1220,6 @@ async fn configure_human_readable_serialization() { id: 1, s: StringOrBytes("human readable!".into()), }, - None, ) .await .unwrap(); diff --git a/src/test/documentation_examples.rs b/src/test/documentation_examples.rs index 47b6a11d4..00145b913 100644 --- a/src/test/documentation_examples.rs +++ b/src/test/documentation_examples.rs @@ -1129,7 +1129,6 @@ async fn update_examples(collection: &Collection) -> Result<()> { }, ], }, - None, ) .await?; // End Example 54 diff --git a/src/test/spec/crud_v1/replace_one.rs b/src/test/spec/crud_v1/replace_one.rs index e1dda8e6f..42f56ee46 100644 --- a/src/test/spec/crud_v1/replace_one.rs +++ b/src/test/spec/crud_v1/replace_one.rs @@ -63,7 +63,8 @@ async fn run_replace_one_test(test_file: TestFile) { }; let result = coll - .replace_one(arguments.filter, arguments.replacement, options) + .replace_one(arguments.filter, arguments.replacement) + .with_options(options) .await .expect(&test_case.description); assert_eq!( diff --git a/src/test/spec/retryable_writes.rs b/src/test/spec/retryable_writes.rs index fb635d05f..f9ae725d3 100644 --- a/src/test/spec/retryable_writes.rs +++ b/src/test/spec/retryable_writes.rs @@ -250,9 +250,7 @@ async fn transaction_ids_included() { .unwrap(); assert!(includes_txn_number("update")); - coll.replace_one(doc! {}, doc! { "x": 1 }, None) - .await - .unwrap(); + coll.replace_one(doc! {}, doc! { "x": 1 }).await.unwrap(); assert!(includes_txn_number("update")); coll.delete_one(doc! 
{}).await.unwrap(); diff --git a/src/test/spec/unified_runner/operation.rs b/src/test/spec/unified_runner/operation.rs index 45e02308f..d6b179c89 100644 --- a/src/test/spec/unified_runner/operation.rs +++ b/src/test/spec/unified_runner/operation.rs @@ -1240,11 +1240,8 @@ impl TestOperation for ReplaceOne { async move { let collection = test_runner.get_collection(id).await; let result = collection - .replace_one( - self.filter.clone(), - self.replacement.clone(), - self.options.clone(), - ) + .replace_one(self.filter.clone(), self.replacement.clone()) + .with_options(self.options.clone()) .await?; let result = to_bson(&result)?; Ok(Some(result.into())) diff --git a/src/test/spec/v2_runner/operation.rs b/src/test/spec/v2_runner/operation.rs index c84ce58a3..c0afd77ab 100644 --- a/src/test/spec/v2_runner/operation.rs +++ b/src/test/spec/v2_runner/operation.rs @@ -771,27 +771,11 @@ impl TestOperation for ReplaceOne { session: Option<&'a mut ClientSession>, ) -> BoxFuture<'a, Result>> { async move { - let result = match session { - Some(session) => { - collection - .replace_one_with_session( - self.filter.clone(), - self.replacement.clone(), - self.options.clone(), - session, - ) - .await? - } - None => { - collection - .replace_one( - self.filter.clone(), - self.replacement.clone(), - self.options.clone(), - ) - .await? - } - }; + let result = collection + .replace_one(self.filter.clone(), self.replacement.clone()) + .with_options(self.options.clone()) + .optional(session, |a, s| a.session(s)) + .await?; let result = bson::to_bson(&result)?; Ok(Some(result)) } From 9b280d121b9c43bf13b0d8142726f236136cd64a Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Mon, 11 Mar 2024 12:38:06 -0400 Subject: [PATCH 28/39] fix doctests --- src/change_stream.rs | 2 +- src/change_stream/session.rs | 2 +- src/client/session.rs | 14 ++++++------- src/coll.rs | 2 +- src/cursor.rs | 18 ++++++++--------- src/cursor/session.rs | 30 ++++++++++++++-------------- src/lib.rs | 16 ++++++++------- src/operation/update.rs | 2 +- src/sync/change_stream.rs | 4 ++-- src/sync/client/session.rs | 8 ++++---- src/sync/coll.rs | 2 +- src/sync/cursor.rs | 38 ++++++++++++++++++------------------ 12 files changed, 70 insertions(+), 68 deletions(-) diff --git a/src/change_stream.rs b/src/change_stream.rs index 428158ba6..86c2bb0b7 100644 --- a/src/change_stream.rs +++ b/src/change_stream.rs @@ -57,7 +57,7 @@ use crate::{ /// let mut change_stream = coll.watch().await?; /// let coll_ref = coll.clone(); /// task::spawn(async move { -/// coll_ref.insert_one(doc! { "x": 1 }, None).await; +/// coll_ref.insert_one(doc! { "x": 1 }).await; /// }); /// while let Some(event) = change_stream.next().await.transpose()? { /// println!("operation performed: {:?}, document: {:?}", event.operation_type, event.full_document); diff --git a/src/change_stream/session.rs b/src/change_stream/session.rs index e292b44af..0af9e2fa4 100644 --- a/src/change_stream/session.rs +++ b/src/change_stream/session.rs @@ -85,7 +85,7 @@ where /// let mut cs = coll.watch().session(&mut session).await?; /// while let Some(event) = cs.next(&mut session).await? { /// let id = bson::to_bson(&event.id)?; - /// other_coll.insert_one_with_session(doc! { "id": id }, None, &mut session).await?; + /// other_coll.insert_one(doc! 
{ "id": id }).session(&mut session).await?; /// } /// # Ok::<(), mongodb::error::Error>(()) /// # }; diff --git a/src/client/session.rs b/src/client/session.rs index c96c834b8..3c9f43d01 100644 --- a/src/client/session.rs +++ b/src/client/session.rs @@ -81,7 +81,7 @@ pub(crate) static SESSIONS_UNSUPPORTED_COMMANDS: Lazy> = L /// # } /// /// async fn execute_transaction(coll: &Collection, session: &mut ClientSession) -> Result<()> { -/// coll.insert_one_with_session(doc! { "x": 1 }, None, session).await?; +/// coll.insert_one(doc! { "x": 1 }).session(&mut *session).await?; /// coll.delete_one(doc! { "y": 2 }).session(&mut *session).await?; /// // An "UnknownTransactionCommitResult" label indicates that it is unknown whether the /// // commit has satisfied the write concern associated with the transaction. If an error @@ -355,7 +355,7 @@ impl ClientSession { /// # let coll = client.database("foo").collection::("bar"); /// # let mut session = client.start_session().await?; /// session.start_transaction(None).await?; - /// let result = coll.insert_one_with_session(doc! { "x": 1 }, None, &mut session).await?; + /// let result = coll.insert_one(doc! { "x": 1 }).session(&mut session).await?; /// session.commit_transaction().await?; /// # Ok(()) /// # } @@ -456,7 +456,7 @@ impl ClientSession { /// # let coll = client.database("foo").collection::("bar"); /// # let mut session = client.start_session().await?; /// session.start_transaction(None).await?; - /// let result = coll.insert_one_with_session(doc! { "x": 1 }, None, &mut session).await?; + /// let result = coll.insert_one(doc! { "x": 1 }).session(&mut session).await?; /// session.commit_transaction().await?; /// # Ok(()) /// # } @@ -524,8 +524,8 @@ impl ClientSession { /// # } /// /// async fn execute_transaction(coll: &Collection, session: &mut ClientSession) -> Result<()> { - /// coll.insert_one_with_session(doc! { "x": 1 }, None, session).await?; - /// coll.delete_one(doc! { "y": 2 }).session(session).await?; + /// coll.insert_one(doc! { "x": 1 }).session(&mut *session).await?; + /// coll.delete_one(doc! { "y": 2 }).session(&mut *session).await?; /// Ok(()) /// } /// ``` @@ -605,7 +605,7 @@ impl ClientSession { /// session.with_transaction( /// (&coll, &my_data), /// |session, (coll, my_data)| async move { - /// coll.insert_one_with_session(doc! { "data": *my_data }, None, session).await + /// coll.insert_one(doc! { "data": *my_data }).session(session).await /// }.boxed(), /// None, /// ).await?; @@ -613,7 +613,7 @@ impl ClientSession { /// session.with_transaction( /// (), /// |session, _| async move { - /// coll.insert_one_with_session(doc! { "data": my_data }, None, session).await + /// coll.insert_one(doc! { "data": my_data }).session(session).await /// }.boxed(), /// None, /// ).await?; diff --git a/src/coll.rs b/src/coll.rs index 569986cf7..005d3d088 100644 --- a/src/coll.rs +++ b/src/coll.rs @@ -63,7 +63,7 @@ use crate::{ /// // Spawn several tasks that operate on the same collection concurrently. /// tokio::task::spawn(async move { /// // Perform operations with `coll_ref` that work with directly our model. -/// coll_ref.insert_one(Item { id: i }, None).await; +/// coll_ref.insert_one(Item { id: i }).await; /// }); /// } /// # diff --git a/src/cursor.rs b/src/cursor.rs index 8dae51d2d..d22474441 100644 --- a/src/cursor.rs +++ b/src/cursor.rs @@ -67,7 +67,7 @@ pub(crate) use common::{ /// used in conjunction with the `?` operator. 
/// /// ```rust -/// # use mongodb::{bson::Document, Client, error::Result}; +/// # use mongodb::{bson::{Document, doc}, Client, error::Result}; /// # /// # async fn do_stuff() -> Result<()> { /// # let client = Client::with_uri_str("mongodb://example.com").await?; @@ -75,7 +75,7 @@ pub(crate) use common::{ /// # /// use futures::stream::{StreamExt, TryStreamExt}; /// -/// let mut cursor = coll.find(None, None).await?; +/// let mut cursor = coll.find(doc! {}).await?; /// // regular Stream uses next() and iterates over Option> /// while let Some(doc) = cursor.next().await { /// println!("{}", doc?) @@ -83,7 +83,7 @@ pub(crate) use common::{ /// // regular Stream uses collect() and collects into a Vec> /// let v: Vec> = cursor.collect().await; /// -/// let mut cursor = coll.find(None, None).await?; +/// let mut cursor = coll.find(doc! {}).await?; /// // TryStream uses try_next() and iterates over Result> /// while let Some(doc) = cursor.try_next().await? { /// println!("{}", doc) @@ -190,11 +190,11 @@ impl Cursor { /// calling [`Cursor::advance`] first or after [`Cursor::advance`] returns an error / false. /// /// ``` - /// # use mongodb::{Client, bson::Document, error::Result}; + /// # use mongodb::{Client, bson::{Document, doc}, error::Result}; /// # async fn foo() -> Result<()> { /// # let client = Client::with_uri_str("mongodb://localhost:27017").await?; /// # let coll = client.database("stuff").collection::("stuff"); - /// let mut cursor = coll.find(None, None).await?; + /// let mut cursor = coll.find(doc! {}).await?; /// while cursor.advance().await? { /// println!("{:?}", cursor.current()); /// } @@ -223,11 +223,11 @@ impl Cursor { /// or without calling [`Cursor::advance`] at all may result in a panic. /// /// ``` - /// # use mongodb::{Client, bson::Document, error::Result}; + /// # use mongodb::{Client, bson::{Document, doc}, error::Result}; /// # async fn foo() -> Result<()> { /// # let client = Client::with_uri_str("mongodb://localhost:27017").await?; /// # let coll = client.database("stuff").collection::("stuff"); - /// let mut cursor = coll.find(None, None).await?; + /// let mut cursor = coll.find(doc! {}).await?; /// while cursor.advance().await? { /// println!("{:?}", cursor.current()); /// } @@ -246,7 +246,7 @@ impl Cursor { /// true or without calling [`Cursor::advance`] at all may result in a panic. /// /// ``` - /// # use mongodb::{Client, error::Result}; + /// # use mongodb::{Client, error::Result, bson::doc}; /// # async fn foo() -> Result<()> { /// # let client = Client::with_uri_str("mongodb://localhost:27017").await?; /// # let db = client.database("foo"); @@ -259,7 +259,7 @@ impl Cursor { /// } /// /// let coll = db.collection::("cat"); - /// let mut cursor = coll.find(None, None).await?; + /// let mut cursor = coll.find(doc! {}).await?; /// while cursor.advance().await? 
{ /// println!("{:?}", cursor.deserialize_current()?); /// } diff --git a/src/cursor/session.rs b/src/cursor/session.rs index 39faa7a3f..6f460ac63 100644 --- a/src/cursor/session.rs +++ b/src/cursor/session.rs @@ -40,7 +40,7 @@ use crate::{ /// [`SessionCursor::stream`]: /// /// ```rust -/// # use mongodb::{bson::Document, Client, error::Result, ClientSession, SessionCursor}; +/// # use mongodb::{bson::{Document, doc}, Client, error::Result, ClientSession, SessionCursor}; /// # /// # async fn do_stuff() -> Result<()> { /// # let client = Client::with_uri_str("mongodb://example.com").await?; @@ -48,7 +48,7 @@ use crate::{ /// # let coll = client.database("foo").collection::("bar"); /// # /// // iterate using next() -/// let mut cursor = coll.find_with_session(None, None, &mut session).await?; +/// let mut cursor = coll.find(doc! {}).session(&mut session).await?; /// while let Some(doc) = cursor.next(&mut session).await.transpose()? { /// println!("{}", doc) /// } @@ -56,7 +56,7 @@ use crate::{ /// // iterate using `Stream`: /// use futures::stream::TryStreamExt; /// -/// let mut cursor = coll.find_with_session(None, None, &mut session).await?; +/// let mut cursor = coll.find(doc! {}).session(&mut session).await?; /// let results: Vec<_> = cursor.stream(&mut session).try_collect().await?; /// # /// # Ok(()) @@ -129,23 +129,23 @@ where /// use futures::stream::TryStreamExt; /// /// // iterate over the results - /// let mut cursor = coll.find_with_session(doc! { "x": 1 }, None, &mut session).await?; + /// let mut cursor = coll.find(doc! { "x": 1 }).session(&mut session).await?; /// while let Some(doc) = cursor.stream(&mut session).try_next().await? { /// println!("{}", doc); /// } /// /// // collect the results - /// let mut cursor1 = coll.find_with_session(doc! { "x": 1 }, None, &mut session).await?; + /// let mut cursor1 = coll.find(doc! { "x": 1 }).session(&mut session).await?; /// let v: Vec = cursor1.stream(&mut session).try_collect().await?; /// /// // use session between iterations - /// let mut cursor2 = coll.find_with_session(doc! { "x": 1 }, None, &mut session).await?; + /// let mut cursor2 = coll.find(doc! { "x": 1 }).session(&mut session).await?; /// loop { /// let doc = match cursor2.stream(&mut session).try_next().await? { /// Some(d) => d, /// None => break, /// }; - /// other_coll.insert_one_with_session(doc, None, &mut session).await?; + /// other_coll.insert_one(doc).session(&mut session).await?; /// } /// # Ok::<(), mongodb::error::Error>(()) /// # }; @@ -173,9 +173,9 @@ where /// # let coll = client.database("foo").collection::("bar"); /// # let other_coll = coll.clone(); /// # let mut session = client.start_session().await?; - /// let mut cursor = coll.find_with_session(doc! { "x": 1 }, None, &mut session).await?; + /// let mut cursor = coll.find(doc! { "x": 1 }).session(&mut session).await?; /// while let Some(doc) = cursor.next(&mut session).await.transpose()? { - /// other_coll.insert_one_with_session(doc, None, &mut session).await?; + /// other_coll.insert_one(doc).session(&mut session).await?; /// } /// # Ok::<(), mongodb::error::Error>(()) /// # }; @@ -223,12 +223,12 @@ impl SessionCursor { /// [`SessionCursor::advance`] returns an error / false. 
/// /// ``` - /// # use mongodb::{Client, bson::Document, error::Result}; + /// # use mongodb::{Client, bson::{doc, Document}, error::Result}; /// # async fn foo() -> Result<()> { /// # let client = Client::with_uri_str("mongodb://localhost:27017").await?; /// # let mut session = client.start_session().await?; /// # let coll = client.database("stuff").collection::("stuff"); - /// let mut cursor = coll.find_with_session(None, None, &mut session).await?; + /// let mut cursor = coll.find(doc! {}).session(&mut session).await?; /// while cursor.advance(&mut session).await? { /// println!("{:?}", cursor.current()); /// } @@ -256,12 +256,12 @@ impl SessionCursor { /// true or without calling [`SessionCursor::advance`] at all may result in a panic. /// /// ``` - /// # use mongodb::{Client, bson::Document, error::Result}; + /// # use mongodb::{Client, bson::{Document, doc}, error::Result}; /// # async fn foo() -> Result<()> { /// # let client = Client::with_uri_str("mongodb://localhost:27017").await?; /// # let mut session = client.start_session().await?; /// # let coll = client.database("stuff").collection::("stuff"); - /// let mut cursor = coll.find_with_session(None, None, &mut session).await?; + /// let mut cursor = coll.find(doc! {}).session(&mut session).await?; /// while cursor.advance(&mut session).await? { /// println!("{:?}", cursor.current()); /// } @@ -281,7 +281,7 @@ impl SessionCursor { /// true or without calling [`SessionCursor::advance`] at all may result in a panic. /// /// ``` - /// # use mongodb::{Client, error::Result}; + /// # use mongodb::{Client, error::Result, bson::doc}; /// # async fn foo() -> Result<()> { /// # let client = Client::with_uri_str("mongodb://localhost:27017").await?; /// # let mut session = client.start_session().await?; @@ -295,7 +295,7 @@ impl SessionCursor { /// } /// /// let coll = db.collection::("cat"); - /// let mut cursor = coll.find_with_session(None, None, &mut session).await?; + /// let mut cursor = coll.find(doc! {}).session(&mut session).await?; /// while cursor.advance(&mut session).await? { /// println!("{:?}", cursor.deserialize_current()?); /// } diff --git a/src/lib.rs b/src/lib.rs index cad30f697..123c45cf9 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -98,7 +98,7 @@ //! ]; //! //! // Insert some documents into the "mydb.books" collection. -//! collection.insert_many(docs, None).await?; +//! collection.insert_many(docs).await?; //! # Ok(()) } //! ``` //! @@ -137,7 +137,7 @@ //! ]; //! //! // Insert the books into "mydb.books" collection, no manual conversion to BSON necessary. -//! typed_collection.insert_many(books, None).await?; +//! typed_collection.insert_many(books).await?; //! # Ok(()) } //! ``` //! @@ -166,8 +166,10 @@ //! //! // Query the books in the collection with a filter and an option. //! let filter = doc! { "author": "George Orwell" }; -//! let find_options = FindOptions::builder().sort(doc! { "title": 1 }).build(); -//! let mut cursor = typed_collection.find(filter, find_options).await?; +//! let mut cursor = typed_collection +//! .find(filter) +//! .sort(doc! { "title": 1 }) +//! .await?; //! //! // Iterate over the results of the cursor. //! while let Some(book) = cursor.try_next().await? { @@ -218,9 +220,9 @@ //! ]; //! //! // Insert some books into the "mydb.books" collection. -//! collection.insert_many(docs, None)?; +//! collection.insert_many(docs).run()?; //! -//! let cursor = collection.find(doc! { "author": "George Orwell" }, None)?; +//! let cursor = collection.find(doc! { "author": "George Orwell" }).run()?; //! 
for result in cursor { //! println!("title: {}", result?.title); //! } @@ -256,7 +258,7 @@ //! # let client = Client::with_uri_str("mongodb://example.com").await?; //! let collection = client.database("foo").collection("bar"); //! let handle = tokio::task::spawn(async move { -//! collection.insert_one(doc! { "x": 1 }, None).await +//! collection.insert_one(doc! { "x": 1 }).await //! }); //! //! tokio::time::timeout(Duration::from_secs(5), handle).await???; diff --git a/src/operation/update.rs b/src/operation/update.rs index a398690de..e7998c866 100644 --- a/src/operation/update.rs +++ b/src/operation/update.rs @@ -41,7 +41,7 @@ impl UpdateOrReplace { UpdateModifications::Pipeline(pipeline) => bson_util::to_raw_bson_array(pipeline), }, Self::Replacement(replacement_doc) => { - bson_util::replacement_raw_document_check(&replacement_doc)?; + bson_util::replacement_raw_document_check(replacement_doc)?; Ok(replacement_doc.clone().into()) } } diff --git a/src/sync/change_stream.rs b/src/sync/change_stream.rs index cac5f42ce..516900c5d 100644 --- a/src/sync/change_stream.rs +++ b/src/sync/change_stream.rs @@ -36,7 +36,7 @@ use super::ClientSession; /// # let client = Client::with_uri_str("mongodb://example.com")?; /// # let coll = client.database("foo").collection("bar"); /// let mut change_stream = coll.watch().run()?; -/// coll.insert_one(doc! { "x": 1 }, None)?; +/// coll.insert_one(doc! { "x": 1 }).run()?; /// for event in change_stream { /// let event = event?; /// println!("operation performed: {:?}, document: {:?}", event.operation_type, event.full_document); @@ -192,7 +192,7 @@ where /// let mut cs = coll.watch().session(&mut session).run()?; /// while let Some(event) = cs.next(&mut session)? { /// let id = bson::to_bson(&event.id)?; - /// other_coll.insert_one_with_session(doc! { "id": id }, None, &mut session)?; + /// other_coll.insert_one(doc! { "id": id }).session(&mut session).run()?; /// } /// # Ok::<(), mongodb::error::Error>(()) /// # }; diff --git a/src/sync/client/session.rs b/src/sync/client/session.rs index 8272f5fef..b8c305bc7 100644 --- a/src/sync/client/session.rs +++ b/src/sync/client/session.rs @@ -67,7 +67,7 @@ impl ClientSession { /// # let coll = client.database("foo").collection::("bar"); /// # let mut session = client.start_session().run()?; /// session.start_transaction(None)?; - /// let result = coll.insert_one_with_session(doc! { "x": 1 }, None, &mut session)?; + /// let result = coll.insert_one(doc! { "x": 1 }).session(&mut session).run()?; /// session.commit_transaction()?; /// # Ok(()) /// # } @@ -89,7 +89,7 @@ impl ClientSession { /// # let coll = client.database("foo").collection::("bar"); /// # let mut session = client.start_session().run()?; /// session.start_transaction(None)?; - /// let result = coll.insert_one_with_session(doc! { "x": 1 }, None, &mut session)?; + /// let result = coll.insert_one(doc! { "x": 1 }).session(&mut session).run()?; /// session.commit_transaction()?; /// # Ok(()) /// # } @@ -122,8 +122,8 @@ impl ClientSession { /// # } /// /// fn execute_transaction(coll: Collection, session: &mut ClientSession) -> Result<()> { - /// coll.insert_one_with_session(doc! { "x": 1 }, None, session)?; - /// coll.delete_one(doc! { "y": 2 }).session(session).run()?; + /// coll.insert_one(doc! { "x": 1 }).session(&mut *session).run()?; + /// coll.delete_one(doc! 
{ "y": 2 }).session(&mut *session).run()?; /// Ok(()) /// } /// ``` diff --git a/src/sync/coll.rs b/src/sync/coll.rs index 7ae9924fb..74c7bcc7c 100644 --- a/src/sync/coll.rs +++ b/src/sync/coll.rs @@ -29,7 +29,7 @@ use crate::{ /// /// std::thread::spawn(move || { /// // Perform operations with `coll_ref`. For example: -/// coll_ref.insert_one(doc! { "x": i }, None); +/// coll_ref.insert_one(doc! { "x": i }); /// }); /// } /// # diff --git a/src/sync/cursor.rs b/src/sync/cursor.rs index 779848117..5c1d7724a 100644 --- a/src/sync/cursor.rs +++ b/src/sync/cursor.rs @@ -31,12 +31,12 @@ use crate::{ /// documents it yields using a for loop: /// /// ```rust -/// # use mongodb::{bson::Document, sync::Client, error::Result}; +/// # use mongodb::{bson::{doc, Document}, sync::Client, error::Result}; /// # /// # fn do_stuff() -> Result<()> { /// # let client = Client::with_uri_str("mongodb://example.com")?; /// # let coll = client.database("foo").collection::("bar"); -/// # let mut cursor = coll.find(None, None)?; +/// # let mut cursor = coll.find(doc! {}).run()?; /// # /// for doc in cursor { /// println!("{}", doc?) @@ -60,7 +60,7 @@ use crate::{ /// # fn do_stuff() -> Result<()> { /// # let client = Client::with_uri_str("mongodb://example.com")?; /// # let coll = client.database("foo").collection("bar"); -/// # let cursor = coll.find(Some(doc! { "x": 1 }), None)?; +/// # let cursor = coll.find(doc! { "x": 1 }).run()?; /// # /// let results: Vec> = cursor.collect(); /// # Ok(()) @@ -92,11 +92,11 @@ impl Cursor { /// calling [`Cursor::advance`] first or after [`Cursor::advance`] returns an error / false. /// /// ``` - /// # use mongodb::{sync::Client, bson::Document, error::Result}; + /// # use mongodb::{sync::Client, bson::{Document, doc}, error::Result}; /// # fn foo() -> Result<()> { /// # let client = Client::with_uri_str("mongodb://localhost:27017")?; /// # let coll = client.database("stuff").collection::("stuff"); - /// let mut cursor = coll.find(None, None)?; + /// let mut cursor = coll.find(doc! {}).run()?; /// while cursor.advance()? { /// println!("{:?}", cursor.deserialize_current()?); /// } @@ -115,11 +115,11 @@ impl Cursor { /// or without calling [`Cursor::advance`] at all may result in a panic. /// /// ``` - /// # use mongodb::{sync::Client, bson::Document, error::Result}; + /// # use mongodb::{sync::Client, bson::{doc, Document}, error::Result}; /// # fn foo() -> Result<()> { /// # let client = Client::with_uri_str("mongodb://localhost:27017")?; /// # let coll = client.database("stuff").collection::("stuff"); - /// let mut cursor = coll.find(None, None)?; + /// let mut cursor = coll.find(doc! {}).run()?; /// while cursor.advance()? { /// println!("{:?}", cursor.current()); /// } @@ -138,7 +138,7 @@ impl Cursor { /// true or without calling [`Cursor::advance`] at all may result in a panic. /// /// ``` - /// # use mongodb::{sync::Client, error::Result}; + /// # use mongodb::{sync::Client, error::Result, bson::doc}; /// # fn foo() -> Result<()> { /// # let client = Client::with_uri_str("mongodb://localhost:27017")?; /// # let db = client.database("foo"); @@ -151,7 +151,7 @@ impl Cursor { /// } /// /// let coll = db.collection::("cat"); - /// let mut cursor = coll.find(None, None)?; + /// let mut cursor = coll.find(doc! {}).run()?; /// while cursor.advance()? { /// println!("{:?}", cursor.deserialize_current()?); /// } @@ -181,13 +181,13 @@ where /// one. 
To iterate, retrieve a [`SessionCursorIter]` using [`SessionCursor::iter`]: /// /// ```rust -/// # use mongodb::{bson::Document, sync::Client, error::Result}; +/// # use mongodb::{bson::{doc, Document}, sync::Client, error::Result}; /// # /// # fn do_stuff() -> Result<()> { /// # let client = Client::with_uri_str("mongodb://example.com")?; /// # let mut session = client.start_session().run()?; /// # let coll = client.database("foo").collection::("bar"); -/// # let mut cursor = coll.find_with_session(None, None, &mut session)?; +/// # let mut cursor = coll.find(doc! {}).session(&mut session).run()?; /// # /// for doc in cursor.iter(&mut session) { /// println!("{}", doc?) @@ -220,12 +220,12 @@ impl SessionCursor { /// calling [`Cursor::advance`] first or after [`Cursor::advance`] returns an error / false. /// /// ``` - /// # use mongodb::{sync::Client, bson::Document, error::Result}; + /// # use mongodb::{sync::Client, bson::{doc, Document}, error::Result}; /// # fn foo() -> Result<()> { /// # let client = Client::with_uri_str("mongodb://localhost:27017")?; /// # let mut session = client.start_session().run()?; /// # let coll = client.database("stuff").collection::("stuff"); - /// let mut cursor = coll.find_with_session(None, None, &mut session)?; + /// let mut cursor = coll.find(doc! {}).session(&mut session).run()?; /// while cursor.advance(&mut session)? { /// println!("{:?}", cursor.deserialize_current()?); /// } @@ -245,12 +245,12 @@ impl SessionCursor { /// or without calling [`Cursor::advance`] at all may result in a panic. /// /// ``` - /// # use mongodb::{sync::Client, bson::Document, error::Result}; + /// # use mongodb::{sync::Client, bson::{doc, Document}, error::Result}; /// # fn foo() -> Result<()> { /// # let client = Client::with_uri_str("mongodb://localhost:27017")?; /// # let mut session = client.start_session().run()?; /// # let coll = client.database("stuff").collection::("stuff"); - /// let mut cursor = coll.find_with_session(None, None, &mut session)?; + /// let mut cursor = coll.find(doc! {}).session(&mut session).run()?; /// while cursor.advance(&mut session)? { /// println!("{:?}", cursor.current()); /// } @@ -269,7 +269,7 @@ impl SessionCursor { /// true or without calling [`Cursor::advance`] at all may result in a panic. /// /// ``` - /// # use mongodb::{sync::Client, error::Result}; + /// # use mongodb::{sync::Client, error::Result, bson::doc}; /// # fn foo() -> Result<()> { /// # let client = Client::with_uri_str("mongodb://localhost:27017")?; /// # let mut session = client.start_session().run()?; @@ -283,7 +283,7 @@ impl SessionCursor { /// } /// /// let coll = db.collection::("cat"); - /// let mut cursor = coll.find_with_session(None, None, &mut session)?; + /// let mut cursor = coll.find(doc! {}).session(&mut session).run()?; /// while cursor.advance(&mut session)? { /// println!("{:?}", cursor.deserialize_current()?); /// } @@ -327,9 +327,9 @@ where /// # let coll = client.database("foo").collection::("bar"); /// # let other_coll = coll.clone(); /// # let mut session = client.start_session().run()?; - /// let mut cursor = coll.find_with_session(doc! { "x": 1 }, None, &mut session)?; + /// let mut cursor = coll.find(doc! { "x": 1 }).session(&mut session).run()?; /// while let Some(doc) = cursor.next(&mut session).transpose()? 
{ - /// other_coll.insert_one_with_session(doc, None, &mut session)?; + /// other_coll.insert_one(doc).session(&mut session).run()?; /// } /// # Ok::<(), mongodb::error::Error>(()) /// # } From bdd3a91a34e5a24cb2b2c54661bc63becfe26202 Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Mon, 11 Mar 2024 13:45:10 -0400 Subject: [PATCH 29/39] simplify --- action_macro/src/lib.rs | 17 +-- src/action.rs | 2 +- src/action/find.rs | 3 +- src/action/find_and_modify.rs | 189 +++++++++++++++++++--------------- src/operation/update.rs | 13 +-- 5 files changed, 116 insertions(+), 108 deletions(-) diff --git a/action_macro/src/lib.rs b/action_macro/src/lib.rs index 2ef750fc7..91c41b878 100644 --- a/action_macro/src/lib.rs +++ b/action_macro/src/lib.rs @@ -30,15 +30,13 @@ pub fn action_impl(input: proc_macro::TokenStream) -> proc_macro::TokenStream { lifetime, action, future_name, - future_generics, exec_self_mut, exec_output, exec_body, sync_wrap, } = parse_macro_input!(input as ActionImpl); - let future_generics = future_generics.unwrap_or_else(|| generics.clone()); - let mut unbounded_generics = future_generics.clone(); + let mut unbounded_generics = generics.clone(); for lt in unbounded_generics.lifetimes_mut() { lt.bounds.clear(); } @@ -71,9 +69,9 @@ pub fn action_impl(input: proc_macro::TokenStream) -> proc_macro::TokenStream { } } - pub struct #future_name #future_generics (crate::BoxFuture<#lifetime, #exec_output>); + pub struct #future_name #generics (crate::BoxFuture<#lifetime, #exec_output>); - impl #future_generics std::future::Future for #future_name #unbounded_generics { + impl #generics std::future::Future for #future_name #unbounded_generics { type Output = #exec_output; fn poll(mut self: std::pin::Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> std::task::Poll { @@ -102,7 +100,6 @@ struct ActionImpl { lifetime: Lifetime, action: Type, future_name: Ident, - future_generics: Option, exec_self_mut: Option, exec_output: Type, exec_body: Block, @@ -132,16 +129,11 @@ impl Parse for ActionImpl { let impl_body; braced!(impl_body in input); - // type Future = FutureName; + // type Future = FutureName; impl_body.parse::()?; parse_name(&impl_body, "Future")?; impl_body.parse::()?; let future_name = impl_body.parse()?; - let future_generics = if impl_body.peek(Token![<]) { - Some(impl_body.parse()?) - } else { - None - }; impl_body.parse::()?; // async fn execute([mut] self) -> OutType { } @@ -175,7 +167,6 @@ impl Parse for ActionImpl { lifetime, action, future_name, - future_generics, exec_self_mut, exec_output, exec_body, diff --git a/src/action.rs b/src/action.rs index f2b5b5ccb..dd79e4aa9 100644 --- a/src/action.rs +++ b/src/action.rs @@ -37,7 +37,7 @@ pub use distinct::Distinct; pub use drop::{DropCollection, DropDatabase}; pub use drop_index::DropIndex; pub use find::Find; -pub use find_and_modify::FindAndModify; +pub use find_and_modify::{FindOneAndDelete, FindOneAndReplace, FindOneAndUpdate}; pub use insert_many::InsertMany; pub use insert_one::InsertOne; pub use list_collections::ListCollections; diff --git a/src/action/find.rs b/src/action/find.rs index be7354e5f..4c07946f6 100644 --- a/src/action/find.rs +++ b/src/action/find.rs @@ -70,7 +70,8 @@ impl crate::sync::Collection { } } -/// Finds the documents in a collection matching a filter. Construct with [`Collection::find`]. +/// Finds the documents in a collection matching a filter. Construct with [`Collection::find`] or +/// [`Collection::find_one`]. 
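A usage sketch of the action documented here, with illustrative names; `sort` and `limit` are among the setters generated by `option_setters!` in this file:

```rust
use futures::stream::TryStreamExt;
use mongodb::{
    bson::{doc, Document},
    error::Result,
    Client,
};

async fn list_titles() -> Result<()> {
    let client = Client::with_uri_str("mongodb://localhost:27017").await?;
    let coll = client.database("db").collection::<Document>("books");

    // The filter is a required positional argument; everything that used to
    // live on `FindOptions` is a chained setter on the action.
    let mut cursor = coll
        .find(doc! { "author": "George Orwell" })
        .sort(doc! { "title": 1 })
        .limit(5)
        .await?;
    while let Some(book) = cursor.try_next().await? {
        println!("{}", book);
    }
    Ok(())
}
```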
#[must_use] pub struct Find<'a, T: Send + Sync, Mode = Multiple, Session = ImplicitSession> { coll: &'a Collection, diff --git a/src/action/find_and_modify.rs b/src/action/find_and_modify.rs index edcf7c970..4ca344909 100644 --- a/src/action/find_and_modify.rs +++ b/src/action/find_and_modify.rs @@ -1,6 +1,6 @@ -use std::{borrow::Borrow, marker::PhantomData, time::Duration}; +use std::{borrow::Borrow, time::Duration}; -use bson::{Bson, Document}; +use bson::{Bson, Document, RawDocumentBuf}; use serde::{de::DeserializeOwned, Serialize}; use crate::{ @@ -20,6 +20,7 @@ use crate::{ UpdateOrReplace, }, options::WriteConcern, + serde_util, ClientSession, Collection, }; @@ -27,6 +28,19 @@ use crate::{ use super::{action_impl, option_setters}; impl Collection { + async fn find_and_modify<'a>( + &self, + filter: Document, + modification: Modification, + mut options: Option, + session: Option<&'a mut ClientSession>, + ) -> Result> { + resolve_write_concern_with_session!(self, options, session.as_ref())?; + + let op = Op::::with_modification(self.namespace(), filter, modification, options)?; + self.client().execute_operation(op, session).await + } + /// Atomically finds up to one document in the collection matching `filter` and deletes it. /// /// This operation will retry once upon failure if the connection and encountered error support @@ -35,14 +49,12 @@ impl Collection { /// retryable writes. /// /// `await` will return `Result>`. - pub fn find_one_and_delete(&self, filter: Document) -> FindAndModify<'_, T, Delete> { - FindAndModify { + pub fn find_one_and_delete(&self, filter: Document) -> FindOneAndDelete<'_, T> { + FindOneAndDelete { coll: self, filter, - modification: Ok(Modification::Delete), options: None, session: None, - _mode: PhantomData, } } @@ -61,15 +73,13 @@ impl Collection { &self, filter: Document, update: impl Into, - ) -> FindAndModify<'_, T, Update> { - let update = update.into(); - FindAndModify { + ) -> FindOneAndUpdate<'_, T> { + FindOneAndUpdate { coll: self, filter, - modification: Ok(Modification::Update(update.into())), + update: update.into(), options: None, session: None, - _mode: PhantomData, } } } @@ -86,19 +96,16 @@ impl Collection { &self, filter: Document, replacement: impl Borrow, - ) -> FindAndModify<'_, T, Replace> { - let human_readable_serialization = self.human_readable_serialization(); - FindAndModify { + ) -> FindOneAndReplace<'_, T> { + FindOneAndReplace { coll: self, filter, - modification: UpdateOrReplace::replacement( + replacement: serde_util::to_raw_document_buf_with_options( replacement.borrow(), - human_readable_serialization, - ) - .map(Modification::Update), + self.human_readable_serialization(), + ), options: None, session: None, - _mode: PhantomData, } } } @@ -113,7 +120,7 @@ impl crate::sync::Collection { /// retryable writes. /// /// [`run`](FindAndModify::run) will return `Result>`. 
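A sketch of the blocking form these wrappers expose, assuming an illustrative namespace; `max_time` is one of the `FindOneAndDeleteOptions` setters in this diff, and `run` takes the place of `await`:

```rust
use std::time::Duration;

use mongodb::{
    bson::{doc, Document},
    error::Result,
    sync::Client,
};

fn delete_one_stale_entry() -> Result<()> {
    let client = Client::with_uri_str("mongodb://localhost:27017")?;
    let coll = client.database("db").collection::<Document>("coll");

    // Returns the deleted document, if one matched the filter.
    let deleted = coll
        .find_one_and_delete(doc! { "expired": true })
        .max_time(Duration::from_secs(5))
        .run()?;
    println!("deleted: {:?}", deleted);
    Ok(())
}
```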
- pub fn find_one_and_delete(&self, filter: Document) -> FindAndModify<'_, T, Delete> { + pub fn find_one_and_delete(&self, filter: Document) -> FindOneAndDelete<'_, T> { self.async_collection.find_one_and_delete(filter) } @@ -132,7 +139,7 @@ impl crate::sync::Collection { &self, filter: Document, update: impl Into, - ) -> FindAndModify<'_, T, Update> { + ) -> FindOneAndUpdate<'_, T> { self.async_collection.find_one_and_update(filter, update) } } @@ -150,32 +157,32 @@ impl crate::sync::Collection { &self, filter: Document, replacement: impl Borrow, - ) -> FindAndModify<'_, T, Replace> { + ) -> FindOneAndReplace<'_, T> { self.async_collection .find_one_and_replace(filter, replacement) } } -/// Atomically find up to one document in the collection matching a filter and modify it. Construct -/// with [`Collection::find_one_and_delete`]. +/// Atomically finds up to one document in the collection matching a filter and deletes it. +/// Construct with [`Collection::find_one_and_delete`]. #[must_use] -pub struct FindAndModify<'a, T: Send + Sync, Mode> { +pub struct FindOneAndDelete<'a, T: Send + Sync> { coll: &'a Collection, filter: Document, - modification: Result, - options: Option, + options: Option, session: Option<&'a mut ClientSession>, - _mode: PhantomData, } -pub struct Delete; -pub struct Update; -pub struct Replace; - -impl<'a, T: Send + Sync, Mode> FindAndModify<'a, T, Mode> { - fn options(&mut self) -> &mut FindAndModifyOptions { - self.options - .get_or_insert_with(::default) +impl<'a, T: Send + Sync> FindOneAndDelete<'a, T> { + option_setters! { options: FindOneAndDeleteOptions; + max_time: Duration, + projection: Document, + sort: Document, + write_concern: WriteConcern, + collation: Collation, + hint: Hint, + let_vars: Document, + comment: Bson, } /// Use the provided session when running the operation. @@ -185,37 +192,39 @@ impl<'a, T: Send + Sync, Mode> FindAndModify<'a, T, Mode> { } } -impl<'a, T: Send + Sync> FindAndModify<'a, T, Delete> { - /// Set all options. Note that this will replace all previous values set. - pub fn with_options(mut self, value: impl Into>) -> Self { - self.options = value.into().map(FindAndModifyOptions::from); - self - } +action_impl! { + impl<'a, T: DeserializeOwned + Send + Sync> Action for FindOneAndDelete<'a, T> { + type Future = FindOneAndDeleteFuture; - option_setters! { FindOneAndDeleteOptions; - max_time: Duration, - projection: Document, - sort: Document, - write_concern: WriteConcern, - collation: Collation, - hint: Hint, - let_vars: Document, - comment: Bson, + async fn execute(self) -> Result> { + self.coll.find_and_modify( + self.filter, + Modification::Delete, + self.options.map(FindAndModifyOptions::from), + self.session, + ).await + } } } -impl<'a, T: Send + Sync> FindAndModify<'a, T, Update> { - /// Set all options. Note that this will replace all previous values set. - pub fn with_options(mut self, value: impl Into>) -> Self { - self.options = value.into().map(FindAndModifyOptions::from); - self - } +/// Atomically finds up to one document in the collection matching a filter and updates it. +/// Construct with [`Collection::find_one_and_update`]. +#[must_use] +pub struct FindOneAndUpdate<'a, T: Send + Sync> { + coll: &'a Collection, + filter: Document, + update: UpdateModifications, + options: Option, + session: Option<&'a mut ClientSession>, +} - option_setters! { FindOneAndUpdateOptions; +impl<'a, T: Send + Sync> FindOneAndUpdate<'a, T> { + option_setters! 
{ options: FindOneAndUpdateOptions; array_filters: Vec, bypass_document_validation: bool, max_time: Duration, projection: Document, + return_document: ReturnDocument, sort: Document, upsert: bool, write_concern: WriteConcern, @@ -225,24 +234,45 @@ impl<'a, T: Send + Sync> FindAndModify<'a, T, Update> { comment: Bson, } - /// Set the [`FindOneAndUpdateOptions::return_document`] option. - pub fn return_document(mut self, value: ReturnDocument) -> Self { - self.options().new = Some(value.as_bool()); + /// Use the provided session when running the operation. + pub fn session(mut self, value: impl Into<&'a mut ClientSession>) -> Self { + self.session = Some(value.into()); self } } -impl<'a, T: Send + Sync> FindAndModify<'a, T, Replace> { - /// Set all options. Note that this will replace all previous values set. - pub fn with_options(mut self, value: impl Into>) -> Self { - self.options = value.into().map(FindAndModifyOptions::from); - self +action_impl! { + impl<'a, T: DeserializeOwned + Send + Sync> Action for FindOneAndUpdate<'a, T> { + type Future = FindOneAndUpdateFuture; + + async fn execute(self) -> Result> { + self.coll.find_and_modify( + self.filter, + Modification::Update(self.update.into()), + self.options.map(FindAndModifyOptions::from), + self.session, + ).await + } } +} - option_setters! { FindOneAndReplaceOptions; +/// Atomically finds up to one document in the collection matching a filter and replaces it. +/// Construct with [`Collection::find_one_and_replace`]. +#[must_use] +pub struct FindOneAndReplace<'a, T: Send + Sync> { + coll: &'a Collection, + filter: Document, + replacement: Result, + options: Option, + session: Option<&'a mut ClientSession>, +} + +impl<'a, T: Send + Sync> FindOneAndReplace<'a, T> { + option_setters! { options: FindOneAndReplaceOptions; bypass_document_validation: bool, max_time: Duration, projection: Document, + return_document: ReturnDocument, sort: Document, upsert: bool, write_concern: WriteConcern, @@ -252,27 +282,24 @@ impl<'a, T: Send + Sync> FindAndModify<'a, T, Replace> { comment: Bson, } - /// Set the [`FindOneAndReplaceOptions::return_document`] option. - pub fn return_document(mut self, value: ReturnDocument) -> Self { - self.options().new = Some(value.as_bool()); + /// Use the provided session when running the operation. + pub fn session(mut self, value: impl Into<&'a mut ClientSession>) -> Self { + self.session = Some(value.into()); self } } action_impl! 
{ - impl<'a, T: DeserializeOwned + Send + Sync, Mode> Action for FindAndModify<'a, T, Mode> { - type Future = FindAndModifyFuture<'a, T: DeserializeOwned + Send + Sync>; - - async fn execute(mut self) -> Result> { - resolve_write_concern_with_session!(self.coll, self.options, self.session.as_ref())?; + impl<'a, T: DeserializeOwned + Send + Sync> Action for FindOneAndReplace<'a, T> { + type Future = FindOneAndReplaceFuture; - let op = Op::::with_modification( - self.coll.namespace(), + async fn execute(self) -> Result> { + self.coll.find_and_modify( self.filter, - self.modification?, - self.options, - )?; - self.coll.client().execute_operation(op, self.session).await + Modification::Update(UpdateOrReplace::Replacement(self.replacement?)), + self.options.map(FindAndModifyOptions::from), + self.session, + ).await } } } diff --git a/src/operation/update.rs b/src/operation/update.rs index e7998c866..bb8a29cba 100644 --- a/src/operation/update.rs +++ b/src/operation/update.rs @@ -1,7 +1,7 @@ #[cfg(test)] mod test; -use serde::{Deserialize, Serialize}; +use serde::Deserialize; use crate::{ bson::{doc, rawdoc, Document, RawArrayBuf, RawBson, RawDocumentBuf}, @@ -11,7 +11,6 @@ use crate::{ operation::{OperationWithDefaults, Retryability, WriteResponseBody}, options::{UpdateModifications, UpdateOptions, WriteConcern}, results::UpdateResult, - serde_util::to_raw_document_buf_with_options, Namespace, }; @@ -22,16 +21,6 @@ pub(crate) enum UpdateOrReplace { } impl UpdateOrReplace { - pub(crate) fn replacement( - update: &T, - human_readable_serialization: bool, - ) -> Result { - Ok(Self::Replacement(to_raw_document_buf_with_options( - update, - human_readable_serialization, - )?)) - } - pub(crate) fn to_raw_bson(&self) -> Result { match self { Self::UpdateModifications(update_modifications) => match update_modifications { From d5430d8ef3cea608d0eea604f6ac909b6749c155 Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Mon, 11 Mar 2024 14:10:19 -0400 Subject: [PATCH 30/39] pre-review tidy --- action_macro/src/lib.rs | 2 +- src/action/find.rs | 2 +- src/action/list_collections.rs | 2 +- src/action/list_databases.rs | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/action_macro/src/lib.rs b/action_macro/src/lib.rs index 91c41b878..b3c2b4a7e 100644 --- a/action_macro/src/lib.rs +++ b/action_macro/src/lib.rs @@ -91,7 +91,7 @@ pub fn action_impl(input: proc_macro::TokenStream) -> proc_macro::TokenStream { } // impl Action for ActionType { -// type Future = FutureName; +// type Future = FutureName; // async fn execute([mut] self) -> OutType { } // [SyncWrap] // } diff --git a/src/action/find.rs b/src/action/find.rs index 4c07946f6..2381fb183 100644 --- a/src/action/find.rs +++ b/src/action/find.rs @@ -116,7 +116,7 @@ impl<'a, T: Send + Sync, Session> Find<'a, T, Multiple, Session> { } impl<'a, T: Send + Sync, Mode> Find<'a, T, Mode, ImplicitSession> { - /// Runs the query using the provided session. + /// Use the provided session when running the operation. pub fn session<'s>( self, value: impl Into<&'s mut ClientSession>, diff --git a/src/action/list_collections.rs b/src/action/list_collections.rs index 15dcb2ed0..b81a103e9 100644 --- a/src/action/list_collections.rs +++ b/src/action/list_collections.rs @@ -87,7 +87,7 @@ impl<'a, M, S> ListCollections<'a, M, S> { } impl<'a, M> ListCollections<'a, M, ImplicitSession> { - /// Runs the query using the provided session. + /// Use the provided session when running the operation. 
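In practice, providing a session to one of these actions looks like the following sketch; the namespace and filter are illustrative:

```rust
use mongodb::{
    bson::{doc, Document},
    error::Result,
    Client,
};

async fn read_in_session() -> Result<()> {
    let client = Client::with_uri_str("mongodb://localhost:27017").await?;
    let coll = client.database("db").collection::<Document>("coll");

    let mut session = client.start_session().await?;
    // Chaining `.session(...)` converts the action to its explicit-session
    // form, so iteration also goes through the same session.
    let mut cursor = coll.find(doc! { "x": 1 }).session(&mut session).await?;
    while let Some(document) = cursor.next(&mut session).await.transpose()? {
        println!("{}", document);
    }
    Ok(())
}
```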
pub fn session<'s>( self, value: impl Into<&'s mut ClientSession>, diff --git a/src/action/list_databases.rs b/src/action/list_databases.rs index f4c7c28d7..08b9eb73f 100644 --- a/src/action/list_databases.rs +++ b/src/action/list_databases.rs @@ -75,7 +75,7 @@ impl<'a, M> ListDatabases<'a, M> { comment: Bson, ); - /// Runs the query using the provided session. + /// Use the provided session when running the operation. pub fn session(mut self, value: impl Into<&'a mut ClientSession>) -> Self { self.session = Some(value.into()); self From 6e5abbd592b8508de09e40c8d04d268266468fa5 Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Mon, 11 Mar 2024 14:47:06 -0400 Subject: [PATCH 31/39] fix find --- src/operation/find.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/operation/find.rs b/src/operation/find.rs index fdd08a49f..fb0434461 100644 --- a/src/operation/find.rs +++ b/src/operation/find.rs @@ -80,9 +80,7 @@ impl OperationWithDefaults for Find { append_options(&mut body, self.options.as_ref())?; - if !self.filter.is_empty() { - body.insert("filter", self.filter.clone()); - } + body.insert("filter", self.filter.clone()); Ok(Command::new_read( Self::NAME.to_string(), From 5047b151f25485eaa151132f652058213c1044b8 Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Mon, 11 Mar 2024 16:09:44 -0400 Subject: [PATCH 32/39] minor fixes --- action_macro/Cargo.toml | 1 + manual/src/README.md | 2 +- manual/src/encryption.md | 30 ++++++++++++++---------------- manual/src/reading.md | 9 +++++---- manual/src/tracing.md | 4 ++-- src/action/find_and_modify.rs | 6 ++++-- 6 files changed, 27 insertions(+), 25 deletions(-) diff --git a/action_macro/Cargo.toml b/action_macro/Cargo.toml index f846e27ef..8320208ca 100644 --- a/action_macro/Cargo.toml +++ b/action_macro/Cargo.toml @@ -2,6 +2,7 @@ name = "action_macro" version = "0.1.0" edition = "2021" +license = "Apache-2.0" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html diff --git a/manual/src/README.md b/manual/src/README.md index 2e8cbfa02..b4def29c7 100644 --- a/manual/src/README.md +++ b/manual/src/README.md @@ -23,7 +23,7 @@ e.g. # let client = Client::with_uri_str("mongodb://example.com").await?; let collection = client.database("foo").collection("bar"); let handle = tokio::task::spawn(async move { - collection.insert_one(doc! { "x": 1 }, None).await + collection.insert_one(doc! { "x": 1 }).await }); tokio::time::timeout(Duration::from_secs(5), handle).await???; diff --git a/manual/src/encryption.md b/manual/src/encryption.md index 4c9e4e1f8..0fbfd9a79 100644 --- a/manual/src/encryption.md +++ b/manual/src/encryption.md @@ -187,16 +187,16 @@ async fn main() -> Result<()> { // Clear old data. coll.drop().await?; - coll.insert_one(doc! { "encryptedField": "123456789" }, None) + coll.insert_one(doc! { "encryptedField": "123456789" }) .await?; - println!("Decrypted document: {:?}", coll.find_one(None, None).await?); + println!("Decrypted document: {:?}", coll.find_one(doc! {}).await?); let unencrypted_coll = Client::with_uri_str(URI) .await? .database(&encrypted_namespace.db) .collection::(&encrypted_namespace.coll); println!( "Encrypted document: {:?}", - unencrypted_coll.find_one(None, None).await? + unencrypted_coll.find_one(doc! {}).await? ); Ok(()) @@ -294,19 +294,19 @@ async fn main() -> Result<()> { .validator(doc! { "$jsonSchema": schema }) .await?; - coll.insert_one(doc! { "encryptedField": "123456789" }, None) + coll.insert_one(doc! 
{ "encryptedField": "123456789" }) .await?; - println!("Decrypted document: {:?}", coll.find_one(None, None).await?); + println!("Decrypted document: {:?}", coll.find_one(doc! {}).await?); let unencrypted_coll = Client::with_uri_str(URI) .await? .database(&encrypted_namespace.db) .collection::(&encrypted_namespace.coll); println!( "Encrypted document: {:?}", - unencrypted_coll.find_one(None, None).await? + unencrypted_coll.find_one(doc! {}).await? ); // This would return a Write error with the message "Document failed validation". - // unencrypted_coll.insert_one(doc! { "encryptedField": "123456789" }, None) + // unencrypted_coll.insert_one(doc! { "encryptedField": "123456789" }) // .await?; Ok(()) @@ -407,11 +407,10 @@ async fn main() -> Result<()> { db.create_collection("encryptedCollection").await?; coll.insert_one( doc! { "_id": 1, "firstName": "Jane", "lastName": "Doe" }, - None, ) .await?; let docs: Vec<_> = coll - .find(doc! {"firstName": "Jane"}, None) + .find(doc! {"firstName": "Jane"}) .await? .try_collect() .await?; @@ -540,7 +539,6 @@ async fn main() -> Result<()> { "encryptedIndexed": insert_payload_indexed, "encryptedUnindexed": insert_payload_unindexed, }, - None, ) .await?; @@ -556,7 +554,7 @@ async fn main() -> Result<()> { // Find the document we inserted using the encrypted payload. // The returned document is automatically decrypted. let doc = coll - .find_one(doc! { "encryptedIndexed": find_payload }, None) + .find_one(doc! { "encryptedIndexed": find_payload }) .await?; println!("Returned document: {:?}", doc); @@ -634,9 +632,9 @@ async fn main() -> Result<()> { Algorithm::AeadAes256CbcHmacSha512Deterministic, ) .await?; - coll.insert_one(doc! { "encryptedField": encrypted_field }, None) + coll.insert_one(doc! { "encryptedField": encrypted_field }) .await?; - let mut doc = coll.find_one(None, None).await?.unwrap(); + let mut doc = coll.find_one(doc! {}).await?.unwrap(); println!("Encrypted document: {:?}", doc); // Explicitly decrypt the field: @@ -735,10 +733,10 @@ async fn main() -> Result<()> { Algorithm::AeadAes256CbcHmacSha512Deterministic, ) .await?; - coll.insert_one(doc! { "encryptedField": encrypted_field }, None) + coll.insert_one(doc! { "encryptedField": encrypted_field }) .await?; // Automatically decrypts any encrypted fields. - let doc = coll.find_one(None, None).await?.unwrap(); + let doc = coll.find_one(doc! {}).await?.unwrap(); println!("Decrypted document: {:?}", doc); let unencrypted_coll = Client::with_uri_str(URI) .await? @@ -746,7 +744,7 @@ async fn main() -> Result<()> { .collection::("coll"); println!( "Encrypted document: {:?}", - unencrypted_coll.find_one(None, None).await? + unencrypted_coll.find_one(doc! {}).await? ); Ok(()) diff --git a/manual/src/reading.md b/manual/src/reading.md index cf064d821..b54ba6b4c 100644 --- a/manual/src/reading.md +++ b/manual/src/reading.md @@ -58,7 +58,7 @@ let coll = client.database("items").collection::("in_stock"); for i in 0..5 { // Perform operations that work with directly our model. - coll.insert_one(Item { id: i }, None).await; + coll.insert_one(Item { id: i }).await; } # # Ok(()) @@ -89,9 +89,10 @@ use futures::stream::TryStreamExt; use mongodb::{bson::doc, options::FindOptions}; // Query the books in the collection with a filter and an option. -let filter = doc! { "author": "George Orwell" }; -let find_options = FindOptions::builder().sort(doc! { "title": 1 }).build(); -let mut cursor = typed_collection.find(filter, find_options).await?; +let mut cursor = typed_collection + .find(doc! 
{ "author": "George Orwell" }) + .sort(doc! { "title": 1 }) + .await?; // Iterate over the results of the cursor. while let Some(book) = cursor.try_next().await? { diff --git a/manual/src/tracing.md b/manual/src/tracing.md index f981edef6..2e6ca01cd 100644 --- a/manual/src/tracing.md +++ b/manual/src/tracing.md @@ -62,7 +62,7 @@ async fn main() -> Result<()> { // Insert a document. let coll = client.database("test").collection("test_coll"); - coll.insert_one(doc! { "x" : 1 }, None).await?; + coll.insert_one(doc! { "x" : 1 }).await?; Ok(()) } @@ -114,7 +114,7 @@ async fn main() -> Result<()> { // Insert a document. let coll = client.database("test").collection("test_coll"); - coll.insert_one(doc! { "x" : 1 }, None).await?; + coll.insert_one(doc! { "x" : 1 }).await?; Ok(()) } diff --git a/src/action/find_and_modify.rs b/src/action/find_and_modify.rs index 4ca344909..1429985e8 100644 --- a/src/action/find_and_modify.rs +++ b/src/action/find_and_modify.rs @@ -119,7 +119,7 @@ impl crate::sync::Collection { /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on /// retryable writes. /// - /// [`run`](FindAndModify::run) will return `Result>`. + /// [`run`](FindOneAndDelete::run) will return `Result>`. pub fn find_one_and_delete(&self, filter: Document) -> FindOneAndDelete<'_, T> { self.async_collection.find_one_and_delete(filter) } @@ -134,7 +134,7 @@ impl crate::sync::Collection { /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on /// retryable writes. /// - /// [`run`](FindAndModify::run) will return `Result>`. + /// [`run`](FindOneAndDelete::run) will return `Result>`. pub fn find_one_and_update( &self, filter: Document, @@ -153,6 +153,8 @@ impl crate::sync::Collection { /// retryability. See the documentation /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on /// retryable writes. + /// + /// [`run`](FindOneAndReplace::run) will return `Result>`. 
pub fn find_one_and_replace( &self, filter: Document, From 04e12be46e3809d3997ed1d3e7ac36eb4446e3f3 Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Mon, 18 Mar 2024 10:34:26 -0400 Subject: [PATCH 33/39] split findone from find --- src/action.rs | 2 +- src/action/find.rs | 113 +++++++++++++++++++++++--------------------- src/coll/options.rs | 2 +- 3 files changed, 62 insertions(+), 55 deletions(-) diff --git a/src/action.rs b/src/action.rs index dd79e4aa9..c30074125 100644 --- a/src/action.rs +++ b/src/action.rs @@ -36,7 +36,7 @@ pub use delete::Delete; pub use distinct::Distinct; pub use drop::{DropCollection, DropDatabase}; pub use drop_index::DropIndex; -pub use find::Find; +pub use find::{Find, FindOne}; pub use find_and_modify::{FindOneAndDelete, FindOneAndReplace, FindOneAndUpdate}; pub use insert_many::InsertMany; pub use insert_one::InsertOne; diff --git a/src/action/find.rs b/src/action/find.rs index 2381fb183..216fa4913 100644 --- a/src/action/find.rs +++ b/src/action/find.rs @@ -1,10 +1,10 @@ -use std::{marker::PhantomData, time::Duration}; +use std::time::Duration; use bson::{Bson, Document}; use serde::de::DeserializeOwned; use crate::{ - coll::options::{CursorType, FindOptions, Hint}, + coll::options::{FindOneOptions, FindOptions, Hint}, collation::Collation, error::Result, operation::Find as Op, @@ -16,7 +16,7 @@ use crate::{ SessionCursor, }; -use super::{action_impl, option_setters, ExplicitSession, ImplicitSession, Multiple, Single}; +use super::{action_impl, option_setters, ExplicitSession, ImplicitSession}; impl Collection { /// Finds the documents in the collection matching `filter`. @@ -29,7 +29,6 @@ impl Collection { filter, options: None, session: ImplicitSession, - _mode: PhantomData, } } } @@ -38,13 +37,12 @@ impl Collection { /// Finds a single document in the collection matching `filter`. /// /// `await` will return `Result>`. - pub fn find_one(&self, filter: Document) -> Find<'_, T, Single> { - Find { + pub fn find_one(&self, filter: Document) -> FindOne<'_, T> { + FindOne { coll: self, filter, options: None, - session: ImplicitSession, - _mode: PhantomData, + session: None, } } } @@ -65,23 +63,21 @@ impl crate::sync::Collection { /// Finds a single document in the collection matching `filter`. /// /// [`run`](Find::run) will return `Result>`. - pub fn find_one(&self, filter: Document) -> Find<'_, T, Single> { + pub fn find_one(&self, filter: Document) -> FindOne<'_, T> { self.async_collection.find_one(filter) } } -/// Finds the documents in a collection matching a filter. Construct with [`Collection::find`] or -/// [`Collection::find_one`]. +/// Finds the documents in a collection matching a filter. Construct with [`Collection::find`]. #[must_use] -pub struct Find<'a, T: Send + Sync, Mode = Multiple, Session = ImplicitSession> { +pub struct Find<'a, T: Send + Sync, Session = ImplicitSession> { coll: &'a Collection, filter: Document, options: Option, session: Session, - _mode: PhantomData, } -impl<'a, T: Send + Sync, Mode, Session> Find<'a, T, Mode, Session> { +impl<'a, T: Send + Sync, Session> Find<'a, T, Session> { option_setters!(options: FindOptions; allow_partial_results: bool, comment: String, @@ -101,38 +97,23 @@ impl<'a, T: Send + Sync, Mode, Session> Find<'a, T, Mode, Session> { collation: Collation, let_vars: Document, ); -} -// Some options don't make sense for `find_one`. 
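The observable difference after this split, sketched with illustrative names: `find` still resolves to a cursor, while `find_one` resolves directly to an `Option`:

```rust
use futures::stream::TryStreamExt;
use mongodb::{
    bson::{doc, Document},
    error::Result,
    Client,
};

async fn compare_find_and_find_one() -> Result<()> {
    let client = Client::with_uri_str("mongodb://localhost:27017").await?;
    let coll = client.database("db").collection::<Document>("coll");

    // `find` yields every matching document through a cursor...
    let all: Vec<Document> = coll
        .find(doc! { "active": true })
        .await?
        .try_collect()
        .await?;

    // ...while `find_one` yields at most one document, with no cursor involved.
    let first = coll.find_one(doc! { "active": true }).await?;

    println!("{} total, first: {:?}", all.len(), first);
    Ok(())
}
```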
-impl<'a, T: Send + Sync, Session> Find<'a, T, Multiple, Session> { - option_setters!(FindOptions; - allow_disk_use: bool, - batch_size: u32, - cursor_type: CursorType, - limit: i64, - max_await_time: Duration, - no_cursor_timeout: bool, - ); -} - -impl<'a, T: Send + Sync, Mode> Find<'a, T, Mode, ImplicitSession> { /// Use the provided session when running the operation. pub fn session<'s>( self, value: impl Into<&'s mut ClientSession>, - ) -> Find<'a, T, Mode, ExplicitSession<'s>> { + ) -> Find<'a, T, ExplicitSession<'s>> { Find { coll: self.coll, filter: self.filter, options: self.options, session: ExplicitSession(value.into()), - _mode: PhantomData, } } } action_impl! { - impl<'a, T: Send + Sync> Action for Find<'a, T, Multiple, ImplicitSession> { + impl<'a, T: Send + Sync> Action for Find<'a, T, ImplicitSession> { type Future = FindFuture; async fn execute(mut self) -> Result> { @@ -149,7 +130,7 @@ action_impl! { } action_impl! { - impl<'a, T: Send + Sync> Action for Find<'a, T, Multiple, ExplicitSession<'a>> { + impl<'a, T: Send + Sync> Action for Find<'a, T, ExplicitSession<'a>> { type Future = FindSessionFuture; async fn execute(mut self) -> Result> { @@ -166,36 +147,62 @@ action_impl! { } } -action_impl! { - impl<'a, T: DeserializeOwned + Send + Sync> Action for Find<'a, T, Single, ImplicitSession> - { - type Future = FindOneFuture; +/// Finds a single document in a collection matching a filter. Construct with +/// [`Collection::find_one`]. +#[must_use] +pub struct FindOne<'a, T: Send + Sync> { + coll: &'a Collection, + filter: Document, + options: Option, + session: Option<&'a mut ClientSession>, +} - async fn execute(self) -> Result> { - use futures_util::stream::StreamExt; - let mut options = self.options.unwrap_or_default(); - options.limit = Some(-1); - let mut cursor = self.coll.find(self.filter).with_options(options).await?; - cursor.next().await.transpose() - } +impl<'a, T: Send + Sync> FindOne<'a, T> { + option_setters! { options: FindOneOptions; + allow_partial_results: bool, + collation: Collation, + comment: String, + comment_bson: Bson, + hint: Hint, + max: Document, + max_scan: u64, + max_time: Duration, + min: Document, + projection: Document, + read_concern: ReadConcern, + return_key: bool, + selection_criteria: SelectionCriteria, + show_record_id: bool, + skip: u64, + sort: Document, + let_vars: Document, + } + + /// Use the provided session when running the operation. + pub fn session<'s>(mut self, value: impl Into<&'s mut ClientSession>) -> Self { + self.session = Some(value.into()); + self } } action_impl! 
{ - impl<'a, T: DeserializeOwned + Send + Sync> Action for Find<'a, T, Single, ExplicitSession<'a>> { - type Future = FindOneSessionFuture; + impl<'a, T: DeserializeOwned + Send + Sync> Action for FindOne<'a, T> + { + type Future = FindOneFuture; async fn execute(self) -> Result> { use futures_util::stream::StreamExt; - let mut options = self.options.unwrap_or_default(); + let mut options: FindOptions = self.options.unwrap_or_default().into(); options.limit = Some(-1); - let mut cursor = self.coll - .find(self.filter) - .with_options(options) - .session(&mut *self.session.0) - .await?; - let mut stream = cursor.stream(self.session.0); - stream.next().await.transpose() + let find = self.coll.find(self.filter).with_options(options); + if let Some(session) = self.session { + let mut cursor = find.session(session).await?; + let mut stream = cursor.stream(session); + stream.next().await.transpose() + } else { + let mut cursor = self.coll.find(self.filter).with_options(options).await?; + cursor.next().await.transpose() + } } } } diff --git a/src/coll/options.rs b/src/coll/options.rs index 6c8a63a53..1f10bd7af 100644 --- a/src/coll/options.rs +++ b/src/coll/options.rs @@ -939,7 +939,7 @@ where #[serde(rename_all = "camelCase")] #[builder(field_defaults(default, setter(into)))] #[non_exhaustive] -pub(crate) struct FindOneOptions { +pub struct FindOneOptions { /// If true, partial results will be returned from a mongos rather than an error being /// returned if one or more shards is down. pub allow_partial_results: Option, From aa3f7035fbb42e14ba6ed7790bb218712e9a5c43 Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Mon, 18 Mar 2024 12:22:23 -0400 Subject: [PATCH 34/39] avoid clone --- Cargo.toml | 2 +- src/operation/find_and_modify.rs | 11 ++++++----- src/operation/update.rs | 16 +++++++++++----- 3 files changed, 18 insertions(+), 11 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 87eeccdb5..089fa97db 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -73,7 +73,7 @@ action_macro = { path = "action_macro" } async-trait = "0.1.42" base64 = "0.13.0" bitflags = "1.1.0" -bson = { git = "https://github.com/mongodb/bson-rust", branch = "main" } +bson = { git = "https://github.com/abr-egn/bson-rust", branch = "RUST-1512/append_bytes" } chrono = { version = "0.4.7", default-features = false, features = ["clock", "std"] } derivative = "2.1.1" derive_more = "0.99.17" diff --git a/src/operation/find_and_modify.rs b/src/operation/find_and_modify.rs index 9ee6fe09f..45c029e0b 100644 --- a/src/operation/find_and_modify.rs +++ b/src/operation/find_and_modify.rs @@ -77,11 +77,12 @@ impl OperationWithDefaults for FindAndModify { "query": RawDocumentBuf::from_document(&self.query)?, }; - let (key, modification) = match &self.modification { - Modification::Delete => ("remove", true.into()), - Modification::Update(update_or_replace) => ("update", update_or_replace.to_raw_bson()?), - }; - body.append(key, modification); + match &self.modification { + Modification::Delete => body.append("remove", true), + Modification::Update(update_or_replace) => { + update_or_replace.append_to_rawdoc(&mut body, "update")? 
+ } + } if let Some(ref mut options) = self.options { remove_empty_write_concern!(Some(options)); diff --git a/src/operation/update.rs b/src/operation/update.rs index bb8a29cba..a09609c73 100644 --- a/src/operation/update.rs +++ b/src/operation/update.rs @@ -21,19 +21,25 @@ pub(crate) enum UpdateOrReplace { } impl UpdateOrReplace { - pub(crate) fn to_raw_bson(&self) -> Result { + pub(crate) fn append_to_rawdoc(&self, doc: &mut RawDocumentBuf, key: &str) -> Result<()> { match self { Self::UpdateModifications(update_modifications) => match update_modifications { UpdateModifications::Document(document) => { - Ok(RawDocumentBuf::from_document(document)?.into()) + let raw = RawDocumentBuf::from_document(document)?; + doc.append(key, raw); + } + UpdateModifications::Pipeline(pipeline) => { + let raw = bson_util::to_raw_bson_array(pipeline)?; + doc.append(key, raw); } - UpdateModifications::Pipeline(pipeline) => bson_util::to_raw_bson_array(pipeline), }, Self::Replacement(replacement_doc) => { bson_util::replacement_raw_document_check(replacement_doc)?; - Ok(replacement_doc.clone().into()) + doc.append_ref(key, replacement_doc); } } + + Ok(()) } } @@ -110,8 +116,8 @@ impl OperationWithDefaults for Update { let mut update = rawdoc! { "q": RawDocumentBuf::from_document(&self.filter)?, - "u": self.update.to_raw_bson()?, }; + self.update.append_to_rawdoc(&mut update, "u")?; if let Some(ref options) = self.options { if let Some(upsert) = options.upsert { From e17358351826fb4720dae41931ba4c7f6eb996af Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Mon, 18 Mar 2024 12:46:47 -0400 Subject: [PATCH 35/39] fixes --- Cargo.toml | 3 +++ src/action/find.rs | 6 +++--- src/gridfs.rs | 3 +-- 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 089fa97db..231a9bfcf 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -68,6 +68,9 @@ in-use-encryption-unstable = ["mongocrypt", "rayon", "num_cpus"] # TODO: pending https://github.com/tokio-rs/tracing/issues/2036 stop depending directly on log. tracing-unstable = ["tracing", "log"] +[patch.crates-io] +bson = { git = "https://github.com/abr-egn/bson-rust", branch = "RUST-1512/append_bytes" } + [dependencies] action_macro = { path = "action_macro" } async-trait = "0.1.42" diff --git a/src/action/find.rs b/src/action/find.rs index 216fa4913..5c6fd4c4d 100644 --- a/src/action/find.rs +++ b/src/action/find.rs @@ -179,7 +179,7 @@ impl<'a, T: Send + Sync> FindOne<'a, T> { } /// Use the provided session when running the operation. - pub fn session<'s>(mut self, value: impl Into<&'s mut ClientSession>) -> Self { + pub fn session(mut self, value: impl Into<&'a mut ClientSession>) -> Self { self.session = Some(value.into()); self } @@ -196,11 +196,11 @@ action_impl! 
{ options.limit = Some(-1); let find = self.coll.find(self.filter).with_options(options); if let Some(session) = self.session { - let mut cursor = find.session(session).await?; + let mut cursor = find.session(&mut *session).await?; let mut stream = cursor.stream(session); stream.next().await.transpose() } else { - let mut cursor = self.coll.find(self.filter).with_options(options).await?; + let mut cursor = find.await?; cursor.next().await.transpose() } } diff --git a/src/gridfs.rs b/src/gridfs.rs index 40a4191a7..6738a4985 100644 --- a/src/gridfs.rs +++ b/src/gridfs.rs @@ -246,8 +246,7 @@ impl GridFsBucket { ) -> Result> { let find_options = options .into() - .map(FindOneOptions::from) - .map(FindOptions::from); + .map(FindOneOptions::from); self.files() .find_one(filter) .with_options(find_options) From 2e512adc5d6a0a6bd2e1621c194edad22c8ff6bd Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Mon, 18 Mar 2024 12:54:02 -0400 Subject: [PATCH 36/39] fix find options --- Cargo.toml | 4 ++-- src/action/find.rs | 8 +++++++- src/gridfs.rs | 4 +--- 3 files changed, 10 insertions(+), 6 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 231a9bfcf..e268e5c14 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -68,7 +68,7 @@ in-use-encryption-unstable = ["mongocrypt", "rayon", "num_cpus"] # TODO: pending https://github.com/tokio-rs/tracing/issues/2036 stop depending directly on log. tracing-unstable = ["tracing", "log"] -[patch.crates-io] +[patch."https://github.com/mongodb/bson-rust"] bson = { git = "https://github.com/abr-egn/bson-rust", branch = "RUST-1512/append_bytes" } [dependencies] @@ -76,7 +76,7 @@ action_macro = { path = "action_macro" } async-trait = "0.1.42" base64 = "0.13.0" bitflags = "1.1.0" -bson = { git = "https://github.com/abr-egn/bson-rust", branch = "RUST-1512/append_bytes" } +bson = { git = "https://github.com/mongodb/bson-rust", branch = "main" } chrono = { version = "0.4.7", default-features = false, features = ["clock", "std"] } derivative = "2.1.1" derive_more = "0.99.17" diff --git a/src/action/find.rs b/src/action/find.rs index 5c6fd4c4d..628b43609 100644 --- a/src/action/find.rs +++ b/src/action/find.rs @@ -4,7 +4,7 @@ use bson::{Bson, Document}; use serde::de::DeserializeOwned; use crate::{ - coll::options::{FindOneOptions, FindOptions, Hint}, + coll::options::{CursorType, FindOneOptions, FindOptions, Hint}, collation::Collation, error::Result, operation::Find as Op, @@ -79,14 +79,20 @@ pub struct Find<'a, T: Send + Sync, Session = ImplicitSession> { impl<'a, T: Send + Sync, Session> Find<'a, T, Session> { option_setters!(options: FindOptions; + allow_disk_use: bool, allow_partial_results: bool, + batch_size: u32, comment: String, comment_bson: Bson, + cursor_type: CursorType, hint: Hint, + limit: i64, max: Document, + max_await_time: Duration, max_scan: u64, max_time: Duration, min: Document, + no_cursor_timeout: bool, projection: Document, read_concern: ReadConcern, return_key: bool, diff --git a/src/gridfs.rs b/src/gridfs.rs index 6738a4985..34ecce928 100644 --- a/src/gridfs.rs +++ b/src/gridfs.rs @@ -244,9 +244,7 @@ impl GridFsBucket { filter: Document, options: impl Into>, ) -> Result> { - let find_options = options - .into() - .map(FindOneOptions::from); + let find_options = options.into().map(FindOneOptions::from); self.files() .find_one(filter) .with_options(find_options) From 6eda33f31c667d94d2ddf57aeec366cb5a1717ce Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Mon, 18 Mar 2024 12:58:47 -0400 Subject: [PATCH 37/39] last fix --- 
src/test/spec/unified_runner/operation.rs | 3 +-- src/test/spec/v2_runner/operation.rs | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/src/test/spec/unified_runner/operation.rs b/src/test/spec/unified_runner/operation.rs index d6b179c89..f8e7164e3 100644 --- a/src/test/spec/unified_runner/operation.rs +++ b/src/test/spec/unified_runner/operation.rs @@ -1070,10 +1070,9 @@ impl TestOperation for FindOne { ) -> BoxFuture<'a, Result>> { async move { let collection = test_runner.get_collection(id).await; - let options: FindOptions = self.options.clone().into(); let result = collection .find_one(self.filter.clone().unwrap_or_default()) - .with_options(options) + .with_options(self.options.clone()) .await?; match result { Some(result) => Ok(Some(Bson::from(result).into())), diff --git a/src/test/spec/v2_runner/operation.rs b/src/test/spec/v2_runner/operation.rs index c0afd77ab..838f6553a 100644 --- a/src/test/spec/v2_runner/operation.rs +++ b/src/test/spec/v2_runner/operation.rs @@ -680,7 +680,7 @@ impl TestOperation for FindOne { async move { let action = collection .find_one(self.filter.clone().unwrap_or_default()) - .with_options(self.options.clone().map(FindOptions::from)); + .with_options(self.options.clone()); let result = match session { Some(session) => action.session(session).await?, None => action.await?, From 5840fb68086eea831618ef71772c66e2b7736887 Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Mon, 18 Mar 2024 16:12:22 -0400 Subject: [PATCH 38/39] remove patch --- Cargo.toml | 3 --- 1 file changed, 3 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index e268e5c14..87eeccdb5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -68,9 +68,6 @@ in-use-encryption-unstable = ["mongocrypt", "rayon", "num_cpus"] # TODO: pending https://github.com/tokio-rs/tracing/issues/2036 stop depending directly on log. tracing-unstable = ["tracing", "log"] -[patch."https://github.com/mongodb/bson-rust"] -bson = { git = "https://github.com/abr-egn/bson-rust", branch = "RUST-1512/append_bytes" } - [dependencies] action_macro = { path = "action_macro" } async-trait = "0.1.42" From d432400d9e04ed7d7529ff9f0195daa991e58d71 Mon Sep 17 00:00:00 2001 From: Abraham Egnor Date: Mon, 18 Mar 2024 16:49:55 -0400 Subject: [PATCH 39/39] merge fix --- src/test/spec/oidc.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/test/spec/oidc.rs b/src/test/spec/oidc.rs index 09d028b9d..d3ec5f5b8 100644 --- a/src/test/spec/oidc.rs +++ b/src/test/spec/oidc.rs @@ -1,3 +1,5 @@ +use bson::doc; + use crate::{ client::{ auth::{oidc, AuthMechanism, Credential},
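Editorial note, not part of the patch series: the sketch below illustrates how the builders split in PATCH 33 (and completed by the option fix in PATCH 36) are typically driven from caller code. The `User` type and collection handle are hypothetical; only the builder methods visible in the diffs above (`find`, `find_one`, `sort`, `limit`, `session`, awaiting the action directly) are assumed, so treat it as a usage sketch rather than driver documentation.

use futures_util::stream::TryStreamExt;
use mongodb::{bson::doc, error::Result, Collection};

#[derive(Debug, serde::Deserialize)]
struct User {
    name: String,
}

// Hypothetical caller of the split API: `find` yields a cursor over every match,
// while `find_one` deserializes at most one document.
async fn example(users: Collection<User>) -> Result<()> {
    // Cursor-returning query; cursor-only options such as `limit` remain on `Find`
    // (restored by PATCH 36 after the split).
    let mut cursor = users
        .find(doc! { "active": true })
        .sort(doc! { "name": 1 })
        .limit(10)
        .await?;
    while let Some(user) = cursor.try_next().await? {
        println!("{:?}", user);
    }

    // Single-document query; awaiting the `FindOne` action yields `Option<User>`.
    if let Some(user) = users.find_one(doc! { "name": "alice" }).await? {
        println!("found {:?}", user);
    }
    Ok(())
}

One design point the split makes visible: `FindOne` carries its optional session as a plain `Option<&mut ClientSession>` field instead of a type-state parameter, which is what lets PATCH 33 express it as a thin wrapper over `find` with `limit = -1`.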