Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
39 commits
Select commit Hold shift + click to select a range
476a692
convert find
abr-egn Mar 1, 2024
e98a589
update find callers
abr-egn Mar 1, 2024
e525206
wip find_one conversion
abr-egn Mar 1, 2024
b6fef3c
allow where clause
abr-egn Mar 1, 2024
cade439
more find_one wip
abr-egn Mar 1, 2024
6254506
wip proc_macro
abr-egn Mar 1, 2024
acf434f
proc_macro progress
abr-egn Mar 1, 2024
5f77e83
full proc macro
abr-egn Mar 2, 2024
ae05bdb
swap to new macro impl
abr-egn Mar 2, 2024
10d39c1
update find_one callers
abr-egn Mar 4, 2024
eacd2b4
convert find_one_and_delete
abr-egn Mar 4, 2024
5fb898a
generify FindAndModify action impl
abr-egn Mar 4, 2024
53bacb5
add sync
abr-egn Mar 4, 2024
599f8a5
update find_one_and_delete callers
abr-egn Mar 4, 2024
a618615
eureka
abr-egn Mar 4, 2024
243459d
update find_one_and_update callers
abr-egn Mar 5, 2024
5e305c6
update find_one_and_replace callers
abr-egn Mar 5, 2024
43bfd0b
insert wip: move conversion out of insert op
abr-egn Mar 7, 2024
657b11e
insert wip: inefficient execute
abr-egn Mar 7, 2024
ba3d7e6
less inefficient
abr-egn Mar 7, 2024
6b289cd
update insert_many callers
abr-egn Mar 7, 2024
e87ccbf
convert insert_one
abr-egn Mar 7, 2024
c7dd86e
convert insert_one callers
abr-egn Mar 7, 2024
0a526df
post-merge fixes
abr-egn Mar 11, 2024
602ae3a
convert replace_one
abr-egn Mar 11, 2024
1c50076
tweak action
abr-egn Mar 11, 2024
5881f8f
tweak documentation
abr-egn Mar 11, 2024
9b280d1
fix doctests
abr-egn Mar 11, 2024
bdd3a91
simplify
abr-egn Mar 11, 2024
d5430d8
pre-review tidy
abr-egn Mar 11, 2024
6e5abbd
fix find
abr-egn Mar 11, 2024
5047b15
minor fixes
abr-egn Mar 11, 2024
04e12be
split findone from find
abr-egn Mar 18, 2024
aa3f703
avoid clone
abr-egn Mar 18, 2024
e173583
fixes
abr-egn Mar 18, 2024
2e512ad
fix find options
abr-egn Mar 18, 2024
6eda33f
last fix
abr-egn Mar 18, 2024
5840fb6
remove patch
abr-egn Mar 18, 2024
d432400
merge fix
abr-egn Mar 18, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,7 @@ in-use-encryption-unstable = ["mongocrypt", "rayon", "num_cpus"]
tracing-unstable = ["tracing", "log"]

[dependencies]
action_macro = { path = "action_macro" }
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Proc macros have to be in different crates than where they're used.

async-trait = "0.1.42"
base64 = "0.13.0"
bitflags = "1.1.0"
Expand Down
1 change: 1 addition & 0 deletions action_macro/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
/target/
15 changes: 15 additions & 0 deletions action_macro/Cargo.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
[package]
name = "action_macro"
version = "0.1.0"
edition = "2021"
license = "Apache-2.0"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
proc-macro2 = "1.0.78"
quote = "1.0.35"
syn = { version = "2.0.52", features = ["full", "parsing", "proc-macro"] }

[lib]
proc-macro = true
220 changes: 220 additions & 0 deletions action_macro/src/lib.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,220 @@
extern crate proc_macro;

use quote::quote;
use syn::{
braced,
parenthesized,
parse::{Parse, ParseStream},
parse_macro_input,
parse_quote,
parse_quote_spanned,
spanned::Spanned,
Block,
Error,
Generics,
Ident,
Lifetime,
Token,
Type,
};

/// Generates:
/// * an `IntoFuture` executing the given method body
/// * an opaque wrapper type for the future in case we want to do something more fancy than
/// BoxFuture.
/// * a `run` method for sync execution, optionally with a wrapper function
#[proc_macro]
pub fn action_impl(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
let ActionImpl {
generics,
lifetime,
action,
future_name,
exec_self_mut,
exec_output,
exec_body,
sync_wrap,
} = parse_macro_input!(input as ActionImpl);

let mut unbounded_generics = generics.clone();
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is the part that caused the conversion. With declarative macros, there's no way to say "match on a standard generics clause", let alone "give me a copy of that with the constraints stripped". Both of those are trivial for a proc macro.

This was needed because the remaining actions had various constraints on the generics that previous ones didn't (Serialize, etc.) and attempting to handle that in declarative mode rapidly got very ugly.

As a side benefit, this also removed the need for the various nested macro hacks to deal with optional parameters.

for lt in unbounded_generics.lifetimes_mut() {
lt.bounds.clear();
}
for ty in unbounded_generics.type_params_mut() {
ty.bounds.clear();
}

let SyncWrap {
sync_arg_mut,
sync_arg,
sync_output,
sync_body,
} = sync_wrap.unwrap_or_else(|| {
parse_quote! { fn sync_wrap(out) -> #exec_output { out } }
});

quote! {
impl #generics crate::action::private::Sealed for #action { }

impl #generics crate::action::Action for #action { }

impl #generics std::future::IntoFuture for #action {
type Output = #exec_output;
type IntoFuture = #future_name #unbounded_generics;

fn into_future(#exec_self_mut self) -> Self::IntoFuture {
#future_name (Box::pin(async move {
#exec_body
}))
}
}

pub struct #future_name #generics (crate::BoxFuture<#lifetime, #exec_output>);

impl #generics std::future::Future for #future_name #unbounded_generics {
type Output = #exec_output;

fn poll(mut self: std::pin::Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> std::task::Poll<Self::Output> {
self.0.as_mut().poll(cx)
}
}

#[cfg(feature = "sync")]
impl #generics #action {
/// Synchronously execute this action.
pub fn run(self) -> #sync_output {
let #sync_arg_mut #sync_arg = crate::sync::TOKIO_RUNTIME.block_on(std::future::IntoFuture::into_future(self));
#sync_body
}
}
}.into()
}

// Parsed form of the macro input:
//
// impl<generics> Action for ActionType {
//     type Future = FutureName;
//     async fn execute([mut] self) -> OutType { <exec body> }
//     [SyncWrap]
// }
struct ActionImpl {
    /// Generics of the action type, bounds included.
    generics: Generics,
    /// The single lifetime parameter, or a synthesized `'static` if there is none.
    lifetime: Lifetime,
    /// The action type the traits are implemented for.
    action: Type,
    /// Name for the generated opaque future wrapper type.
    future_name: Ident,
    /// Optional `mut` on the `execute` receiver.
    exec_self_mut: Option<Token![mut]>,
    /// Output type of `execute` (and of the generated future).
    exec_output: Type,
    /// Body of the async `execute` method.
    exec_body: Block,
    /// Optional wrapper applied to the output in the generated sync `run`.
    sync_wrap: Option<SyncWrap>,
}

impl Parse for ActionImpl {
    /// Parses the pseudo-`impl` block accepted by `action_impl!`, validating the fixed
    /// identifiers (`Action`, `Future`, `execute`) and extracting the pieces needed for
    /// code generation.
    fn parse(input: ParseStream) -> syn::Result<Self> {
        // impl<generics> Action for ActionType
        input.parse::<Token![impl]>()?;
        let generics: Generics = input.parse()?;
        // The generated future type carries a single lifetime; reject ambiguous input.
        let mut lifetime = None;
        for lt in generics.lifetimes() {
            if lifetime.is_some() {
                return Err(input.error("only one lifetime argument permitted"));
            }
            lifetime = Some(lt);
        }
        let lifetime = match lifetime {
            Some(lt) => lt.lifetime.clone(),
            None => parse_quote_spanned! { generics.span() => 'static },
        };
        parse_name(input, "Action")?;
        input.parse::<Token![for]>()?;
        let action = input.parse()?;

        let impl_body;
        braced!(impl_body in input);

        // type Future = FutureName;
        impl_body.parse::<Token![type]>()?;
        parse_name(&impl_body, "Future")?;
        impl_body.parse::<Token![=]>()?;
        let future_name = impl_body.parse()?;
        impl_body.parse::<Token![;]>()?;

        // async fn execute([mut] self) -> OutType { <exec body> }
        impl_body.parse::<Token![async]>()?;
        impl_body.parse::<Token![fn]>()?;
        parse_name(&impl_body, "execute")?;
        let exec_args;
        parenthesized!(exec_args in impl_body);
        let exec_self_mut = exec_args.parse()?;
        exec_args.parse::<Token![self]>()?;
        if !exec_args.is_empty() {
            return Err(exec_args.error("unexpected token"));
        }
        impl_body.parse::<Token![->]>()?;
        let exec_output = impl_body.parse()?;
        let exec_body = impl_body.parse()?;

        // Optional SyncWrap.
        let sync_wrap = if impl_body.peek(Token![fn]) {
            Some(impl_body.parse()?)
        } else {
            None
        };

        if !impl_body.is_empty() {
            // Fixed: this previously called `exec_args.error(...)`, attaching the diagnostic
            // to the (already fully consumed) argument list instead of the leftover tokens in
            // the impl body.
            return Err(impl_body.error("unexpected token"));
        }

        Ok(ActionImpl {
            generics,
            lifetime,
            action,
            future_name,
            exec_self_mut,
            exec_output,
            exec_body,
            sync_wrap,
        })
    }
}

// Parsed form of the optional sync wrapper:
//
// fn sync_wrap([mut] out) -> OutType { <out body> }
struct SyncWrap {
    /// Optional `mut` on the wrapper argument.
    sync_arg_mut: Option<Token![mut]>,
    /// Name binding the awaited async output inside the wrapper body.
    sync_arg: Ident,
    /// Return type of the generated sync `run` method.
    sync_output: Type,
    /// Body mapping the async output to the sync return value.
    sync_body: Block,
}

impl Parse for SyncWrap {
fn parse(input: ParseStream) -> syn::Result<Self> {
input.parse::<Token![fn]>()?;
parse_name(input, "sync_wrap")?;
let args_input;
parenthesized!(args_input in input);
let sync_arg_mut = args_input.parse()?;
let sync_arg = args_input.parse()?;
if !args_input.is_empty() {
return Err(args_input.error("unexpected token"));
}
input.parse::<Token![->]>()?;
let sync_output = input.parse()?;
let sync_body = input.parse()?;

Ok(SyncWrap {
sync_arg_mut,
sync_arg,
sync_output,
sync_body,
})
}
}

/// Parse an identifier and require that it matches an expected literal value.
///
/// Returns an error spanned to the identifier if it does not match.
fn parse_name(input: ParseStream, name: &str) -> syn::Result<()> {
    let ident = input.parse::<Ident>()?;
    // `Ident: PartialEq<T: AsRef<str>>`, so compare directly instead of allocating a
    // `String` via `to_string` (clippy::cmp_owned).
    if ident != name {
        return Err(Error::new(
            ident.span(),
            format!("expected '{}', got '{}'", name, ident),
        ));
    }
    Ok(())
}
2 changes: 1 addition & 1 deletion manual/src/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ e.g.
# let client = Client::with_uri_str("mongodb://example.com").await?;
let collection = client.database("foo").collection("bar");
let handle = tokio::task::spawn(async move {
collection.insert_one(doc! { "x": 1 }, None).await
collection.insert_one(doc! { "x": 1 }).await
});

tokio::time::timeout(Duration::from_secs(5), handle).await???;
Expand Down
30 changes: 14 additions & 16 deletions manual/src/encryption.md
Original file line number Diff line number Diff line change
Expand Up @@ -187,16 +187,16 @@ async fn main() -> Result<()> {
// Clear old data.
coll.drop().await?;

coll.insert_one(doc! { "encryptedField": "123456789" }, None)
coll.insert_one(doc! { "encryptedField": "123456789" })
.await?;
println!("Decrypted document: {:?}", coll.find_one(None, None).await?);
println!("Decrypted document: {:?}", coll.find_one(doc! {}).await?);
let unencrypted_coll = Client::with_uri_str(URI)
.await?
.database(&encrypted_namespace.db)
.collection::<Document>(&encrypted_namespace.coll);
println!(
"Encrypted document: {:?}",
unencrypted_coll.find_one(None, None).await?
unencrypted_coll.find_one(doc! {}).await?
);

Ok(())
Expand Down Expand Up @@ -294,19 +294,19 @@ async fn main() -> Result<()> {
.validator(doc! { "$jsonSchema": schema })
.await?;

coll.insert_one(doc! { "encryptedField": "123456789" }, None)
coll.insert_one(doc! { "encryptedField": "123456789" })
.await?;
println!("Decrypted document: {:?}", coll.find_one(None, None).await?);
println!("Decrypted document: {:?}", coll.find_one(doc! {}).await?);
let unencrypted_coll = Client::with_uri_str(URI)
.await?
.database(&encrypted_namespace.db)
.collection::<Document>(&encrypted_namespace.coll);
println!(
"Encrypted document: {:?}",
unencrypted_coll.find_one(None, None).await?
unencrypted_coll.find_one(doc! {}).await?
);
// This would return a Write error with the message "Document failed validation".
// unencrypted_coll.insert_one(doc! { "encryptedField": "123456789" }, None)
// unencrypted_coll.insert_one(doc! { "encryptedField": "123456789" })
// .await?;

Ok(())
Expand Down Expand Up @@ -407,11 +407,10 @@ async fn main() -> Result<()> {
db.create_collection("encryptedCollection").await?;
coll.insert_one(
doc! { "_id": 1, "firstName": "Jane", "lastName": "Doe" },
None,
)
.await?;
let docs: Vec<_> = coll
.find(doc! {"firstName": "Jane"}, None)
.find(doc! {"firstName": "Jane"})
.await?
.try_collect()
.await?;
Expand Down Expand Up @@ -540,7 +539,6 @@ async fn main() -> Result<()> {
"encryptedIndexed": insert_payload_indexed,
"encryptedUnindexed": insert_payload_unindexed,
},
None,
)
.await?;

Expand All @@ -556,7 +554,7 @@ async fn main() -> Result<()> {
// Find the document we inserted using the encrypted payload.
// The returned document is automatically decrypted.
let doc = coll
.find_one(doc! { "encryptedIndexed": find_payload }, None)
.find_one(doc! { "encryptedIndexed": find_payload })
.await?;
println!("Returned document: {:?}", doc);

Expand Down Expand Up @@ -634,9 +632,9 @@ async fn main() -> Result<()> {
Algorithm::AeadAes256CbcHmacSha512Deterministic,
)
.await?;
coll.insert_one(doc! { "encryptedField": encrypted_field }, None)
coll.insert_one(doc! { "encryptedField": encrypted_field })
.await?;
let mut doc = coll.find_one(None, None).await?.unwrap();
let mut doc = coll.find_one(doc! {}).await?.unwrap();
println!("Encrypted document: {:?}", doc);

// Explicitly decrypt the field:
Expand Down Expand Up @@ -735,18 +733,18 @@ async fn main() -> Result<()> {
Algorithm::AeadAes256CbcHmacSha512Deterministic,
)
.await?;
coll.insert_one(doc! { "encryptedField": encrypted_field }, None)
coll.insert_one(doc! { "encryptedField": encrypted_field })
.await?;
// Automatically decrypts any encrypted fields.
let doc = coll.find_one(None, None).await?.unwrap();
let doc = coll.find_one(doc! {}).await?.unwrap();
println!("Decrypted document: {:?}", doc);
let unencrypted_coll = Client::with_uri_str(URI)
.await?
.database("test")
.collection::<Document>("coll");
println!(
"Encrypted document: {:?}",
unencrypted_coll.find_one(None, None).await?
unencrypted_coll.find_one(doc! {}).await?
);

Ok(())
Expand Down
9 changes: 5 additions & 4 deletions manual/src/reading.md
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ let coll = client.database("items").collection::<Item>("in_stock");

for i in 0..5 {
// Perform operations that work with directly our model.
coll.insert_one(Item { id: i }, None).await;
coll.insert_one(Item { id: i }).await;
}
#
# Ok(())
Expand Down Expand Up @@ -89,9 +89,10 @@ use futures::stream::TryStreamExt;
use mongodb::{bson::doc, options::FindOptions};

// Query the books in the collection with a filter and an option.
let filter = doc! { "author": "George Orwell" };
let find_options = FindOptions::builder().sort(doc! { "title": 1 }).build();
let mut cursor = typed_collection.find(filter, find_options).await?;
let mut cursor = typed_collection
.find(doc! { "author": "George Orwell" })
.sort(doc! { "title": 1 })
.await?;

// Iterate over the results of the cursor.
while let Some(book) = cursor.try_next().await? {
Expand Down
Loading