Bump bson to 1.1

bson_11
Thijs Cadier 4 years ago
parent 984fc9ea2b
commit 2123095be0
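
This commit moves the driver from bson 0.12 to bson 1.1. The recurring API changes in the diff below are: `bson::encode_document`/`bson::decode_document` become `Document::to_writer`/`Document::from_reader`, the `doc!` macro switches from `=>` to `:`, `Bson::I32`/`Bson::I64` become `Bson::Int32`/`Bson::Int64`, the error types move to `bson::de::Error`, `bson::ser::Error` and `bson::document::ValueAccessError`, and binary values use the `Binary` struct instead of a `(BinarySubtype, Vec<u8>)` tuple. The dedicated `as_document_utf8_lossy` path is dropped; judging by the updated test, `Document::from_reader` already replaces invalid UTF-8 with U+FFFD. A minimal sketch of the encode/decode change, assuming bson 1.1 (`roundtrip` is an illustrative helper, not driver code):

```rust
use bson::{doc, Document};

// Encode a document to BSON bytes and read it back with the bson 1.x API
// this commit adopts (to_writer/from_reader instead of
// encode_document/decode_document).
fn roundtrip(document: &Document) -> Result<Document, Box<dyn std::error::Error>> {
    let mut buffer = Vec::new();
    // bson 0.12: bson::encode_document(&mut buffer, document)?;
    document.to_writer(&mut buffer)?;

    // bson 0.12: bson::decode_document(&mut slice)?;
    let decoded = Document::from_reader(&mut buffer.as_slice())?;
    Ok(decoded)
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // The doc! macro now uses `:` instead of `=>`.
    let document = doc! { "key": "value", "count": 5i32 };
    let decoded = roundtrip(&document)?;
    assert_eq!(decoded.get_str("key")?, "value");
    assert_eq!(decoded.get_i32("count")?, 5);
    Ok(())
}
```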

@@ -19,7 +19,7 @@ is-it-maintained-open-issues = { repository = "thijsc/mongo-rust-driver" }
 [dependencies]
 libc = "^0.2"
 log = "^0.4"
-bson = "^0.12"
+bson = "^1.1"
 serde = "1.0"
 serde_derive = "1.0"

@@ -25,7 +25,7 @@ impl Bsonc {
     pub fn from_document(document: &bson::Document) -> Result<Bsonc> {
         let mut buffer = Vec::new();
-        bson::encode_document(&mut buffer, document)?;
+        document.to_writer(&mut buffer)?;
         let inner = unsafe {
             bindings::bson_new_from_data(
@@ -60,28 +60,7 @@ impl Bsonc {
             slice::from_raw_parts(data_ptr, data_len)
         };
-        Ok(bson::decode_document(&mut slice)?)
-    }
-    /// Decode a bson from the C side to a document with lossy UTF-8 decoding
-    pub fn as_document_utf8_lossy(&self) -> Result<bson::Document> {
-        assert!(!self.inner.is_null());
-        // This pointer should not be modified or freed
-        // See: http://mongoc.org/libbson/current/bson_get_data.html
-        let data_ptr = unsafe { bindings::bson_get_data(self.inner) };
-        assert!(!data_ptr.is_null());
-        let data_len = unsafe {
-            let bson = *self.inner;
-            bson.len
-        } as usize;
-        let mut slice = unsafe {
-            slice::from_raw_parts(data_ptr, data_len)
-        };
-        Ok(bson::decode_document_utf8_lossy(&mut slice)?)
+        Ok(bson::Document::from_reader(&mut slice)?)
     }
     pub fn as_json(&self) -> String {
@@ -123,7 +102,7 @@ impl Drop for Bsonc {
 mod tests {
     #[test]
     fn test_bsonc_from_and_as_document() {
-        let document = doc! { "key" => "value" };
+        let document = doc! { "key": "value" };
         let bsonc = super::Bsonc::from_document(&document).unwrap();
         let decoded = bsonc.as_document().unwrap();
@@ -134,16 +113,16 @@ mod tests {
     fn test_bsonc_from_and_as_document_invalid_utf8() {
         let bytes = b"\x80\xae".to_vec();
         let value = unsafe { String::from_utf8_unchecked(bytes) };
-        let document = doc! { "key" => value };
+        let document = doc! { "key": value };
         let bsonc = super::Bsonc::from_document(&document).unwrap();
-        let decoded = bsonc.as_document_utf8_lossy().unwrap();
+        let decoded = bsonc.as_document().unwrap();
         assert_eq!(decoded.get_str("key").unwrap(), "��");
     }
     #[test]
     fn test_bsonc_as_json() {
-        let document = doc! { "key" => "value" };
+        let document = doc! { "key": "value" };
         let bsonc = super::Bsonc::from_document(&document).unwrap();
         assert_eq!("{ \"key\" : \"value\" }".to_owned(), bsonc.as_json());
     }

@@ -305,7 +305,7 @@ impl<'a> Client<'a> {
         };
         if success == 1 {
-            match reply.as_document_utf8_lossy() {
+            match reply.as_document() {
                Ok(document) => return Ok(document),
                Err(error) => return Err(error.into())
            }
@@ -347,7 +347,7 @@ impl<'a> Client<'a> {
         };
         if success == 1 {
-            match reply.as_document_utf8_lossy() {
+            match reply.as_document() {
                Ok(document) => return Ok(document),
                Err(error) => return Err(error.into())
            }

@@ -342,7 +342,7 @@ impl<'a> Collection<'a> {
         };
         if success == 1 {
-            match reply.as_document_utf8_lossy() {
+            match reply.as_document() {
                Ok(document) => return Ok(document),
                Err(error) => return Err(error.into())
            }
@@ -547,7 +547,7 @@ impl<'a> Collection<'a> {
         };
         if success == 1 {
-            match reply.as_document_utf8_lossy() {
+            match reply.as_document() {
                Ok(document) => return Ok(document),
                Err(error) => return Err(error.into())
            }
@@ -884,7 +884,7 @@ impl<'a> BulkOperation<'a> {
            )
         };
-        let document = match reply.as_document_utf8_lossy() {
+        let document = match reply.as_document() {
            Ok(document) => document,
            Err(error) => return Err(BulkOperationError{error: error.into(), reply: doc!{}})
         };

@@ -205,7 +205,7 @@ impl<'a> Iterator for TailingCursor<'a> {
            // Add the last seen id to the query if it's present.
            match self.last_seen_id.take() {
                Some(id) => {
-                    self.query.insert_bson("_id".to_string(), Bson::Document(doc!{ "$gt" => id }));
+                    self.query.insert("_id".to_string(), Bson::Document(doc!{ "$gt": id }));
                },
                None => ()
            };
@@ -339,7 +339,7 @@ fn batch_to_array(doc: Document) -> Result<(Option<DocArray>,Option<CursorId>)>
     bson::from_bson(Bson::Document(doc.clone()))
        .map_err(|err| {
            error!("cannot read batch from db: {}", err);
-            ValueAccessError(bson::ValueAccessError::NotPresent)
+            ValueAccessError(bson::document::ValueAccessError::NotPresent)
        });
     doc_result.map(|v| {
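
The `insert_bson` call above is gone in bson 1.x; `Document::insert` is generic over `Into<Bson>`, so nested documents go in directly. A minimal sketch of the replacement call shape, assuming bson 1.1 (the `query` document here is illustrative, not the cursor's actual state):

```rust
use bson::oid::ObjectId;
use bson::{doc, Bson, Document};

fn main() {
    // bson 1.x: Document::insert accepts any Into<Bson>, so the nested
    // { "$gt": id } document needs no separate insert_bson call.
    let mut query = Document::new();
    let id = ObjectId::new();
    query.insert("_id", doc! { "$gt": id });

    assert!(matches!(query.get("_id"), Some(Bson::Document(_))));
}
```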

@@ -146,7 +146,7 @@ impl<'a> Database<'a> {
         };
         if success == 1 {
-            match reply.as_document_utf8_lossy() {
+            match reply.as_document() {
                Ok(document) => return Ok(document),
                Err(error) => return Err(error.into())
            }

@@ -3,9 +3,11 @@ use std::fmt;
 use std::borrow::Cow;
 use std::ffi::CStr;
-use bson::{DecoderError,EncoderError,ValueAccessError,Document};
 use std::ffi::NulError;
+use bson::{de,ser,Document};
+use bson::document::ValueAccessError;
 use crate::mongoc::bindings;
 /// Wrapper for all errors that can occur in the driver.
@@ -13,9 +15,9 @@ pub enum MongoError {
    /// Error in the underlying C driver.
    Bsonc(BsoncError),
    /// Error decoding Bson.
-    Decoder(DecoderError),
+    Decoder(de::Error),
    /// Error encoding Bson.
-    Encoder(EncoderError),
+    Encoder(ser::Error),
    /// Error accessing a value on a Bson document.
    ValueAccessError(ValueAccessError),
    /// Invalid params error that can be reported by the underlying C driver.
@@ -63,14 +65,14 @@ impl error::Error for MongoError {
    }
 }
-impl From<DecoderError> for MongoError {
-    fn from(error: DecoderError) -> MongoError {
+impl From<de::Error> for MongoError {
+    fn from(error: de::Error) -> MongoError {
        MongoError::Decoder(error)
    }
 }
-impl From<EncoderError> for MongoError {
-    fn from(error: EncoderError) -> MongoError {
+impl From<ser::Error> for MongoError {
+    fn from(error: ser::Error) -> MongoError {
        MongoError::Encoder(error)
    }
 }
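
For reference, a minimal sketch of wrapping the relocated bson 1.x error types, mirroring the `From` impls in the hunk above (`MyError` is a placeholder enum, not the driver's `MongoError`):

```rust
use bson::document::ValueAccessError;
use bson::{de, ser};

// bson 1.x moved DecoderError/EncoderError to de::Error/ser::Error and
// ValueAccessError under bson::document.
#[derive(Debug)]
enum MyError {
    Decoder(de::Error),
    Encoder(ser::Error),
    ValueAccess(ValueAccessError),
}

impl From<de::Error> for MyError {
    fn from(error: de::Error) -> MyError {
        MyError::Decoder(error)
    }
}

impl From<ser::Error> for MyError {
    fn from(error: ser::Error) -> MyError {
        MyError::Encoder(error)
    }
}

impl From<ValueAccessError> for MyError {
    fn from(error: ValueAccessError) -> MyError {
        MyError::ValueAccess(error)
    }
}
```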

@@ -6,7 +6,7 @@ use chrono::prelude::*;
 use mongo_driver::client::{ClientPool,Uri};
-use bson::{bson,doc};
+use bson::{doc,Binary};
 use bson::oid::ObjectId;
 use bson::spec::BinarySubtype;
@@ -24,16 +24,19 @@ fn test_bson_encode_decode() {
     let datetime = Utc.ymd(2014, 7, 8).and_hms(9, 10, 11);
     let document = doc! {
-        "_id" => (ObjectId::new().unwrap()),
-        "floating_point" => 10.0,
-        "string" => "a value",
-        "array" => [10, 20, 30],
-        "doc" => {"key" => 1},
-        "bool" => true,
-        "i32" => 1i32,
-        "i64" => 1i64,
-        "datetime" => datetime,
-        "binary_generic" => (BinarySubtype::Generic, vec![0, 1, 2, 3, 4])
+        "_id": ObjectId::new(),
+        "floating_point": 10.0,
+        "string": "a value",
+        "array": [10, 20, 30],
+        "doc": {"key": 1},
+        "bool": true,
+        "i32": 1i32,
+        "i64": 1i64,
+        "datetime": datetime,
+        "binary": (Binary {
+            subtype: BinarySubtype::Generic,
+            bytes: vec![0, 1, 2, 3, 4]
+        })
     };
     assert!(collection.insert(&document, None).is_ok());
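
The binary field change above reflects bson 1.x representing binary data with the `Binary` struct rather than a `(BinarySubtype, Vec<u8>)` tuple. A standalone sketch, assuming bson 1.1:

```rust
use bson::spec::BinarySubtype;
use bson::{doc, Binary, Bson};

fn main() {
    // bson 0.12: "binary": (BinarySubtype::Generic, vec![0, 1, 2, 3, 4])
    let document = doc! {
        "binary": Binary {
            subtype: BinarySubtype::Generic,
            bytes: vec![0, 1, 2, 3, 4],
        }
    };

    // The value round-trips as Bson::Binary holding the same struct.
    match document.get("binary") {
        Some(Bson::Binary(binary)) => assert_eq!(binary.bytes, vec![0, 1, 2, 3, 4]),
        other => panic!("unexpected value: {:?}", other),
    }
}
```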

@@ -3,7 +3,7 @@ extern crate mongo_driver;
 use std::env;
-use bson::{bson,doc};
+use bson::doc;
 use mongo_driver::client::{ClientPool,Uri};
 #[test]
@@ -33,7 +33,7 @@ fn test_basics() {
     let bulk_operation = collection.create_bulk_operation(None);
-    let document = doc! {"key_1" => "Value 1"};
+    let document = doc! {"key_1": "Value 1"};
     bulk_operation.insert(&document).expect("Could not insert");
     assert!(bulk_operation.execute().is_ok());
@@ -54,7 +54,7 @@ fn test_utf8() {
     let bulk_operation = collection.create_bulk_operation(None);
-    let document = doc! {"key_1" => "kācaṃ śaknomyattum; nopahinasti mām."};
+    let document = doc! {"key_1": "kācaṃ śaknomyattum; nopahinasti mām."};
     bulk_operation.insert(&document).expect("Could not insert");
     assert!(bulk_operation.execute().is_ok());
@@ -82,8 +82,8 @@ fn test_insert_remove_replace_update_extended() {
     let bulk_operation = collection.create_bulk_operation(None);
     let document = doc! {
-        "key_1" => "Value 1",
-        "key_2" => "Value 2"
+        "key_1": "Value 1",
+        "key_2": "Value 2"
     };
     for _ in 0..5 {
         bulk_operation.insert(&document).unwrap();
@@ -94,7 +94,7 @@ fn test_insert_remove_replace_update_extended() {
        assert_eq!(
            result.ok().unwrap().get("nInserted").unwrap(),
-            &bson::Bson::I32(5)
+            &bson::Bson::Int32(5)
        );
        assert_eq!(5, collection.count(&doc!{}, None).unwrap());
    }
@@ -102,7 +102,7 @@ fn test_insert_remove_replace_update_extended() {
     let query = doc!{};
     let update_document = doc! {
-        "$set" => {"key_1" => "Value update"}
+        "$set": {"key_1": "Value update"}
     };
     // Update one
@@ -120,7 +120,7 @@ fn test_insert_remove_replace_update_extended() {
        assert_eq!(
            result.ok().unwrap().get("nModified").unwrap(),
-            &bson::Bson::I32(1)
+            &bson::Bson::Int32(1)
        );
        let first_document = collection.find(&doc!{}, None).unwrap().next().unwrap().unwrap();
@@ -147,7 +147,7 @@ fn test_insert_remove_replace_update_extended() {
        assert_eq!(
            result.ok().unwrap().get("nModified").unwrap(),
-            &bson::Bson::I32(4)
+            &bson::Bson::Int32(4)
        );
        collection.find(&doc!{}, None).unwrap().next().unwrap().unwrap();
@@ -162,7 +162,7 @@ fn test_insert_remove_replace_update_extended() {
     // Replace one
     {
-        let replace_document = doc! { "key_1" => "Value replace" };
+        let replace_document = doc! { "key_1": "Value replace" };
        let bulk_operation = collection.create_bulk_operation(None);
        bulk_operation.replace_one(
@@ -176,7 +176,7 @@ fn test_insert_remove_replace_update_extended() {
        assert_eq!(
            result.ok().unwrap().get("nModified").unwrap(),
-            &bson::Bson::I32(1)
+            &bson::Bson::Int32(1)
        );
        let first_document = collection.find(&doc!{}, None).unwrap().next().unwrap().unwrap();
@@ -198,7 +198,7 @@ fn test_insert_remove_replace_update_extended() {
        assert_eq!(
            result.ok().unwrap().get("nRemoved").unwrap(),
-            &bson::Bson::I32(1)
+            &bson::Bson::Int32(1)
        );
        assert_eq!(4, collection.count(&query, None).unwrap());
    }
@@ -213,7 +213,7 @@ fn test_insert_remove_replace_update_extended() {
        assert_eq!(
            result.ok().unwrap().get("nRemoved").unwrap(),
-            &bson::Bson::I32(4)
+            &bson::Bson::Int32(4)
        );
        assert_eq!(0, collection.count(&query, None).unwrap());
    }

@@ -6,7 +6,7 @@ use std::path::PathBuf;
 use std::sync::Arc;
 use std::thread;
-use bson::{bson,doc};
+use bson::doc;
 use mongo_driver::client::{ClientPool,SslOptions,Uri};
 #[test]
@@ -95,8 +95,8 @@ fn test_read_command_with_opts() {
     let collection = client.get_collection(db_name, coll_name);
     let document = doc! {
-        "key_1" => "Value 1",
-        "key_2" => "kācaṃ śaknomyattum; nopahinasti mām. \u{0}"
+        "key_1": "Value 1",
+        "key_2": "kācaṃ śaknomyattum; nopahinasti mām. \u{0}"
     };
     collection.insert(&document, None).expect("Could not insert document");
@@ -161,6 +161,6 @@ fn test_ssl_connection_success() {
     let client = pool.pop();
     let database = client.get_database("admin");
-    let result = database.command_simple(doc!{"ping" => 1}, None).unwrap();
+    let result = database.command_simple(doc!{"ping": 1}, None).unwrap();
     assert!(result.contains_key("ok"));
 }

@@ -1,7 +1,7 @@
 extern crate bson;
 extern crate mongo_driver;
-use bson::{bson,doc};
+use bson::doc;
 use mongo_driver::CommandAndFindOptions;
 use mongo_driver::collection::{CountOptions,FindAndModifyOperation};
@@ -17,15 +17,15 @@ fn test_aggregate() {
     collection.drop().unwrap_or(());
     for _ in 0..5 {
-        assert!(collection.insert(&doc!{"key" => 1}, None).is_ok());
+        assert!(collection.insert(&doc!{"key": 1}, None).is_ok());
     }
     let pipeline = doc!{
-        "pipeline" => [
+        "pipeline": [
            {
-                "$group" => {
-                    "_id" => "$key",
-                    "total" => {"$sum" => "$key"}
+                "$group": {
+                    "_id": "$key",
+                    "total": {"$sum": "$key"}
                }
            }
        ]
@@ -44,7 +44,7 @@ fn test_command() {
     let client = pool.pop();
     let collection = client.get_collection("rust_driver_test", "items");
-    let command = doc! { "ping" => 1 };
+    let command = doc! { "ping": 1 };
     let result = collection.command(command, None).unwrap().next().unwrap().unwrap();
     assert!(result.contains_key("ok"));
@@ -57,7 +57,7 @@ fn test_command_simple() {
     let client = pool.pop();
     let collection = client.get_collection("rust_driver_test", "items");
-    let command = doc! { "ping" => 1 };
+    let command = doc! { "ping": 1 };
     let result = collection.command_simple(command, None).unwrap();
     assert!(result.contains_key("ok"));
@@ -75,8 +75,8 @@ fn test_mutation_and_finding() {
     assert_eq!("items", collection.get_name().to_mut());
     let document = doc! {
-        "key_1" => "Value 1",
-        "key_2" => "kācaṃ śaknomyattum; nopahinasti mām. \u{0}"
+        "key_1": "Value 1",
+        "key_2": "kācaṃ śaknomyattum; nopahinasti mām. \u{0}"
     };
     collection.insert(&document, None).expect("Could not insert document");
     {
@@ -92,7 +92,7 @@ fn test_mutation_and_finding() {
     }
     let second_document = doc! {
-        "key_1" => "Value 3"
+        "key_1": "Value 3"
     };
     assert!(collection.insert(&second_document, None).is_ok());
@@ -118,12 +118,12 @@ fn test_mutation_and_finding() {
     );
     // Update the second document
-    let update = doc!{"$set" => {"key_1" => "Value 4"}};
+    let update = doc!{"$set": {"key_1": "Value 4"}};
     assert!(collection.update(&second_document, &update, None).is_ok());
     // Reload and check value
     let query_after_update = doc! {
-        "key_1" => "Value 4"
+        "key_1": "Value 4"
     };
     let mut found_document = collection.find(&query_after_update, None).unwrap().next().unwrap().unwrap();
     assert_eq!(
@@ -163,7 +163,7 @@ fn test_mutation_and_finding() {
        skip: 0,
        limit: 0,
        batch_size: 0,
-        fields: Some(doc! { "key_1" => true }),
+        fields: Some(doc! { "key_1": true }),
        read_prefs: None
     };
@@ -194,10 +194,10 @@ fn test_find_and_modify() {
     // Upsert something, it should now exist
     let query = doc! {
-        "key_1" => "Value 1"
+        "key_1": "Value 1"
     };
     let update = doc! {
-        "$set" => {"content" => 1i32}
+        "$set": {"content": 1i32}
     };
     let result = collection.find_and_modify(
        &query,
@@ -211,7 +211,7 @@ fn test_find_and_modify() {
     // Update this record
     let update2 = doc! {
-        "$set" => {"content" => 2i32}
+        "$set": {"content": 2i32}
     };
     let result = collection.find_and_modify(
        &query,

@@ -5,7 +5,7 @@ use std::sync::Arc;
 use std::thread;
 use std::time::Duration;
-use bson::{bson,doc};
+use bson::doc;
 use mongo_driver::client::{ClientPool,Uri};
 use mongo_driver::Result;
@@ -17,7 +17,7 @@ fn test_cursor() {
     let client = pool.pop();
     let mut collection = client.get_collection("rust_driver_test", "cursor_items");
-    let document = doc! { "key" => "value" };
+    let document = doc! { "key": "value" };
     collection.drop().unwrap_or(());
     for _ in 0..10 {
@@ -45,8 +45,8 @@ fn test_tailing_cursor() {
     database.get_collection("not_capped").drop().unwrap_or(());
     let options = doc! {
-        "capped" => true,
-        "size" => 100000
+        "capped": true,
+        "size": 100000
     };
     let capped_collection = database.create_collection("capped", Some(&options)).unwrap();
     let normal_collection = database.create_collection("not_capped", None).unwrap();
@@ -56,7 +56,7 @@ fn test_tailing_cursor() {
     let failing_result = failing_cursor.into_iter().next().expect("Nothing in iterator");
     assert!(failing_result.is_err());
-    let document = doc! { "key_1" => "Value 1" };
+    let document = doc! { "key_1": "Value 1" };
     // Insert a first document into the collection
     capped_collection.insert(&document, None).unwrap();
@@ -120,7 +120,7 @@ fn test_batch_cursor() {
     assert_eq!(
        result.ok().unwrap().get("nInserted").unwrap(), // why is this an i32?
-        &bson::Bson::I32(NUM_TO_TEST)
+        &bson::Bson::Int32(NUM_TO_TEST)
     );
     assert_eq!(NUM_TO_TEST as i64, collection.count(&doc!{}, None).unwrap());
 }

@@ -1,7 +1,7 @@
 extern crate bson;
 extern crate mongo_driver;
-use bson::{bson,doc};
+use bson::doc;
 use mongo_driver::client::{ClientPool,Uri};
@@ -13,7 +13,7 @@ fn test_command() {
     let client = pool.pop();
     let database = client.get_database("rust_test");
-    let command = doc! { "ping" => 1 };
+    let command = doc! { "ping": 1 };
     let result = database.command(command, None).unwrap().next().unwrap().unwrap();
     assert!(result.contains_key("ok"));
@@ -26,7 +26,7 @@ fn test_command_simple() {
     let client = pool.pop();
     let database = client.get_database("rust_test");
-    let command = doc! { "ping" => 1 };
+    let command = doc! { "ping": 1 };
     let result = database.command_simple(command, None).unwrap();
     assert!(result.contains_key("ok"));
