Skip to content

Commit

Permalink
Make clippy happy after Rust v1.74.0
Browse files Browse the repository at this point in the history
  • Loading branch information
adzialocha committed Nov 16, 2023
1 parent d33102e commit efe445c
Show file tree
Hide file tree
Showing 21 changed files with 103 additions and 116 deletions.
31 changes: 11 additions & 20 deletions aquadoggo/src/db/query/filter.rs
Original file line number Diff line number Diff line change
Expand Up @@ -414,12 +414,12 @@ mod tests {
filter.get(0).unwrap().by,
FilterBy::Contains("Panda is the best".into())
);
assert_eq!(filter.get(0).unwrap().exclusive, false,);
assert!(!filter.get(0).unwrap().exclusive,);
assert_eq!(
filter.get(1).unwrap().by,
FilterBy::Contains("Llama".into())
);
assert_eq!(filter.get(1).unwrap().exclusive, true);
assert!(filter.get(1).unwrap().exclusive);
}

#[test]
Expand Down Expand Up @@ -450,7 +450,7 @@ mod tests {

assert_eq!(filter.len(), 1);
assert_eq!(filter.get(0).unwrap().field, field);
assert_eq!(filter.get(0).unwrap().exclusive, false);
assert!(!filter.get(0).unwrap().exclusive);
assert_eq!(
filter.get(0).unwrap().by,
FilterBy::Set(vec![panda, turtle, llama])
Expand Down Expand Up @@ -535,7 +535,7 @@ mod tests {
filter.get(0).unwrap().by,
FilterBy::Element(OperationValue::Boolean(false))
);
assert_eq!(filter.get(0).unwrap().exclusive, false);
assert!(!filter.get(0).unwrap().exclusive);

// Change exclusive filter manually
filter.add_not(
Expand All @@ -549,7 +549,7 @@ mod tests {
filter.get(0).unwrap().by,
FilterBy::Element(OperationValue::Boolean(true))
);
assert_eq!(filter.get(0).unwrap().exclusive, true);
assert!(filter.get(0).unwrap().exclusive);

// Change filter manually again
filter.add(
Expand All @@ -563,45 +563,36 @@ mod tests {
filter.get(0).unwrap().by,
FilterBy::Element(OperationValue::Boolean(false))
);
assert_eq!(filter.get(0).unwrap().exclusive, false);
assert!(!filter.get(0).unwrap().exclusive);
}

/// Checks that adding a filter for a field that is already filtered
/// overwrites the previous entry (the filter list keeps one entry per
/// field) and that the `exclusive` flag tracks the last `add` / `add_not`
/// call for that field.
///
/// NOTE(review): this span of the diff rendering interleaved the
/// pre-commit multi-line `Field::new("is_admin".into())` calls with their
/// post-commit one-line replacements; only the post-commit form is kept.
#[test]
fn overwrite_bool_fields() {
    let mut filter = Filter::new();

    // Add boolean filter
    filter.add(&Field::new("is_admin"), &OperationValue::Boolean(true));

    // Overwrite the same filter with other value
    filter.add(&Field::new("is_admin"), &OperationValue::Boolean(false));

    // .. check if it got correctly overwritten
    assert_eq!(filter.len(), 1);
    assert_eq!(
        filter.get(0).unwrap().by,
        FilterBy::Element(OperationValue::Boolean(false))
    );
    assert!(!filter.get(0).unwrap().exclusive);

    // Overwrite it again, but with an exclusive filter
    filter.add_not(&Field::new("is_admin"), &OperationValue::Boolean(false));

    // .. check if it got correctly overwritten
    assert_eq!(filter.len(), 1);
    assert_eq!(
        filter.get(0).unwrap().by,
        FilterBy::Element(OperationValue::Boolean(false))
    );
    assert!(filter.get(0).unwrap().exclusive);
}
}
2 changes: 1 addition & 1 deletion aquadoggo/src/db/query/test_utils.rs
Original file line number Diff line number Diff line change
Expand Up @@ -138,6 +138,6 @@ mod tests {
#[case] value: &[OperationValue],
#[case] expected: (String, FilterBy, bool),
) {
assert_eq!(parse_str(key, &value).expect("Should succeed"), expected);
assert_eq!(parse_str(key, value).expect("Should succeed"), expected);
}
}
10 changes: 5 additions & 5 deletions aquadoggo/src/db/query/validate.rs
Original file line number Diff line number Diff line change
Expand Up @@ -229,39 +229,39 @@ mod tests {
#[case::invalid_meta_field_type(
Select::default(),
Filter::new().meta_fields(&[
("documentId".into(), &["test".into()])
("documentId", &["test".into()])
]),
Order::default(),
"Filter type 'str' for field 'documentId' is not matching schema type 'relation(doggo_schema_0020b177ec1bf26dfb3b7010d473e6d44713b29b765b99c6e60ecbfae742de496543)'"
)]
#[case::invalid_field_type(
Select::default(),
Filter::new().fields(&[
("username".into(), &[2020.into()])
("username", &[2020.into()])
]),
Order::default(),
"Filter type 'int' for field 'username' is not matching schema type 'str'"
)]
#[case::invalid_interval(
Select::default(),
Filter::new().fields(&[
("is_admin_in".into(), &[true.into(), false.into()])
("is_admin_in", &[true.into(), false.into()])
]),
Order::default(),
"Can't apply set filter as field 'is_admin' is of type boolean"
)]
#[case::invalid_search(
Select::default(),
Filter::new().fields(&[
("age_contains".into(), &[22.into()])
("age_contains", &[22.into()])
]),
Order::default(),
"Can't apply search filter as field 'age' is not of type string"
)]
#[case::invalid_set_types(
Select::default(),
Filter::new().fields(&[
("username_in".into(), &["bubu".into(), 2020.into()])
("username_in", &["bubu".into(), 2020.into()])
]),
Order::default(),
"Filter type 'int' for field 'username' is not matching schema type 'str'"
Expand Down
10 changes: 5 additions & 5 deletions aquadoggo/src/db/stores/blob.rs
Original file line number Diff line number Diff line change
Expand Up @@ -410,7 +410,7 @@ mod tests {
fn get_blob(key_pair: KeyPair) {
test_runner(|mut node: TestNode| async move {
let blob_data = "Hello, World!".as_bytes();
let blob_view_id = add_blob(&mut node, &blob_data, 6, "text/plain", &key_pair).await;
let blob_view_id = add_blob(&mut node, blob_data, 6, "text/plain", &key_pair).await;
let document_id: DocumentId = blob_view_id.to_string().parse().unwrap();

// Get blob by document id
Expand Down Expand Up @@ -537,7 +537,7 @@ mod tests {
fn purge_blob(key_pair: KeyPair) {
test_runner(|mut node: TestNode| async move {
let blob_data = "Hello, World!".as_bytes();
let blob_view_id = add_blob(&mut node, &blob_data, 7, "text/plain", &key_pair).await;
let blob_view_id = add_blob(&mut node, blob_data, 7, "text/plain", &key_pair).await;

// There is one blob and two blob pieces in database.
//
Expand Down Expand Up @@ -580,7 +580,7 @@ mod tests {
let _ = populate_and_materialize(&mut node, &config).await;

let blob_data = "Hello, World!".as_bytes();
let blob_view_id = add_blob(&mut node, &blob_data, 7, "text/plain", &key_pair).await;
let blob_view_id = add_blob(&mut node, blob_data, 7, "text/plain", &key_pair).await;

// There is one blob and two blob pieces in database.
//
Expand Down Expand Up @@ -614,7 +614,7 @@ mod tests {
fn does_not_purge_blob_if_still_pinned(key_pair: KeyPair) {
test_runner(|mut node: TestNode| async move {
let blob_data = "Hello, World!".as_bytes();
let blob_view_id = add_blob(&mut node, &blob_data, 7, "text/plain", &key_pair).await;
let blob_view_id = add_blob(&mut node, blob_data, 7, "text/plain", &key_pair).await;

let _ = add_schema_and_documents(
&mut node,
Expand Down Expand Up @@ -658,7 +658,7 @@ mod tests {
fn purge_all_pieces_of_updated_blob(key_pair: KeyPair) {
test_runner(|mut node: TestNode| async move {
let blob_data = "Hello, World!".as_bytes();
let blob_view_id = add_blob(&mut node, &blob_data, 7, "text/plain", &key_pair).await;
let blob_view_id = add_blob(&mut node, blob_data, 7, "text/plain", &key_pair).await;

// Create a new blob piece.
let new_blob_pieces = add_document(
Expand Down
6 changes: 3 additions & 3 deletions aquadoggo/src/db/stores/document.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1323,7 +1323,7 @@ mod tests {
let document = node
.context
.store
.get_document_by_view_id(&document.view_id())
.get_document_by_view_id(document.view_id())
.await
.unwrap();
assert!(document.is_some());
Expand Down Expand Up @@ -1388,7 +1388,7 @@ mod tests {
assert_query(&node, "SELECT name FROM document_view_fields", 22).await;

// Purge one document from the database, we now expect half the rows to be remaining.
let result = node.context.store.purge_document(&document_id).await;
let result = node.context.store.purge_document(document_id).await;
assert!(result.is_ok(), "{:#?}", result);

assert_query(&node, "SELECT entry_hash FROM entries", 1).await;
Expand All @@ -1413,7 +1413,7 @@ mod tests {
let document_id = documents[0].id();
let public_key = config.authors[0].public_key();

let _ = node.context.store.purge_document(&document_id).await;
let _ = node.context.store.purge_document(document_id).await;

let result = next_args(
&node.context.store,
Expand Down
42 changes: 21 additions & 21 deletions aquadoggo/src/db/stores/query.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1530,7 +1530,7 @@ mod tests {
.fields()
.expect("Expected document fields")
.get(field)
.expect(&format!("Expected '{field}' field to exist in document"))
.unwrap_or_else(|| panic!("{}", "Expected '{field}' field to exist in document"))
.value()
.to_owned()
}
Expand Down Expand Up @@ -1640,7 +1640,7 @@ mod tests {
),
],
],
&key_pair,
key_pair,
)
.await
}
Expand Down Expand Up @@ -1867,9 +1867,9 @@ mod tests {
}

if view_ids.len() - 1 == index {
assert_eq!(pagination_data.has_next_page, false);
assert!(!pagination_data.has_next_page);
} else {
assert_eq!(pagination_data.has_next_page, true);
assert!(pagination_data.has_next_page);
}

assert_eq!(pagination_data.total_count, Some(view_ids.len() as u64));
Expand All @@ -1890,7 +1890,7 @@ mod tests {

assert_eq!(pagination_data.total_count, Some(view_ids.len() as u64));
assert_eq!(pagination_data.end_cursor, None);
assert_eq!(pagination_data.has_next_page, false);
assert!(!pagination_data.has_next_page);
assert_eq!(documents.len(), 0);
});
}
Expand Down Expand Up @@ -1944,9 +1944,9 @@ mod tests {
}

if view_ids_len - 1 == index {
assert_eq!(pagination_data.has_next_page, false);
assert!(!pagination_data.has_next_page);
} else {
assert_eq!(pagination_data.has_next_page, true);
assert!(pagination_data.has_next_page);
}

assert_eq!(pagination_data.total_count, Some(5));
Expand All @@ -1967,7 +1967,7 @@ mod tests {

assert_eq!(pagination_data.total_count, Some(5));
assert_eq!(pagination_data.end_cursor, None);
assert_eq!(pagination_data.has_next_page, false);
assert!(!pagination_data.has_next_page);
assert_eq!(documents.len(), 0);
});
}
Expand Down Expand Up @@ -2002,7 +2002,7 @@ mod tests {
);

// Select the pinned relation list "venues" of the first visited document
let list = RelationList::new_pinned(&visited_view_ids[0], "venues".into());
let list = RelationList::new_pinned(&visited_view_ids[0], "venues");

let (pagination_data, documents) = node
.context
Expand All @@ -2023,7 +2023,7 @@ mod tests {
);

// Select the pinned relation list "venues" of the second visited document
let list = RelationList::new_pinned(&visited_view_ids[1], "venues".into());
let list = RelationList::new_pinned(&visited_view_ids[1], "venues");

let (pagination_data, documents) = node
.context
Expand Down Expand Up @@ -2093,7 +2093,7 @@ mod tests {
);

// Select the pinned relation list "venues" for the visited document
let list = RelationList::new_pinned(&visited_view_id, "venues".into());
let list = RelationList::new_pinned(&visited_view_id, "venues");

let (_, documents) = node
.context
Expand Down Expand Up @@ -2121,7 +2121,7 @@ mod tests {
);

// Select the pinned relation list "venues" for the visited document
let list = RelationList::new_pinned(&visited_view_id, "venues".into());
let list = RelationList::new_pinned(&visited_view_id, "venues");

let (_, documents) = node
.context
Expand Down Expand Up @@ -2188,7 +2188,7 @@ mod tests {
let mut cursor: Option<PaginationCursor> = None;

// Select the pinned relation list "venues" of the second visited document
let list = RelationList::new_pinned(&visited_view_ids[0], "venues".into());
let list = RelationList::new_pinned(&visited_view_ids[0], "venues");

let mut args = Query::new(
&Pagination::new(
Expand Down Expand Up @@ -2227,9 +2227,9 @@ mod tests {
}

if view_ids_len - 1 == index {
assert_eq!(pagination_data.has_next_page, false);
assert!(!pagination_data.has_next_page);
} else {
assert_eq!(pagination_data.has_next_page, true);
assert!(pagination_data.has_next_page);
}

assert_eq!(pagination_data.total_count, Some(7));
Expand All @@ -2250,7 +2250,7 @@ mod tests {

assert_eq!(pagination_data.total_count, Some(7));
assert_eq!(pagination_data.end_cursor, None);
assert_eq!(pagination_data.has_next_page, false);
assert!(!pagination_data.has_next_page);
assert_eq!(documents.len(), 0);
});
}
Expand Down Expand Up @@ -2326,7 +2326,7 @@ mod tests {
let mut cursor: Option<PaginationCursor> = None;

// Select the pinned relation list "venues" of the first visited document
let list = RelationList::new_pinned(&visited_view_ids[0], "venues".into());
let list = RelationList::new_pinned(&visited_view_ids[0], "venues");

let mut args = Query::new(
&Pagination::new(
Expand Down Expand Up @@ -2368,9 +2368,9 @@ mod tests {

// Check if `has_next_page` flag is correct
if documents_len - 1 == index as u64 {
assert_eq!(pagination_data.has_next_page, false);
assert!(!pagination_data.has_next_page);
} else {
assert_eq!(pagination_data.has_next_page, true);
assert!(pagination_data.has_next_page);
}

// Check if pagination info is correct
Expand All @@ -2396,7 +2396,7 @@ mod tests {
.await
.expect("Query failed");

assert_eq!(pagination_data.has_next_page, false);
assert!(!pagination_data.has_next_page);
assert_eq!(pagination_data.total_count, Some(documents_len));
assert_eq!(pagination_data.end_cursor, None);
assert_eq!(documents.len(), 0);
Expand Down Expand Up @@ -2459,7 +2459,7 @@ mod tests {
);

// Select the pinned relation list "venues" of the first visited document
let list = RelationList::new_pinned(&visited_view_ids[0], "venues".into());
let list = RelationList::new_pinned(&visited_view_ids[0], "venues");

let result = node
.context
Expand Down
2 changes: 1 addition & 1 deletion aquadoggo/src/graphql/input_values/fields_filter.rs
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ use crate::graphql::utils::filter_name;
pub fn build_filter_input_object(schema: &Schema) -> InputObject {
// Construct the document fields object which will be named `<schema_id>Filter`
let schema_field_name = filter_name(schema.id());
let mut filter_input = InputObject::new(&schema_field_name);
let mut filter_input = InputObject::new(schema_field_name);

// For every field in the schema we create a type with a resolver
for (name, field_type) in schema.fields().iter() {
Expand Down
Loading

0 comments on commit efe445c

Please sign in to comment.