Skip to content

Commit

Permalink
fn json rpc: retry on empty events (#19469)
Browse files Browse the repository at this point in the history
## Description 

when querying a transaction with showEvents set, it's possible that the
transaction effects are available while the events are not, as events are
outputs of full-node (fn) local execution. Thus, instead of returning empty
events and making the whole transaction response incomplete, we should retry;
in that sense, as long as the full node responds, the response is complete.

## Test plan 

ci

---

## Release notes

Check each box that your changes affect. If none of the boxes relate to
your changes, release notes aren't required.

For each box you select, include information after the relevant heading
that describes the impact of your changes that a user might notice and
any actions they must take to implement updates.

- [ ] Protocol: 
- [ ] Nodes (Validators and Full nodes): 
- [ ] Indexer: 
- [ ] JSON-RPC: 
- [ ] GraphQL: 
- [ ] CLI: 
- [ ] Rust SDK:
- [ ] REST API:
  • Loading branch information
gegaowp authored Sep 25, 2024
1 parent 34c9e3e commit c22271c
Show file tree
Hide file tree
Showing 3 changed files with 72 additions and 25 deletions.
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions crates/sui-json-rpc/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ edition = "2021"

[dependencies]
arc-swap.workspace = true
backoff.workspace = true
chrono.workspace = true
fastcrypto.workspace = true
jsonrpsee.workspace = true
Expand Down
95 changes: 70 additions & 25 deletions crates/sui-json-rpc/src/read_api.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,12 @@

use std::collections::HashMap;
use std::sync::Arc;
use std::time::Duration;

use anyhow::anyhow;
use async_trait::async_trait;
use backoff::future::retry;
use backoff::ExponentialBackoff;
use futures::future::join_all;
use indexmap::map::IndexMap;
use itertools::Itertools;
Expand Down Expand Up @@ -316,54 +319,96 @@ impl ReadApi {

if opts.show_events {
trace!("getting events");

let event_digests_list = temp_response
let events_digests_list = temp_response
.values()
.filter_map(|cache_entry| match &cache_entry.effects {
Some(eff) => eff.events_digest().cloned(),
None => None,
})
.collect::<Vec<TransactionEventsDigest>>();
// filter out empty events digest, as they do not have to be read from the DB
let empty_events_digest = TransactionEvents::default().digest();
let events_digests_list = events_digests_list
.into_iter()
.filter(|d| d != &empty_events_digest)
.collect::<Vec<_>>();

// fetch events from the DB
let events = self
.transaction_kv_store
.multi_get_events(&event_digests_list)
let mut events_digest_to_events = if events_digests_list.is_empty() {
HashMap::new()
} else {
// fetch events from the DB with retry, retry each 0.5s for 3s
let backoff = ExponentialBackoff {
max_elapsed_time: Some(Duration::from_secs(3)),
multiplier: 1.0,
..ExponentialBackoff::default()
};
let events = retry(backoff, || async {
match self
.transaction_kv_store
.multi_get_events(&events_digests_list)
.await
{
// Only return Ok when all the queried transaction events are found, otherwise retry
// until timeout, then return Err.
Ok(events) if !events.contains(&None) => Ok(events),
Ok(_) => Err(backoff::Error::transient(Error::UnexpectedError(
"Events not found, transaction execution may be incomplete.".into(),
))),
Err(e) => Err(backoff::Error::permanent(Error::UnexpectedError(format!(
"Failed to call multi_get_events: {e:?}"
)))),
}
})
.await
.map_err(|e| {
Error::UnexpectedError(format!("Failed to call multi_get_events for transactions {digests:?} with event digests {event_digests_list:?}: {e:?}"))
Error::UnexpectedError(format!(
"Retrieving events with retry failed for events digests {events_digests_list:?}: {e:?}"
))
})?
.into_iter();

// construct a hashmap of event digests -> events for fast lookup
let event_digest_to_events = event_digests_list
.into_iter()
.zip(events)
.collect::<HashMap<_, _>>();
// construct a hashmap of events digests -> events for fast lookup
let events_map = events_digests_list
.into_iter()
.zip(events)
.collect::<HashMap<_, _>>();
// Double check that all events are `Some` and their digests match the key
for (events_digest, events) in events_map.iter() {
if let Some(events) = events {
if &events.digest() != events_digest {
return Err(Error::UnexpectedError(format!(
"Events digest {events_digest:?} does not match the key {:?}",
events.digest()
)));
}
} else {
return Err(Error::UnexpectedError(format!(
"Events of digest {events_digest:?} is None, but it should not be"
)));
}
}
events_map
};
events_digest_to_events.insert(empty_events_digest, Some(TransactionEvents::default()));

// fill cache with the events
for (_, cache_entry) in temp_response.iter_mut() {
let transaction_digest = cache_entry.digest;
let event_digest: Option<Option<TransactionEventsDigest>> = cache_entry
.effects
.as_ref()
.map(|e| e.events_digest().cloned());
let event_digest = event_digest.flatten();
if event_digest.is_some() {
// safe to unwrap because `is_some` is checked
let event_digest = event_digest.as_ref().unwrap();
let events= event_digest_to_events
.get(event_digest)
if let Some(events_digest) =
cache_entry.effects.as_ref().and_then(|e| e.events_digest())
{
let events = events_digest_to_events
.get(events_digest)
.cloned()
.unwrap_or_else(|| panic!("Expect event digest {event_digest:?} to be found in cache for transaction {transaction_digest}"))
.unwrap_or_else(|| panic!("Expect event digest {events_digest:?} to be found in cache for transaction {transaction_digest}"))
.map(|events| to_sui_transaction_events(self, cache_entry.digest, events));
match events {
Some(Ok(e)) => cache_entry.events = Some(e),
Some(Err(e)) => cache_entry.errors.push(e.to_string()),
None => {
error!("Failed to fetch events with event digest {event_digest:?} for txn {transaction_digest}");
error!("Failed to fetch events with event digest {events_digest:?} for txn {transaction_digest}");
cache_entry.errors.push(format!(
"Failed to fetch events with event digest {event_digest:?}",
"Failed to fetch events with event digest {events_digest:?}",
))
}
}
Expand Down

0 comments on commit c22271c

Please sign in to comment.