-
Notifications
You must be signed in to change notification settings - Fork 0
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Setting up tide-disco bindings #9
Changes from 2 commits
7b9b0c8
212ec65
d1b0cf5
8e9a91e
e8ff75e
4d4bed9
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,58 @@ | ||
// Copyright (c) 2022 Espresso Systems (espressosys.com) | ||
// This file is part of the HotShot Query Service library. | ||
// | ||
// This program is free software: you can redistribute it and/or modify it under the terms of the GNU | ||
// General Public License as published by the Free Software Foundation, either version 3 of the | ||
// License, or (at your option) any later version. | ||
// This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without | ||
// even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
// General Public License for more details. | ||
// You should have received a copy of the GNU General Public License along with this program. If not, | ||
// see <https://www.gnu.org/licenses/>. | ||
|
||
use std::fs; | ||
use std::path::Path; | ||
use tide_disco::api::{Api, ApiError}; | ||
use toml::{map::Entry, Value}; | ||
|
||
pub(crate) fn load_api<State, Error>( | ||
path: Option<impl AsRef<Path>>, | ||
default: &str, | ||
extensions: impl IntoIterator<Item = Value>, | ||
) -> Result<Api<State, Error>, ApiError> { | ||
let mut toml = match path { | ||
Some(path) => load_toml(path.as_ref())?, | ||
None => toml::from_str(default).map_err(|err| ApiError::CannotReadToml { | ||
reason: err.to_string(), | ||
})?, | ||
}; | ||
for extension in extensions { | ||
merge_toml(&mut toml, extension); | ||
} | ||
Api::new(toml) | ||
} | ||
|
||
fn merge_toml(into: &mut Value, from: Value) { | ||
if let (Value::Table(into), Value::Table(from)) = (into, from) { | ||
for (key, value) in from { | ||
match into.entry(key) { | ||
Entry::Occupied(mut entry) => merge_toml(entry.get_mut(), value), | ||
Entry::Vacant(entry) => { | ||
entry.insert(value); | ||
} | ||
} | ||
} | ||
} | ||
} | ||
|
||
fn load_toml(path: &Path) -> Result<Value, ApiError> { | ||
let bytes = fs::read(path).map_err(|err| ApiError::CannotReadToml { | ||
reason: err.to_string(), | ||
})?; | ||
let string = std::str::from_utf8(&bytes).map_err(|err| ApiError::CannotReadToml { | ||
reason: err.to_string(), | ||
})?; | ||
toml::from_str(string).map_err(|err| ApiError::CannotReadToml { | ||
reason: err.to_string(), | ||
}) | ||
} |
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,17 +1,43 @@ | ||
use std::marker::PhantomData; | ||
use std::{hash::Hash, marker::PhantomData}; | ||
|
||
use hotshot_types::traits::node_implementation::NodeType; | ||
use commit::{Commitment, Committable}; | ||
use hotshot_types::traits::{node_implementation::NodeType, BlockPayload}; | ||
use serde::{Deserialize, Serialize}; | ||
use sha2::digest::{generic_array::GenericArray, typenum}; | ||
use sha2::{Digest, Sha256}; | ||
|
||
pub type BlockHash = GenericArray<u8, typenum::consts::U32>; | ||
pub struct HashableBlock<I: NodeType>( | ||
<I as NodeType>::BlockPayload, | ||
<<I as NodeType>::BlockPayload as BlockPayload>::Metadata, | ||
); | ||
pub type BlockHash<I: NodeType> = Commitment<HashableBlock<I>>; | ||
Check failure on line 12 in src/block_metadata.rs GitHub Actions / clippybounds on generic parameters are not enforced in type aliases
Check failure on line 12 in src/block_metadata.rs GitHub Actions / clippybounds on generic parameters are not enforced in type aliases
|
||
impl<I: NodeType> Default for HashableBlock<I> { | ||
fn default() -> Self { | ||
let (bp, bm) = <I as NodeType>::BlockPayload::from_transactions(Vec::new()) | ||
.unwrap_or_else(|_| <I as NodeType>::BlockPayload::genesis()); | ||
Self(bp, bm) | ||
} | ||
} | ||
|
||
impl<I: NodeType> Committable for HashableBlock<I> { | ||
fn commit(&self) -> Commitment<Self> { | ||
let builder = commit::RawCommitmentBuilder::new("Hashable Block Payload"); | ||
let mut hasher = Sha256::new(); | ||
jbearer marked this conversation as resolved.
Show resolved
Hide resolved
|
||
let encoded = if let Ok(encoder) = self.0.encode() { | ||
encoder.collect() | ||
} else { | ||
Vec::new() | ||
}; | ||
hasher.update(&encoded); | ||
let generic_array = hasher.finalize(); | ||
builder.generic_byte_array(&generic_array).finalize() | ||
} | ||
} | ||
|
||
/// Summary of a block a builder is offering, keyed by its commitment.
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[serde(bound = "")]
pub struct BlockMetadata<I: NodeType> {
    // Commitment to the offered block (see `BlockHash` / `HashableBlock`).
    block_hash: BlockHash<I>,
    // presumably the payload size in bytes — TODO confirm with the builder impl
    block_size: u64,
    // presumably the fee the builder offers for inclusion — TODO confirm
    offered_fee: u64,
    // Ties the type to `I`; NOTE(review): may be redundant now that
    // `block_hash` already mentions `I` — confirm before removing.
    _phantom: PhantomData<I>,
}
|
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,117 @@ | ||
use std::{fmt::Display, path::PathBuf}; | ||
|
||
use clap::Args; | ||
use derive_more::From; | ||
use futures::FutureExt; | ||
use hotshot_types::{ | ||
data::VidCommitment, | ||
Check failure on line 7 in src/builder.rs GitHub Actions / clippyunused import: `data::VidCommitment`
Check failure on line 7 in src/builder.rs GitHub Actions / clippyunused import: `data::VidCommitment`
|
||
traits::{node_implementation::NodeType, signature_key::SignatureKey}, | ||
}; | ||
use serde::{Deserialize, Serialize}; | ||
use snafu::{OptionExt, ResultExt, Snafu}; | ||
Check failure on line 11 in src/builder.rs GitHub Actions / clippyunused import: `OptionExt`
|
||
use tagged_base64::TaggedBase64; | ||
use tide_disco::{api::ApiError, method::ReadState, Api, RequestError, StatusCode}; | ||
|
||
use crate::{ | ||
api::load_api, | ||
block_metadata::BlockHash, | ||
Check failure on line 17 in src/builder.rs GitHub Actions / clippyunused imports: `block_metadata::BlockHash`, `self`
Check failure on line 17 in src/builder.rs GitHub Actions / clippyunused imports: `block_metadata::BlockHash`, `self`
|
||
data_source::{self, BuilderDataSource}, | ||
}; | ||
|
||
/// Command-line / environment options for serving the builder API module.
#[derive(Args, Default)]
pub struct Options {
    /// Path to an API specification file overriding the built-in builder API spec.
    #[arg(long = "builder-api-path", env = "HOTSHOT_BUILDER_API_PATH")]
    pub api_path: Option<PathBuf>,

    /// Additional API specification files to merge with `builder-api-path`.
    ///
    /// These optional files may contain route definitions for application-specific routes that have
    /// been added as extensions to the basic builder API.
    #[arg(
        long = "builder-extension",
        env = "HOTSHOT_BUILDER_EXTENSIONS",
        value_delimiter = ','
    )]
    pub extensions: Vec<toml::Value>,
}
|
||
/// Errors a builder data source can report when serving block requests.
#[derive(Clone, Debug, Snafu, Deserialize, Serialize)]
#[snafu(visibility(pub))]
pub enum BuildError {
    /// The requested resource does not exist or is not known to this builder service.
    NotFound,
    /// The requested resource exists but is not currently available.
    Missing,
    /// There was an error while trying to fetch the requested resource.
    #[snafu(display("Failed to fetch requested resource: {message}"))]
    Error { message: String },
}
|
||
/// Top-level error type returned by the builder API endpoints.
#[derive(Clone, Debug, From, Snafu, Deserialize, Serialize)]
#[snafu(visibility(pub))]
pub enum Error {
    /// A malformed or missing request parameter (wraps tide-disco's error).
    Request {
        source: RequestError,
    },
    /// Failure while listing blocks available from a given parent.
    #[snafu(display("error building block from {resource}: {source}"))]
    #[from(ignore)]
    BlockAvailable {
        source: BuildError,
        resource: String,
    },
    /// Failure while claiming a specific offered block.
    #[snafu(display("error claiming block {resource}: {source}"))]
    #[from(ignore)]
    BlockClaim {
        source: BuildError,
        resource: String,
    },
    /// Catch-all for application-defined errors with an explicit HTTP status.
    Custom {
        message: String,
        status: StatusCode,
    },
}
|
||
pub fn define_api<State, Types: NodeType>(options: &Options) -> Result<Api<State, Error>, ApiError> | ||
where | ||
State: 'static + Send + Sync + ReadState, | ||
<State as ReadState>::State: Send + Sync + BuilderDataSource<Types>, | ||
Types: NodeType, | ||
<<Types as NodeType>::SignatureKey as SignatureKey>::PureAssembledSignatureType: | ||
for<'a> TryFrom<&'a TaggedBase64> + Into<TaggedBase64> + Display, | ||
for<'a> <<<Types as NodeType>::SignatureKey as SignatureKey>::PureAssembledSignatureType as TryFrom< | ||
&'a TaggedBase64, | ||
>>::Error: Display, | ||
{ | ||
let mut api = load_api::<State, Error>( | ||
options.api_path.as_ref(), | ||
include_str!("../api/builder.toml"), | ||
options.extensions.clone(), | ||
)?; | ||
api.with_version("0.0.1".parse().unwrap()) | ||
.get("available_blocks", |req, state| { | ||
async move { | ||
let hash = req.blob_param("parent_hash")?; | ||
state | ||
.get_available_blocks(&hash) | ||
.await | ||
.context(BlockAvailableSnafu { | ||
resource: hash.to_string(), | ||
}) | ||
} | ||
.boxed() | ||
})? | ||
.get("claim_block", |req, state| { | ||
async move { | ||
let hash = req.blob_param("block_hash")?; | ||
let signature = req.blob_param("signature")?; | ||
state | ||
.claim_block(&hash, &signature) | ||
.await | ||
.context(BlockClaimSnafu { | ||
resource: hash.to_string(), | ||
}) | ||
} | ||
.boxed() | ||
})?; | ||
Ok(api) | ||
} |
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,13 +1,33 @@ | ||
use std::sync::Arc; | ||
Check failure on line 1 in src/data_source.rs GitHub Actions / clippyunused import: `std::sync::Arc`
|
||
|
||
use async_trait::async_trait; | ||
use hotshot_types::{data::VidCommitment, traits::{node_implementation::NodeType, signature_key::SignatureKey}}; | ||
use commit::Committable; | ||
Check failure on line 4 in src/data_source.rs GitHub Actions / clippyunused import: `commit::Committable`
|
||
use hotshot_types::{ | ||
data::VidCommitment, | ||
traits::{node_implementation::NodeType, signature_key::SignatureKey, BlockPayload}, | ||
Check failure on line 7 in src/data_source.rs GitHub Actions / clippyunused import: `BlockPayload`
Check failure on line 7 in src/data_source.rs GitHub Actions / clippyunused import: `BlockPayload`
|
||
}; | ||
use tagged_base64::TaggedBase64; | ||
|
||
use crate::block_metadata::{BlockHash, BlockMetadata}; | ||
use crate::{ | ||
block_metadata::{BlockHash, BlockMetadata}, | ||
builder::BuildError, | ||
}; | ||
|
||
/// Read/submit interface a block builder exposes to this service.
#[async_trait]
pub trait BuilderDataSource<I>
where
    I: NodeType,
    <<I as NodeType>::SignatureKey as SignatureKey>::PureAssembledSignatureType:
        for<'a> TryFrom<&'a TaggedBase64> + Into<TaggedBase64>,
{
    /// List the blocks this builder offers on top of the given parent payload commitment.
    async fn get_available_blocks(
        &self,
        for_parent: &VidCommitment,
    ) -> Result<Vec<BlockMetadata<I>>, BuildError>;

    /// Claim an offered block, supplying the requester's signature.
    // NOTE(review): presumably the signature is over `block_hash` — confirm
    // against the builder implementation.
    async fn claim_block(
        &self,
        block_hash: &BlockHash<I>,
        signature: &<<I as NodeType>::SignatureKey as SignatureKey>::PureAssembledSignatureType,
    ) -> Result<I::BlockPayload, BuildError>;

    /// Submit a transaction to the builder.
    async fn submit_txn(&self, txn: <I as NodeType>::Transaction) -> Result<(), BuildError>;
}
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,4 +1,5 @@ | ||
mod block_metadata; | ||
mod data_source; | ||
mod query_data; | ||
|
||
mod api; | ||
pub mod block_metadata; | ||
pub mod builder; | ||
pub mod data_source; | ||
pub mod query_data; |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Why do we need default for this?
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
The tide-disco type resolver needed it for extracting the parameter from
TaggedBase64
forclaim_block
. I didn't really take the time to dig into why...There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
That's odd. I don't see any
Default
requirement in https://github.com/EspressoSystems/tide-disco/blob/main/src/request.rs. Could you point me to where the requirement is coming from? I feel like this shouldn't be required (although it's not a big deal for now and does not have to block this PR)There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Looking at it again, I think it's because of the way I originally invoked
.blob_param()
, which I changed before the push...... but this should be gone once I update with Artemii's changes to HotShot.