Support dynamically specified networks #187

Open · wants to merge 14 commits into main

Changes from 2 commits
2 changes: 1 addition & 1 deletion deployment/config/indexer/cardano_node_docker.mainnet.yml
@@ -15,6 +15,6 @@ sink:
   db:
     type: postgres
     database_url: postgresql://carp:1234@postgres:5432/carp_mainnet
-  network: mainnet # preview / preprod / testnet
+  network: mainnet # preview / preprod / testnet / custom
 
 start_block:
2 changes: 1 addition & 1 deletion deployment/config/indexer/oura_docker.mainnet.yml
@@ -8,6 +8,6 @@ sink:
   db:
     type: postgres
     database_url: postgresql://carp:1234@postgres:5432/carp_mainnet
-  network: mainnet # preview / preprod / testnet
+  network: mainnet # preview / preprod / testnet / custom
 
 start_block:
2 changes: 1 addition & 1 deletion docs/docs/indexer/run.md
@@ -22,7 +22,7 @@ sink:
   db:
     type: postgres
     database_url: postgresql://carp:1234@localhost:5432/carp_mainnet
-  network: mainnet # preview / preprod / testnet
+  network: mainnet # preview / preprod / testnet / custom
 
 start_block:
 ```
10 changes: 6 additions & 4 deletions indexer/Cargo.toml
@@ -9,10 +9,12 @@ strip = true
 
 [dependencies]
 # [core]
-dcspark-core = { git = "https://github.com/dcSpark/dcspark-core.git", rev = "572af17e3e22101dee64e0999049a571aea26e0f" }
-dcspark-blockchain-source = { git = "https://github.com/dcSpark/dcspark-core.git", rev = "572af17e3e22101dee64e0999049a571aea26e0f" }
-multiverse = { git = "https://github.com/dcSpark/dcspark-core.git", rev = "572af17e3e22101dee64e0999049a571aea26e0f" }
-
+dcspark-core = { git = "https://github.com/dcSpark/dcspark-core.git", rev = "98c21b8d6c4f72a1c1813eaec3b9ccf1ff67106e" }
+dcspark-blockchain-source = { git = "https://github.com/dcSpark/dcspark-core.git", rev = "98c21b8d6c4f72a1c1813eaec3b9ccf1ff67106e" }
+#dcspark-core = { path = "../../dcspark-core/core" }
+#dcspark-blockchain-source = { path = "../../dcspark-core/blockchain-source" }
+#multiverse = { path = "../../dcspark-core/multiverse" }
+multiverse = { git = "https://github.com/dcSpark/dcspark-core.git", rev = "98c21b8d6c4f72a1c1813eaec3b9ccf1ff67106e" }
 # [local]
 entity = { path = "entity" }
 migration = { path = "migration" }
2 changes: 1 addition & 1 deletion indexer/configs/cardano_node.yml
@@ -13,6 +13,6 @@ sink:
   db:
     type: postgres
     database_url: postgresql://carp:1234@localhost:5432/carp_mainnet
-  network: mainnet # preview / preprod / testnet
+  network: mainnet # preview / preprod / testnet / custom
 
 start_block:
38 changes: 38 additions & 0 deletions indexer/configs/custom.yml
@@ -0,0 +1,38 @@
+source:
+  type: cardano_net
+  relay:
+    - localhost
+    - 3001
+
+sink:
+  type: cardano
+  db:
+    type: postgres
+    database_url: postgresql://carp:1234@localhost:5432/carp_custom5
+  network: custom # preview / preprod / testnet / custom
+  genesis_folder: /home/user/Cardano/carp/indexer/genesis/
+  custom_config:
+    chain_info:
+      network_id: 1
+      protocol_magic: 42
+    relay:
+      - "localhost"
+      - 3001
+    from:
+      BlockHeader:
+        slot_nb: 1
+        hash: "ba8066f73eb9cf4ad9adf38051c54d3a51d92cb98561cffc1f202b1b97739cd5"
+    genesis_parent: "0ded594a3411f6d3236228abc1e2ef8c2a21e09d859ea23bfc2182f92853cba8"
+    genesis:
+      BlockHeader:
+        slot_nb: 0
+        hash: "7a32184d9e0068b0fa75fd0ecaad798f9bc573d4921c519b12968e26ff0747a3"
+    shelley_era_config:
+      first_slot: 0
+      start_epoch: 0
+      known_time: 1722355346
+      slot_length: 1
+      epoch_length_seconds: 500

@SebastienGllmt (Contributor, Author) commented on Sep 14, 2024:

Not convinced about this approach. How would you convert the yaci-devkit to this data type dynamically?

Especially because the genesis config can contain data that we actually have to insert into the database (e.g. the Shelley genesis config can contain default pools and registrations for the Shelley era that Carp would have to know about).

A contributor replied:

You can generate the config file dynamically with an env variable; wouldn't that be enough?


+start_block:
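
For orientation, the sketch below mirrors the keys of `sink.custom_config` as plain serde structs and re-parses the example values from this file. These structs are illustrative stand-ins only; the field actually deserializes into `dcspark_blockchain_source::cardano::NetworkConfiguration` from dcspark-core, whose real field names and types may differ.

```rust
use serde::Deserialize;

// Illustrative stand-ins that mirror the YAML keys above, not the real
// dcspark_blockchain_source types.
#[derive(Debug, Deserialize)]
struct CustomNetworkConfig {
    chain_info: ChainInfo,
    relay: (String, u16),
    from: PointLike,
    genesis_parent: String,
    genesis: PointLike,
    shelley_era_config: ShelleyEraConfig,
}

#[derive(Debug, Deserialize)]
struct ChainInfo {
    network_id: u8,
    protocol_magic: u32,
}

// `from` / `genesis` use an externally tagged `BlockHeader` variant,
// matching the `BlockHeader:` key in the YAML.
#[derive(Debug, Deserialize)]
enum PointLike {
    BlockHeader { slot_nb: u64, hash: String },
}

#[derive(Debug, Deserialize)]
struct ShelleyEraConfig {
    first_slot: u64,
    start_epoch: u64,
    known_time: u64,           // appears to be a Unix timestamp for the era start
    slot_length: u64,          // seconds per slot
    epoch_length_seconds: u64,
}

fn main() -> anyhow::Result<()> {
    // The custom_config block from the example file above.
    let yaml = r#"
chain_info:
  network_id: 1
  protocol_magic: 42
relay:
  - "localhost"
  - 3001
from:
  BlockHeader:
    slot_nb: 1
    hash: "ba8066f73eb9cf4ad9adf38051c54d3a51d92cb98561cffc1f202b1b97739cd5"
genesis_parent: "0ded594a3411f6d3236228abc1e2ef8c2a21e09d859ea23bfc2182f92853cba8"
genesis:
  BlockHeader:
    slot_nb: 0
    hash: "7a32184d9e0068b0fa75fd0ecaad798f9bc573d4921c519b12968e26ff0747a3"
shelley_era_config:
  first_slot: 0
  start_epoch: 0
  known_time: 1722355346
  slot_length: 1
  epoch_length_seconds: 500
"#;
    let cfg: CustomNetworkConfig = serde_yaml::from_str(yaml)?;
    println!("{cfg:?}");
    Ok(())
}
```
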

2 changes: 1 addition & 1 deletion indexer/configs/default.yml
@@ -8,6 +8,6 @@ sink:
   db:
     type: postgres
     database_url: postgresql://carp:1234@localhost:5432/carp_mainnet
-  network: mainnet # preview / preprod / testnet
+  network: mainnet # preview / preprod / testnet / custom
 
 start_block:
2 changes: 1 addition & 1 deletion indexer/configs/oura.yml
@@ -8,6 +8,6 @@ sink:
   db:
     type: postgres
     database_url: postgresql://carp:1234@localhost:5432/carp_mainnet
-  network: mainnet # preview / preprod / testnet
+  network: mainnet # preview / preprod / testnet / custom
 
 start_block:
23 changes: 3 additions & 20 deletions indexer/src/genesis.rs
@@ -13,36 +13,19 @@ use migration::DbErr;
 use tasks::utils::TaskPerfAggregator;
 use tasks::{execution_plan::ExecutionPlan, genesis::genesis_executor::process_genesis_block};
 
-const GENESIS_MAINNET: &str = "./genesis/mainnet-byron-genesis.json";
-const GENESIS_PREVIEW: &str = "./genesis/preview-byron-genesis.json";
-const GENESIS_PREPROD: &str = "./genesis/preprod-byron-genesis.json";
-const GENESIS_TESTNET: &str = "./genesis/testnet-byron-genesis.json";
-
 pub async fn process_genesis(
     conn: &DatabaseConnection,
     network: &str,
+    genesis_folder: &str,
     exec_plan: Arc<ExecutionPlan>,
 ) -> anyhow::Result<()> {
-    // https://github.com/txpipe/oura/blob/67b01e8739ed2927ced270e08daea74b03bcc7f7/src/sources/common.rs#L91
-    let genesis_path = match dbg!(network) {
-        "mainnet" => GENESIS_MAINNET,
-        "testnet" => GENESIS_TESTNET,
-        "preview" => GENESIS_PREVIEW,
-        "preprod" => GENESIS_PREPROD,
-        rest => {
-            return Err(anyhow!(
-                "{} is invalid. NETWORK must be either mainnet/preview/preprod/testnet",
-                rest
-            ))
-        }
-    };
-
     let task_perf_aggregator = Arc::new(Mutex::new(TaskPerfAggregator::default()));
 
     tracing::info!("Parsing genesis file...");
     let mut time_counter = std::time::Instant::now();
 
-    let file = fs::File::open(genesis_path).expect("Failed to open genesis file");
+    let file = fs::File::open(format!("{}/{}-byron-genesis.json", genesis_folder, network))
+        .expect("Failed to open genesis file");
     let genesis_file: Box<GenesisData> = Box::new(
         parse_genesis_data(file).map_err(|err| anyhow!("can't parse genesis data: {:?}", err))?,
     );
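
The rewritten `process_genesis` derives the genesis file path from its `genesis_folder` and `network` arguments instead of the deleted per-network constants. A tiny standalone illustration of the resulting paths (not part of the PR):

```rust
// Mirrors the format! call added to process_genesis above.
fn genesis_path(genesis_folder: &str, network: &str) -> String {
    format!("{}/{}-byron-genesis.json", genesis_folder, network)
}

fn main() {
    // Bundled networks resolve to the same files the deleted constants pointed at.
    assert_eq!(
        genesis_path("./genesis", "mainnet"),
        "./genesis/mainnet-byron-genesis.json"
    );
    // A custom network needs a `custom-byron-genesis.json` inside its genesis_folder
    // (a trailing slash in genesis_folder would produce a double slash, which Unix
    // paths tolerate).
    assert_eq!(
        genesis_path("/home/user/Cardano/carp/indexer/genesis", "custom"),
        "/home/user/Cardano/carp/indexer/genesis/custom-byron-genesis.json"
    );
}
```
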
31 changes: 20 additions & 11 deletions indexer/src/main.rs
@@ -52,7 +52,13 @@ pub enum DbConfig {
 #[serde(tag = "type", rename_all = "snake_case")]
 #[serde(deny_unknown_fields)]
 pub enum SinkConfig {
-    Cardano { db: DbConfig, network: String },
+    Cardano {
+        db: DbConfig,
+        network: String,
+        /// Custom configuration. If not present it will be inferred from the network name
+        custom_config: Option<dcspark_blockchain_source::cardano::NetworkConfiguration>,
+        genesis_folder: Option<String>,
+    },
 }
 
 pub enum Network {}
@@ -154,10 +160,20 @@ async fn main() -> anyhow::Result<()> {
         config
     };
 
-    let (network, mut sink) = match config.sink {
-        SinkConfig::Cardano { ref network, .. } => (
+    let (network, base_config, mut sink) = match &config.sink {
+        SinkConfig::Cardano { network, custom_config, .. } => (
             network.clone(),
-            CardanoSink::new(config.sink, exec_plan)
+            match custom_config {
+                Some(custom_config) => custom_config.clone(),
+                None => match network.as_ref() {
+                    "mainnet" => dcspark_blockchain_source::cardano::NetworkConfiguration::mainnet(),
+                    "preprod" => dcspark_blockchain_source::cardano::NetworkConfiguration::preprod(),
+                    "preview" => dcspark_blockchain_source::cardano::NetworkConfiguration::preview(),
+                    "custom" => panic!("sink.custom_config is mandatory when setting network to custom"),
+                    unknown_network => return Err(anyhow::anyhow!("network {unknown_network} not supported by source")),
+                }
+            },
+            CardanoSink::new(config.sink.clone(), exec_plan)
                 .await
                 .context("Can't create cardano sink")?,
         ),
@@ -180,13 +196,6 @@ async fn main() -> anyhow::Result<()> {
             main_loop(source, sink, start_from, running, processing_finished).await
         }
         SourceConfig::CardanoNet { relay } => {
-            let base_config = match network.as_ref() {
-                "mainnet" => dcspark_blockchain_source::cardano::NetworkConfiguration::mainnet(),
-                "preprod" => dcspark_blockchain_source::cardano::NetworkConfiguration::preprod(),
-                "preview" => dcspark_blockchain_source::cardano::NetworkConfiguration::preview(),
-                _ => return Err(anyhow::anyhow!("network not supported by source")),
-            };
-
             // try to find a confirmed point.
             //
             // this way the multiverse can be temporary, which saves setting up the extra db
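
Both fields added to `SinkConfig::Cardano` are `Option`s, so existing configs that only set `db` and `network` keep deserializing and fall back to the named network's built-in `NetworkConfiguration`. A minimal sketch of that behaviour, using stand-in types (`serde_yaml::Value` in place of the real `DbConfig` and `NetworkConfiguration`) rather than carp's actual definitions:

```rust
use serde::Deserialize;

// Stand-in for the enum shown in the diff; `db` and `custom_config` use
// serde_yaml::Value instead of the real DbConfig / NetworkConfiguration types.
#[derive(Debug, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
#[serde(deny_unknown_fields)]
enum SinkConfig {
    Cardano {
        db: serde_yaml::Value,
        network: String,
        custom_config: Option<serde_yaml::Value>,
        genesis_folder: Option<String>,
    },
}

fn main() -> anyhow::Result<()> {
    // An old-style sink section: the two new optional fields are simply absent.
    let old_style = r#"
type: cardano
db:
  type: postgres
  database_url: postgresql://carp:1234@localhost:5432/carp_mainnet
network: mainnet
"#;
    let parsed: SinkConfig = serde_yaml::from_str(old_style)?;
    match parsed {
        SinkConfig::Cardano { network, custom_config, genesis_folder, .. } => {
            assert_eq!(network, "mainnet");
            // With no custom_config, main() falls back to NetworkConfiguration::mainnet()
            // and the sink keeps using the bundled ./genesis folder.
            assert!(custom_config.is_none() && genesis_folder.is_none());
        }
    }
    Ok(())
}
```
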
36 changes: 33 additions & 3 deletions indexer/src/sinks/cardano.rs
@@ -3,6 +3,7 @@ use crate::perf_aggregator::PerfAggregator;
 use crate::sink::Sink;
 use crate::types::{MultiEraBlock, StoppableService};
 use crate::{genesis, DbConfig, SinkConfig};
+use anyhow::anyhow;
 use async_trait::async_trait;
 
 use dcspark_blockchain_source::cardano::Point;
@@ -28,6 +29,7 @@ use tasks::utils::TaskPerfAggregator;
 pub struct CardanoSink {
     db: DatabaseConnection,
     network: String,
+    genesis_folder: Option<String>,
     exec_plan: Arc<ExecutionPlan>,
 
     last_epoch: i128,
@@ -38,8 +40,13 @@ pub struct CardanoSink {
 impl CardanoSink {
     #[allow(unreachable_patterns)]
     pub async fn new(config: SinkConfig, exec_plan: Arc<ExecutionPlan>) -> anyhow::Result<Self> {
-        let (db_config, network) = match config {
-            SinkConfig::Cardano { db, network } => (db, network),
+        let (db_config, network, genesis_folder) = match config {
+            SinkConfig::Cardano {
+                db,
+                network,
+                custom_config: _,
+                genesis_folder,
+            } => (db, network, genesis_folder),
             _ => todo!("Invalid sink config provided"),
         };
         match db_config {
@@ -49,6 +56,7 @@ impl CardanoSink {
             Ok(Self {
                 db: conn,
                 network,
+                genesis_folder,
                 exec_plan,
                 last_epoch: -1,
                 epoch_start_time: std::time::Instant::now(),
@@ -118,6 +126,8 @@ impl CardanoSink {
     }
 }
 
+const KNOWN_GENESIS_FOLDER: &str = "./genesis";
+
 #[async_trait]
 impl Sink for CardanoSink {
     type From = Point;
@@ -130,7 +140,27 @@ impl Sink for CardanoSink {
         };
 
         if start.is_empty() {
-            genesis::process_genesis(&self.db, &self.network, self.exec_plan.clone()).await?;
+            // https://github.com/txpipe/oura/blob/67b01e8739ed2927ced270e08daea74b03bcc7f7/src/sources/common.rs#L91
+            let genesis_folder: &str = match dbg!(&self.network[..]) {
+                "mainnet" | "testnet" | "preview" | "preprod" => KNOWN_GENESIS_FOLDER,
+                "custom" => &self
+                    .genesis_folder
+                    .as_ref()
+                    .expect("genesis_folder should be specified for custom networks")[..],
+                rest => {
+                    return Err(anyhow!(
+                        "{} is invalid. NETWORK must be either mainnet/preview/preprod/testnet or a 'custom' network",
+                        rest
+                    ))
+                }
+            };
+            genesis::process_genesis(
+                &self.db,
+                &self.network,
+                genesis_folder,
+                self.exec_plan.clone(),
+            )
+            .await?;
             return self.get_latest_point().await;
         }
 
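
The folder selection in `start_from` can be read as a small pure rule: bundled networks use the repository's `./genesis` folder, `custom` requires an explicit `genesis_folder`, and anything else is rejected. The sketch below restates that rule as a standalone function (names and example paths are illustrative, and it returns an error where the PR calls `expect`):

```rust
/// Restates the genesis-folder rule from CardanoSink::start_from.
fn resolve_genesis_folder<'a>(
    network: &str,
    configured_folder: Option<&'a str>,
) -> anyhow::Result<&'a str> {
    const KNOWN_GENESIS_FOLDER: &str = "./genesis";
    match network {
        // Bundled networks ship their Byron genesis files in the repo.
        "mainnet" | "testnet" | "preview" | "preprod" => Ok(KNOWN_GENESIS_FOLDER),
        // A custom network must point at its own folder.
        "custom" => configured_folder.ok_or_else(|| {
            anyhow::anyhow!("genesis_folder should be specified for custom networks")
        }),
        rest => Err(anyhow::anyhow!(
            "{} is invalid. NETWORK must be either mainnet/preview/preprod/testnet or a 'custom' network",
            rest
        )),
    }
}

fn main() {
    assert_eq!(resolve_genesis_folder("preprod", None).unwrap(), "./genesis");
    assert_eq!(
        resolve_genesis_folder("custom", Some("/data/my-devnet/genesis")).unwrap(),
        "/data/my-devnet/genesis"
    );
    assert!(resolve_genesis_folder("devnet", None).is_err());
}
```
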