
Commit: cargo fmt
sslivkoff committed Jul 26, 2023
1 parent 560e390 commit 323cc76
Showing 9 changed files with 52 additions and 27 deletions.
2 changes: 1 addition & 1 deletion crates/cli/src/lib.rs
@@ -13,8 +13,8 @@ mod run;
 mod summaries;
 
 // used in main.rs but not lib.rs
-use tokio as _;
 use eyre as _;
+use tokio as _;
 
 pub use args::Args;
 pub use parse::parse_opts;
4 changes: 2 additions & 2 deletions crates/cli/src/main.rs
@@ -14,8 +14,8 @@ use eyre::Result;
 async fn main() -> Result<()> {
     let args = Args::parse();
     match run::run(args).await {
-        Ok(Some(_freeze_summary)) => { Ok(()) },
-        Ok(None) => { Ok(()) },
+        Ok(Some(_freeze_summary)) => Ok(()),
+        Ok(None) => Ok(()),
         Err(e) => Err(eyre::Report::from(e)),
     }
 }
2 changes: 1 addition & 1 deletion crates/cli/src/parse/args.rs
@@ -2,8 +2,8 @@ use std::sync::Arc;
 
 use cryo_freeze::FileOutput;
 use cryo_freeze::MultiQuery;
-use cryo_freeze::Source;
 use cryo_freeze::ParseError;
+use cryo_freeze::Source;
 
 use crate::args::Args;
 
49 changes: 36 additions & 13 deletions crates/cli/src/parse/file_output.rs
@@ -12,12 +12,19 @@ use crate::args::Args;
 pub(crate) fn parse_file_output(args: &Args, source: &Source) -> Result<FileOutput, ParseError> {
     // process output directory
     let output_dir = std::fs::canonicalize(args.output_dir.clone())
-        .map_err(|_e| ParseError::ParseError("Failed to canonicalize output directory".to_string()))?
+        .map_err(|_e| {
+            ParseError::ParseError("Failed to canonicalize output directory".to_string())
+        })?
         .to_string_lossy()
         .into_owned();
     match fs::create_dir_all(&output_dir) {
         Ok(_) => {}
-        Err(e) => return Err(ParseError::ParseError(format!("Error creating directory: {}", e))),
+        Err(e) => {
+            return Err(ParseError::ParseError(format!(
+                "Error creating directory: {}",
+                e
+            )))
+        }
     };
 
     let file_suffix = &args.file_suffix;
@@ -65,7 +72,9 @@ pub(crate) fn parse_network_name(args: &Args, chain_id: u64) -> String {
 
 pub(crate) fn parse_output_format(args: &Args) -> Result<FileFormat, ParseError> {
     match (args.csv, args.json) {
-        (true, true) => Err(ParseError::ParseError("choose one of parquet, csv, or json".to_string())),
+        (true, true) => Err(ParseError::ParseError(
+            "choose one of parquet, csv, or json".to_string(),
+        )),
         (true, _) => Ok(FileFormat::Csv),
         (_, true) => Ok(FileFormat::Json),
         (false, false) => Ok(FileFormat::Parquet),
@@ -81,30 +90,44 @@ fn parse_compression(input: &Vec<String>) -> Result<ParquetCompression, ParseErr
         [algorithm, level_str] if algorithm.as_str() == "gzip" => match level_str.parse::<u8>() {
             Ok(level) => match GzipLevel::try_new(level) {
                 Ok(gzip_level) => Ok(ParquetCompression::Gzip(Some(gzip_level))),
-                Err(_) => Err(ParseError::ParseError("Invalid compression level".to_string())),
+                Err(_) => Err(ParseError::ParseError(
+                    "Invalid compression level".to_string(),
+                )),
             },
-            Err(_) => Err(ParseError::ParseError("Invalid compression level".to_string())),
+            Err(_) => Err(ParseError::ParseError(
+                "Invalid compression level".to_string(),
+            )),
         },
         [algorithm, level_str] if algorithm.as_str() == "brotli" => {
             match level_str.parse::<u32>() {
                 Ok(level) => match BrotliLevel::try_new(level) {
                     Ok(brotli_level) => Ok(ParquetCompression::Brotli(Some(brotli_level))),
-                    Err(_) => Err(ParseError::ParseError("Invalid compression level".to_string())),
+                    Err(_) => Err(ParseError::ParseError(
+                        "Invalid compression level".to_string(),
+                    )),
                 },
-                Err(_) => Err(ParseError::ParseError("Invalid compression level".to_string())),
+                Err(_) => Err(ParseError::ParseError(
+                    "Invalid compression level".to_string(),
+                )),
             }
         }
         [algorithm, level_str] if algorithm.as_str() == "zstd" => match level_str.parse::<i32>() {
             Ok(level) => match ZstdLevel::try_new(level) {
                 Ok(zstd_level) => Ok(ParquetCompression::Zstd(Some(zstd_level))),
-                Err(_) => Err(ParseError::ParseError("Invalid compression level".to_string())),
+                Err(_) => Err(ParseError::ParseError(
+                    "Invalid compression level".to_string(),
+                )),
             },
-            Err(_) => Err(ParseError::ParseError("Invalid compression level".to_string())),
+            Err(_) => Err(ParseError::ParseError(
+                "Invalid compression level".to_string(),
+            )),
         },
-        [algorithm] if ["gzip", "brotli", "zstd"].contains(&algorithm.as_str()) => {
-            Err(ParseError::ParseError("Missing compression level".to_string()))
-        }
-        _ => Err(ParseError::ParseError("Invalid compression algorithm".to_string())),
+        [algorithm] if ["gzip", "brotli", "zstd"].contains(&algorithm.as_str()) => Err(
+            ParseError::ParseError("Missing compression level".to_string()),
+        ),
+        _ => Err(ParseError::ParseError(
+            "Invalid compression algorithm".to_string(),
+        )),
     }
 }
 
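For orientation, parse_compression maps a user-supplied ["algorithm", "level"] pair onto a polars ParquetCompression variant and rejects anything else with a ParseError. A minimal usage sketch, assuming only the private function and error type visible in this hunk (the test module itself is hypothetical and not part of this commit):

#[cfg(test)]
mod compression_parsing_tests {
    use super::*;

    #[test]
    fn parses_zstd_with_level() {
        // a compression spec such as ["zstd", "3"] arrives here as a Vec<String>
        let spec = vec!["zstd".to_string(), "3".to_string()];
        assert!(parse_compression(&spec).is_ok());
    }

    #[test]
    fn rejects_missing_level_and_unknown_algorithm() {
        // an algorithm without a level hits the "Missing compression level" arm
        assert!(parse_compression(&vec!["gzip".to_string()]).is_err());
        // an unrecognized algorithm hits the "Invalid compression algorithm" arm
        assert!(parse_compression(&vec!["lz77".to_string(), "9".to_string()]).is_err());
    }
}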
7 changes: 4 additions & 3 deletions crates/cli/src/parse/query.rs
@@ -109,9 +109,10 @@ fn parse_schemas(args: &Args) -> Result<HashMap<Datatype, Table>, ParseError> {
             )
             .map(|schema| (*datatype, schema))
             .map_err(|_e| {
-                ParseError::ParseError(
-                    format!("Failed to get schema for datatype: {:?}", datatype),
-                )
+                ParseError::ParseError(format!(
+                    "Failed to get schema for datatype: {:?}",
+                    datatype
+                ))
             })
         })
         .collect();
2 changes: 1 addition & 1 deletion crates/cli/src/run.rs
@@ -3,8 +3,8 @@ use std::time::SystemTime;
 use crate::args;
 use crate::parse;
 use crate::summaries;
-use cryo_freeze::FreezeSummary;
 use cryo_freeze::FreezeError;
+use cryo_freeze::FreezeSummary;
 
 /// run freeze for given Args
 pub async fn run(args: args::Args) -> Result<Option<FreezeSummary>, FreezeError> {
4 changes: 3 additions & 1 deletion crates/freeze/src/types/dataframes/export.rs
@@ -79,7 +79,9 @@ fn df_to_csv(df: &mut DataFrame, filename: &str) -> Result<(), FileError> {
 /// write polars dataframe to json file
 fn df_to_json(df: &mut DataFrame, filename: &str) -> Result<(), FileError> {
     let file = std::fs::File::create(filename).map_err(|_e| FileError::FileWriteError)?;
-    let result = JsonWriter::new(file).with_json_format(JsonFormat::Json).finish(df);
+    let result = JsonWriter::new(file)
+        .with_json_format(JsonFormat::Json)
+        .finish(df);
     match result {
         Err(_e) => Err(FileError::FileWriteError),
         _ => Ok(()),
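The reflowed chain above is behavior-preserving: df_to_json still creates the file and then streams the DataFrame out as JSON through polars' JsonWriter. A small usage sketch, assuming the private function shown in this hunk and that polars' df! macro is available under this crate's feature set (the test module is hypothetical, not part of this commit):

#[cfg(test)]
mod json_export_tests {
    use super::*;
    use polars::prelude::*;

    #[test]
    fn writes_dataframe_as_json() {
        // build a tiny frame and write it to a temporary path
        let mut df = df!("block_number" => &[1u64, 2, 3]).expect("valid dataframe");
        let path = std::env::temp_dir().join("df_to_json_sketch.json");
        assert!(df_to_json(&mut df, path.to_str().expect("utf-8 path")).is_ok());
    }
}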
1 change: 0 additions & 1 deletion crates/freeze/src/types/errors.rs
@@ -67,7 +67,6 @@ pub enum ParseError {
     ParseIntError(#[from] std::num::ParseIntError),
 }
 
-
 /// Error performing a chunk operation
 #[derive(Error, Debug)]
 pub enum ChunkError {
8 changes: 4 additions & 4 deletions crates/freeze/src/types/summaries.rs
@@ -1,5 +1,5 @@
-use std::collections::HashMap;
 use crate::types::Datatype;
+use std::collections::HashMap;
 
 /// Summary of freeze operation
 pub struct FreezeSummary {
@@ -34,9 +34,9 @@ impl FreezeSummaryAgg for Vec<FreezeChunkSummary> {
             }
             for (datatype, path) in chunk_summary.paths {
                 paths_by_type
-                .entry(datatype)
-                .or_insert_with(Vec::new)
-                .push(path);
+                    .entry(datatype)
+                    .or_insert_with(Vec::new)
+                    .push(path);
             }
         }
 
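The aggregation in this hunk collects, for every chunk, the files written per datatype into a single map. The entry-API pattern it leans on, shown in isolation (a generic standard-library sketch, not code from this crate; string keys and values stand in for the crate's own Datatype and path types):

use std::collections::HashMap;

fn main() {
    let chunk_paths = vec![
        ("blocks", "network__blocks__00.parquet"),
        ("blocks", "network__blocks__01.parquet"),
        ("logs", "network__logs__00.parquet"),
    ];
    let mut paths_by_type: HashMap<&str, Vec<String>> = HashMap::new();
    for (datatype, path) in chunk_paths {
        // or_insert_with(Vec::new) creates the Vec the first time a datatype appears,
        // then push appends this chunk's path to it
        paths_by_type
            .entry(datatype)
            .or_insert_with(Vec::new)
            .push(path.to_string());
    }
    assert_eq!(paths_by_type["blocks"].len(), 2);
}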
