Skip to content

Commit

Permalink
refactor: rewrite enum Url
Browse files Browse the repository at this point in the history
  • Loading branch information
aphronyx committed May 17, 2024
1 parent aa9a954 commit ec29c9c
Show file tree
Hide file tree
Showing 5 changed files with 69 additions and 52 deletions.
36 changes: 31 additions & 5 deletions src/rust/zh.copymanga/Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions src/rust/zh.copymanga/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -22,4 +22,5 @@ cbc = "0.1.2"
chinese-number = { version = "0.7.7", default-features = false, features = ["chinese-to-number"] }
hex = { version = "0.4.3", default-features = false, features = ["alloc"] }
regex = { version = "1.10.3", default-features = false, features = ["unicode"] }
strum_macros = "0.26.2"
uuid = { version = "1.4.1", default-features = false }
27 changes: 17 additions & 10 deletions src/rust/zh.copymanga/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ fn get_manga_list(filters: Vec<Filter>, page: i32) -> Result<MangaPageResult> {

#[get_manga_details]
fn get_manga_details(manga_id: String) -> Result<Manga> {
let manga_page = Url::Manga(&manga_id).get_html()?;
let manga_page = Url::Manga { id: &manga_id }.get_html()?;

let cover = manga_page
.get_attr("img.lazyload", "data-src")
Expand All @@ -50,7 +50,7 @@ fn get_manga_details(manga_id: String) -> Result<Manga> {

let description = manga_page.get_text("p.intro");

let manga_url = Url::Manga(&manga_id).to_string();
let manga_url = Url::Manga { id: &manga_id }.to_string();

let categories = manga_page
.select("span.comicParticulars-left-theme-all.comicParticulars-tag > a")
Expand Down Expand Up @@ -82,7 +82,7 @@ fn get_manga_details(manga_id: String) -> Result<Manga> {

#[get_chapter_list]
fn get_chapter_list(manga_id: String) -> Result<Vec<Chapter>> {
let group_values = Url::ChapterList(&manga_id)
let group_values = Url::ChapterList { id: &manga_id }
.get_json()?
.as_object()?
.get_as_string("results")?
Expand Down Expand Up @@ -140,7 +140,11 @@ fn get_chapter_list(manga_id: String) -> Result<Vec<Chapter>> {
.map(|(chapter_id, title, date_updated)| {
let part = title.parse::<Part>()?;

let chapter_url = Url::Chapter(&manga_id, chapter_id).to_string();
let chapter_url = Url::Chapter {
manga_id: &manga_id,
chapter_id,
}
.to_string();

Ok(Chapter {
id: chapter_id.clone(),
Expand All @@ -163,12 +167,15 @@ fn get_chapter_list(manga_id: String) -> Result<Vec<Chapter>> {
fn get_page_list(manga_id: String, chapter_id: String) -> Result<Vec<Page>> {
let mut pages = Vec::<Page>::new();

let page_arr = Url::Chapter(&manga_id, &chapter_id)
.get_html()?
.get_attr("div.imageData", "contentkey")
.decrypt()
.json()?
.as_array()?;
let page_arr = Url::Chapter {
manga_id: &manga_id,
chapter_id: &chapter_id,
}
.get_html()?
.get_attr("div.imageData", "contentkey")
.decrypt()
.json()?
.as_array()?;

let image_format = defaults_get("imageFormat").and_then(|v| v.as_string().map(|v| v.read()))?;

Expand Down
2 changes: 1 addition & 1 deletion src/rust/zh.copymanga/src/parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,7 @@ impl MangaArr for ArrayRef {
.collect::<Vec<_>>()
.join("、");

let manga_url = Url::Manga(&manga_id).to_string();
let manga_url = Url::Manga { id: &manga_id }.to_string();

let status_code = manga_obj.get("status").as_int().unwrap_or(-1);
let status = match status_code {
Expand Down
55 changes: 19 additions & 36 deletions src/rust/zh.copymanga/src/url.rs
Original file line number Diff line number Diff line change
@@ -1,17 +1,17 @@
use aidoku::{
error::Result,
helpers::uri::QueryParameters,
prelude::format,
std::{html::Node, net::Request, ValueRef, Vec},
Filter, FilterType,
};
use alloc::string::ToString;
use core::fmt::Display;
use strum_macros::Display;

#[derive(Display)]
#[strum(prefix = "https://copymanga.site")]
pub enum Url<'a> {
/// https://copymanga.site/comics?theme={}&status={}&region={}&ordering={}&offset={}&limit={}
///
/// ---
///
/// ## `theme`
///
/// - : 全部
Expand Down Expand Up @@ -111,12 +111,9 @@ pub enum Url<'a> {
/// ## `limit`
///
/// Manga per response
Filters(QueryParameters),
#[strum(to_string = "/comics?{query}")]
Filters { query: QueryParameters },

/// https://copymanga.site/api/kb/web/searchb/comics?offset={}&platform={}&limit={}&q={}&q_type={}
///
/// ---
///
/// ## `offset`
///
/// `({page} - 1) * {limit}`
Expand All @@ -139,16 +136,20 @@ pub enum Url<'a> {
/// - `name`: 名稱
/// - `author`: 作者
/// - `local`: 漢化組
Search(QueryParameters),
#[strum(to_string = "/api/kb/web/searchb/comics?{query}")]
Search { query: QueryParameters },

/// https://copymanga.site/comic/{manga_id}
Manga(&'a str),
#[strum(to_string = "/comic/{id}")]
Manga { id: &'a str },

/// https://copymanga.site/comicdetail/{manga_id}/chapters
ChapterList(&'a str),
#[strum(to_string = "/comicdetail/{id}/chapters")]
ChapterList { id: &'a str },

/// https://copymanga.site/comic/{manga_id}/chapter/{chapter_id}
Chapter(&'a str, &'a str),
#[strum(to_string = "/comic/{manga_id}/chapter/{chapter_id}")]
Chapter {
manga_id: &'a str,
chapter_id: &'a str,
},
}

/// # 狀態
Expand Down Expand Up @@ -286,24 +287,6 @@ impl<'a> Url<'a> {
}
}

// Manual URL rendering for each `Url` variant (replaced in this commit by a
// `strum_macros::Display` derive with `#[strum(prefix = ...)]` / `to_string`
// templates on the enum itself).
//
// Each arm concatenates the site domain with a variant-specific path:
//   Filters     -> {DOMAIN}/comics?{query}
//   Search      -> {DOMAIN}/api/kb/web/searchb/comics?{query}
//   Manga       -> {DOMAIN}{MANGA_PATH}{manga_id}
//   ChapterList -> {DOMAIN}/comicdetail/{manga_id}/chapters
//   Chapter     -> {DOMAIN}{MANGA_PATH}{manga_id}{CHAPTER_PATH}{chapter_id}
// DOMAIN, MANGA_PATH and CHAPTER_PATH are constants declared elsewhere in
// this file (not visible in this excerpt) — presumably "https://copymanga.site",
// "/comic/" and "/chapter/" respectively, judging by the enum's doc comments;
// confirm against the full url.rs.
impl<'a> Display for Url<'a> {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
match self {
Self::Filters(query) => write!(f, "{}/comics?{}", DOMAIN, query),
Self::Search(query) => write!(f, "{}/api/kb/web/searchb/comics?{}", DOMAIN, query),
Self::Manga(manga_id) => write!(f, "{}{}{}", DOMAIN, MANGA_PATH, manga_id),
// Multi-line arm: block body only because the single write! exceeded
// the line width, not for any semantic reason.
Self::ChapterList(manga_id) => {
write!(f, "{}/comicdetail/{}/chapters", DOMAIN, manga_id)
}
Self::Chapter(manga_id, chapter_id) => write!(
f,
"{}{}{}{}{}",
DOMAIN, MANGA_PATH, manga_id, CHAPTER_PATH, chapter_id
),
}
}
}

impl<'a> From<(Vec<Filter>, i32)> for Url<'a> {
fn from((filters, page): (Vec<Filter>, i32)) -> Self {
let mut genre_index = 0;
Expand Down Expand Up @@ -357,7 +340,7 @@ impl<'a> From<(Vec<Filter>, i32)> for Url<'a> {
query.push("q", Some(&search_str));
query.push_encoded("q_type", None);

return Url::Search(query);
return Url::Search { query };
}

_ => continue,
Expand All @@ -369,7 +352,7 @@ impl<'a> From<(Vec<Filter>, i32)> for Url<'a> {
query.push_encoded("region", Some(region.to_string().as_str()));
query.push_encoded("ordering", Some(sort_by.to_string().as_str()));

Url::Filters(query)
Url::Filters { query }
}
}

Expand Down

0 comments on commit ec29c9c

Please sign in to comment.