get_page() now returns the actual page data (and will probably crash if you give it an invalid page)
parent ac4f30ce9e
commit b9def3324e

src/lib.rs (27 changed lines)
@@ -53,6 +53,7 @@ pub struct Config {
     pub cache_folder: String,
 }
 
+#[derive(Debug)]
 pub struct Page {
     pub date: String,
     pub next_page: Option<String>,
@@ -60,7 +61,7 @@ pub struct Page {
     pub image: String,
 }
 
-pub async fn get_page(date: String) {
+pub async fn get_page(date: String) -> Page {
     let page_html = &reqwest::get(format!(
         "https://girlgeniusonline.com/comic.php?date={date}"
     ))
@@ -73,9 +74,29 @@ pub async fn get_page(date: String) {
     let parsed = Html::parse_document(page_html);
 
     // get image url for this page
-    let selector = Selector::parse("img[alt=Comic]").unwrap();
+    let imageselector = Selector::parse("img[alt=Comic]").unwrap();
     let mut image = String::new();
-    for element in parsed.select(&selector) {
+    for element in parsed.select(&imageselector) {
         image = element.value().attr("src").unwrap().replace('"', "");
     }
+
+    // get pages
+    let next = Selector::parse("#topnext").unwrap();
+    let mut next_page = None;
+    for element in parsed.select(&next) {
+        next_page = Some(element.value().attr("href").unwrap().to_string());
+    }
+
+    let prev = Selector::parse("#topprev").unwrap();
+    let mut prev_page = None;
+    for element in parsed.select(&prev) {
+        prev_page = Some(element.value().attr("href").unwrap().to_string());
+    }
+
+    Page {
+        date,
+        next_page,
+        prev_page,
+        image,
+    }
 }
@@ -18,5 +18,6 @@ async fn main() {
     let config = ggg::read_config(&config_file);
     ggg::ensure_exists(config.cache_folder);
 
-    ggg::get_page(config.latest_date).await;
+    let current_page = ggg::get_page(config.latest_date).await;
+    println!("{current_page:?}");
 }
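The second hunk is the binary's async fn main (presumably src/main.rs; its stat line isn't captured above): it binds the returned Page and debug-prints it, which is what the new #[derive(Debug)] is for. The "will probably crash" in the commit message comes from the .unwrap() calls: if the date doesn't resolve to a real comic page, the img[alt=Comic] element or its src attribute can be missing and get_page panics. Below is a minimal sketch of a fallible variant, not part of this commit; it assumes the same reqwest and scraper crates, guesses at the fetch lines elided between the hunks (.await, .text()), and uses the hypothetical name try_get_page.

// Sketch only, not part of this commit: a fallible variant of get_page that
// returns None instead of panicking when the page is missing something.
// Assumes the same reqwest and scraper crates; the fetch steps (.await, .text())
// are a guess at the lines elided between the hunks.
use scraper::{Html, Selector};

#[derive(Debug)]
pub struct Page {
    pub date: String,
    pub next_page: Option<String>,
    pub prev_page: Option<String>, // declaration assumed from the struct literal in the diff
    pub image: String,
}

pub async fn try_get_page(date: String) -> Option<Page> {
    let page_html = reqwest::get(format!(
        "https://girlgeniusonline.com/comic.php?date={date}"
    ))
    .await
    .ok()?
    .text()
    .await
    .ok()?;

    let parsed = Html::parse_document(&page_html);

    // A missing comic image now yields None instead of an unwrap panic.
    let image_selector = Selector::parse("img[alt=Comic]").ok()?;
    let image = parsed
        .select(&image_selector)
        .next()?
        .value()
        .attr("src")?
        .replace('"', "");

    // The navigation links are already Option<String> in Page, so absent
    // #topnext / #topprev elements simply stay None.
    let link = |css: &str| -> Option<String> {
        let selector = Selector::parse(css).ok()?;
        parsed
            .select(&selector)
            .next()?
            .value()
            .attr("href")
            .map(|href| href.to_string())
    };

    Some(Page {
        next_page: link("#topnext"),
        prev_page: link("#topprev"),
        image,
        date,
    })
}

With this shape, main could match on the Option and report a bad date instead of aborting.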