diff --git a/src/data_source.rs b/src/data_source.rs
index 4f869f1..a57e88d 100644
--- a/src/data_source.rs
+++ b/src/data_source.rs
@@ -7,18 +7,27 @@ use serde_json::{Value, json};
 use std::fs;
 use std::path::Path;
 
+/// Represents a full item set for a champion, ready to be serialized to JSON.
 #[derive(Serialize, Deserialize)]
 struct ItemSet {
+    /// The title of the item set.
     title: String,
+    /// The type of the item set (e.g., "custom").
     #[serde(rename = "type")]
     type_: String,
+    /// The map this item set is for (e.g., "any").
     map: String,
+    /// The mode this item set is for (e.g., "any").
     mode: String,
+    /// Whether this item set has priority.
     priority: bool,
+    /// The sort rank of the item set.
     sortrank: u32,
+    /// The blocks of items in the set.
     blocks: Vec<Value>,
 }
 
+/// Represents a build (block) of items.
 #[derive(Serialize, Deserialize)]
 pub struct Build {
     #[serde(rename = "type")]
@@ -26,6 +35,7 @@ pub struct Build {
     pub items: Vec<Item>,
 }
 
+/// Represents a single item in a build.
 #[derive(Serialize, Deserialize)]
 pub struct Item {
     pub id: String,
@@ -39,26 +49,33 @@ pub struct Stat {
     pub patch: String,
 }
 
+/// Trait for a data source that can provide champion item set data.
 pub trait DataSource {
+    /// Returns the alias of the data source.
     fn get_alias(&self) -> &str;
 
+    /// Returns the timeout for the data source.
     fn get_timeout(&self) -> u64;
 
+    /// Returns a map of champion IDs to their possible positions.
     fn get_champs_with_positions(&self, champion: &Champion) -> IndexMap<u32, Vec<String>>;
 
-    fn make_item_set(&self, items: Vec<&str>, label: String) -> Value {
+    /// Creates a JSON value representing an item set block from a list of item IDs and a label.
+    fn make_item_set(&self, items: Vec<String>, label: String) -> Value {
         json!({
-            "items": items.iter().map(|x| json!({"id": x.to_string(), "count": 1})).collect::<Vec<Value>>(),
+            "items": items.iter().map(|x| json!({"id": x, "count": 1})).collect::<Vec<Value>>(),
             "type": label
         })
     }
 
+    /// Returns champion data with win percentage for the given positions.
     fn get_champ_data_with_win_pourcentage(
         &self,
        champ: &ChampInfo,
        positions: &[String],
    ) -> Vec<(String, Vec<Value>, Stat)>;
 
+    /// Writes item sets for the given champion and positions to the specified path.
     fn write_item_set(
         &self,
         champ: &ChampInfo,
@@ -73,19 +90,11 @@ pub trait DataSource {
         );
         let data = self.get_champ_data_with_win_pourcentage(champ, positions);
 
-        let mut missing_roles = vec![];
-        for pos in positions {
-            let mut ok = false;
-            for build in &data {
-                if build.0 == *pos {
-                    ok = true;
-                    break;
-                }
-            }
-            if !ok {
-                missing_roles.push(pos.to_owned());
-            }
-        }
+        let missing_roles: Vec<_> = positions
+            .iter()
+            .filter(|pos| !data.iter().any(|build| &build.0 == *pos))
+            .cloned()
+            .collect();
 
         if !missing_roles.is_empty() {
             error!(
                 "{}: Can't get data for {} at {}",
diff --git a/src/kb_data_source.rs b/src/kb_data_source.rs
index 3203834..0481544 100644
--- a/src/kb_data_source.rs
+++ b/src/kb_data_source.rs
@@ -1,10 +1,10 @@
 use crate::ChampInfo;
 use crate::Champion as ChampionLoL;
-use crate::data_source::{Build, DataSource, Item, Stat};
+use crate::data_source::{DataSource, Item, Stat};
 use indexmap::IndexMap;
 use log::error;
 use serde_derive::Deserialize;
-use serde_json::{Value, json};
+use serde_json::Value;
 
 pub struct KBDataSource {
     client: ureq::Agent,
@@ -160,127 +160,74 @@ impl KBDataSource {
     }
 
     fn get_positions(position: Option<Position>) -> Vec<String> {
-        let mut positions = Vec::new();
+        let positions = vec!["TOP", "JUNGLE", "MID", "BOT", "SUPPORT"];
         if let Some(pos) = position {
-            if pos.top > 0 {
-                positions.push("TOP".to_owned());
-            }
-            if pos.jungle > 0 {
-                positions.push("JUNGLE".to_owned());
-            }
-            if pos.mid > 0 {
-                positions.push("MID".to_owned());
-            }
-            if pos.bot > 0 {
-                positions.push("BOT".to_owned());
-            }
-            if pos.support > 0 {
-                positions.push("SUPPORT".to_owned());
-            }
+            positions
+                .into_iter()
+                .zip([pos.top, pos.jungle, pos.mid, pos.bot, pos.support])
+                .filter_map(|(name, count)| {
+                    if count > 0 {
+                        Some(name.to_owned())
+                    } else {
+                        None
+                    }
+                })
+                .collect()
+        } else {
+            positions.into_iter().map(|s| s.to_owned()).collect()
         }
-
-        // TODO: find better solution, activate all positions for retrieve older builds
-        if positions.is_empty() {
-            positions.push("TOP".to_owned());
-            positions.push("JUNGLE".to_owned());
-            positions.push("MID".to_owned());
-            positions.push("BOT".to_owned());
-            positions.push("SUPPORT".to_owned());
-        }
-
-        positions
     }
 
     fn get_build(&self, build: &KBBuild) -> (String, Vec<Value>, Stat) {
         let mut starting_items: Vec<Item> = vec![];
         let mut blocks = vec![];
-        if build.str_item_sets[0].str_item0.item_id != 0 {
-            starting_items.push(Item {
-                id: build.str_item_sets[0].str_item0.item_id.to_string(),
-                count: 1,
-            })
+        for i in 0..6 {
+            let item_id = match i {
+                0 => build.str_item_sets[0].str_item0.item_id,
+                1 => build.str_item_sets[0].str_item1.item_id,
+                2 => build.str_item_sets[0].str_item2.item_id,
+                3 => build.str_item_sets[0].str_item3.item_id,
+                4 => build.str_item_sets[0].str_item4.item_id,
+                5 => build.str_item_sets[0].str_item5.item_id,
+                _ => 0,
+            };
+            if item_id != 0 {
+                starting_items.push(Item {
+                    id: item_id.to_string(),
+                    count: 1,
+                });
+            }
         }
-        if build.str_item_sets[0].str_item1.item_id != 0 {
-            starting_items.push(Item {
-                id: build.str_item_sets[0].str_item1.item_id.to_string(),
-                count: 1,
-            })
-        }
-        if build.str_item_sets[0].str_item2.item_id != 0 {
-            starting_items.push(Item {
-                id: build.str_item_sets[0].str_item2.item_id.to_string(),
-                count: 1,
-            })
-        }
-        if build.str_item_sets[0].str_item3.item_id != 0 {
-            starting_items.push(Item {
-                id: build.str_item_sets[0].str_item3.item_id.to_string(),
-                count: 1,
-            })
-        }
-        if build.str_item_sets[0].str_item4.item_id != 0 {
-            starting_items.push(Item {
-                id: build.str_item_sets[0].str_item4.item_id.to_string(),
-                count: 1,
-            })
-        }
-        if build.str_item_sets[0].str_item5.item_id != 0 {
-            starting_items.push(Item {
-                id: build.str_item_sets[0].str_item5.item_id.to_string(),
-                count: 1,
-            })
-        }
-        blocks.push(json!(Build {
-            type_: format!(
+        blocks.push(self.make_item_set(
+            starting_items.iter().map(|item| item.id.clone()).collect(),
+            format!(
                 "Early game items | skillOrder : {}",
                 build.skill_sets[0].skill_order
             ),
-            items: starting_items
-        }));
+        ));
 
         let mut final_items: Vec<Item> = vec![];
-        if build.item_sets[0].item0.item_id != 0 {
-            final_items.push(Item {
-                id: build.item_sets[0].item0.item_id.to_string(),
-                count: 1,
-            })
+        for item in [
+            &build.item_sets[0].item0,
+            &build.item_sets[0].item1,
+            &build.item_sets[0].item2,
+            &build.item_sets[0].item3,
+            &build.item_sets[0].item4,
+            &build.item_sets[0].item5,
+        ] {
+            if item.item_id != 0 {
+                final_items.push(Item {
+                    id: item.item_id.to_string(),
+                    count: 1,
+                });
+            }
         }
-        if build.item_sets[0].item1.item_id != 0 {
-            final_items.push(Item {
-                id: build.item_sets[0].item1.item_id.to_string(),
-                count: 1,
-            })
-        }
-        if build.item_sets[0].item2.item_id != 0 {
-            final_items.push(Item {
-                id: build.item_sets[0].item2.item_id.to_string(),
-                count: 1,
-            })
-        }
-        if build.item_sets[0].item3.item_id != 0 {
-            final_items.push(Item {
-                id: build.item_sets[0].item3.item_id.to_string(),
-                count: 1,
-            })
-        }
-        if build.item_sets[0].item4.item_id != 0 {
-            final_items.push(Item {
-                id: build.item_sets[0].item4.item_id.to_string(),
-                count: 1,
-            })
-        }
-        if build.item_sets[0].item5.item_id != 0 {
-            final_items.push(Item {
-                id: build.item_sets[0].item5.item_id.to_string(),
-                count: 1,
-            })
-        }
-        blocks.push(json!(Build {
-            type_: format!(
+        blocks.push(self.make_item_set(
+            final_items.iter().map(|item| item.id.clone()).collect(),
+            format!(
                 "Item order by time finished | Summoner : {}",
                 build.summoner.name
             ),
-            items: final_items
-        }));
+        ));
 
         (
             build.position.position_name.to_uppercase(),
@@ -325,12 +272,13 @@ impl DataSource for KBDataSource {
     ) -> Vec<(String, Vec<Value>, Stat)> {
         let mut champ_data = vec![];
         if let Some(token) = &self.token {
+            let url = format!(
+                "https://api.koreanbuilds.net/builds?chmpname={}&patchid=-2&position=COMPOSITE",
+                champ.name.replace(" ", "%20")
+            );
             let data: BuildResponse = match self
                 .client
-                .get(&format!(
-                    "https://api.koreanbuilds.net/builds?chmpname={}&patchid=-2&position=COMPOSITE",
-                    champ.name
-                ))
+                .get(&url)
                 .header("Accept", "application/json")
                 .header("Authorization", token.as_str())
                 .call()
@@ -343,7 +291,7 @@
                     }
                 },
                 Err(x) => {
-                    error!("Call failed: {}", x);
+                    error!("Call failed for URL: {}, error: {}", url, x);
                     return vec![];
                 }
             };
@@ -392,7 +340,7 @@ mod tests {
         let champ = ChampInfo {
             id: String::from("Annie"),
             name: String::from("Annie"),
-            key: String::from("1"),
+            key: 1,
         };
         let result =
             DATASOURCE.get_champ_data_with_win_pourcentage(&champ, &vec!["MID".to_string()]);
diff --git a/src/main.rs b/src/main.rs
index 0d1f1bd..1840fb7 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -26,6 +26,7 @@ mod ms_data_source;
 use data_source::DataSource;
 use kb_data_source::KBDataSource;
 use ms_data_source::MSDataSource;
+use serde::{self, Deserialize, Deserializer};
 
 #[derive(Deserialize)]
 struct Realm {
@@ -41,7 +42,16 @@ pub struct Champion {
 pub struct ChampInfo {
     id: String,
     name: String,
-    key: String,
+    #[serde(deserialize_with = "from_string_to_u32")]
+    key: u32,
+}
+
+fn from_string_to_u32<'de, D>(deserializer: D) -> Result<u32, D::Error>
+where
+    D: Deserializer<'de>,
+{
+    let s = String::deserialize(deserializer)?;
+    s.parse::<u32>().map_err(serde::de::Error::custom)
 }
 
 const DEFAULT_LOL_CHAMPS_DIR: &str = "./champs";
@@ -56,40 +66,16 @@ const REG_KEY_WIN_64_UNINSTALL: &str =
 const REG_KEY_WIN_UNINSTALL: &str = r"SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall\";
 
 fn main() -> Result<(), Box<dyn Error>> {
-    let args: Vec<String> = env::args().collect();
-    let mut level = LevelFilter::Info;
-    for s in &args {
-        if s.eq_ignore_ascii_case("-v") || s.eq_ignore_ascii_case("--verbose") {
-            level = LevelFilter::Debug;
-            break;
-        }
-    }
-    logsy::set_echo(true);
-    logsy::set_level(level);
-    info!("CGG Item Sets");
+    setup_logging_from_args();
+    info!("CGG Item Sets v{}", env!("CARGO_PKG_VERSION"));
 
-    let lol_champs_dir: PathBuf = match lol_champ_dir() {
-        Ok(x) => x,
-        Err(_e) => PathBuf::from(DEFAULT_LOL_CHAMPS_DIR),
-    };
+    let lol_champs_dir = get_lol_champs_dir();
     info!("LoL Champs Folder: {}", lol_champs_dir.display());
 
-    let client: Agent = create_http_client();
-
-    let realm: Realm = client
-        .get("https://ddragon.leagueoflegends.com/realms/euw.json")
-        .call()?
-        .body_mut()
-        .read_json()?;
+    let client = create_http_client();
+    let realm = fetch_realm(&client)?;
     info!("LoL version: {}", realm.v);
 
-    let champion: Champion = client
-        .get(&format!(
-            "https://ddragon.leagueoflegends.com/cdn/{}/data/en_US/champion.json",
-            realm.v
-        ))
-        .call()?
-        .body_mut()
-        .read_json()?;
+    let champion = fetch_champions(&client, &realm)?;
     info!("LoL numbers of champs: {}", champion.data.len());
 
     let data_sources: Vec<Box<dyn DataSource>> = vec![
@@ -108,13 +94,54 @@ fn main() -> Result<(), Box<dyn Error>> {
     Ok(())
 }
 
-fn get_champ_from_key(champs: &Champion, key: u32) -> Option<String> {
-    for champ in champs.data.values() {
-        if key.to_string() == champ.key {
-            return Some(champ.id.to_owned());
+fn setup_logging_from_args() {
+    let args: Vec<String> = env::args().collect();
+    let mut level = LevelFilter::Info;
+    for s in &args {
+        if s.eq_ignore_ascii_case("-v") || s.eq_ignore_ascii_case("--verbose") {
+            level = LevelFilter::Debug;
+            break;
         }
     }
-    None
+    logsy::set_echo(true);
+    logsy::set_level(level);
+}
+
+fn get_lol_champs_dir() -> PathBuf {
+    match lol_champ_dir() {
+        Ok(x) => x,
+        Err(e) => {
+            error!("Failed to detect LoL champs dir: {e}, using default.");
+            PathBuf::from(DEFAULT_LOL_CHAMPS_DIR)
+        }
+    }
+}
+
+fn fetch_realm(client: &Agent) -> Result<Realm, Box<dyn Error>> {
+    let url = "https://ddragon.leagueoflegends.com/realms/euw.json";
+    info!("Fetching realm info from {url}");
+    let realm: Realm = client.get(url).call()?.body_mut().read_json()?;
+    Ok(realm)
+}
+
+fn fetch_champions(client: &Agent, realm: &Realm) -> Result<Champion, Box<dyn Error>> {
+    let url = format!(
+        "https://ddragon.leagueoflegends.com/cdn/{}/data/en_US/champion.json",
+        realm.v
+    );
+    info!("Fetching champion data from {url}");
+    let champion: Champion = client.get(&url).call()?.body_mut().read_json()?;
+    Ok(champion)
+}
+
+fn get_champ_from_key(champs: &Champion, key: u32) -> Option<String> {
+    champs.data.values().find_map(|champ| {
+        if champ.key == key {
+            Some(champ.id.clone())
+        } else {
+            None
+        }
+    })
 }
 
 fn execute_data_source(
@@ -151,31 +178,33 @@ fn get_and_write_item_set(
     id: u32,
     positions: &[String],
 ) {
-    if let Some(champ_id) = get_champ_from_key(champion, id) {
-        if let Some(champ) = champion.data.get(&champ_id) {
-            if positions.is_empty() {
-                error!("{}: {} empty positions", data_source.get_alias(), &champ_id);
-            } else {
-                let path = lol_champs_dir.join(&champ_id).join("Recommended");
-                match fs::create_dir_all(&path) {
-                    Ok(_) => match data_source.write_item_set(champ, positions, &path) {
-                        Ok(_) => (),
-                        Err(e) => error!(
-                            "{}: Failed to write item set for {} at {}: {}",
-                            data_source.get_alias(),
-                            champ.name,
-                            positions.join(", "),
-                            e
-                        ),
-                    },
-                    Err(e) => {
-                        error!("Failed to create directory for {}: {}", champ_id, e);
-                    }
-                }
-            }
-        } else {
-            error!("{} not found in LoL champs", &champ_id);
+    let champ = match get_champ_from_key(champion, id).and_then(|id| champion.data.get(&id)) {
+        Some(champ) => champ,
+        None => {
+            error!("{} not found in LoL champs", &id);
+            return;
         }
+    };
+
+    if positions.is_empty() {
+        error!("{}: {} empty positions", data_source.get_alias(), &champ.id);
+        return;
+    }
+
+    let path = lol_champs_dir.join(&champ.id).join("Recommended");
+    if let Err(e) = fs::create_dir_all(&path) {
+        error!("Failed to create directory for {}: {}", &champ.id, e);
+        return;
+    }
+
+    if let Err(e) = data_source.write_item_set(champ, positions, &path) {
+        error!(
+            "{}: Failed to write item set for {} at {}: {}",
+            data_source.get_alias(),
+            champ.id,
+            positions.join(", "),
+            e
+        );
     }
 }
diff --git a/src/ms_data_source.rs b/src/ms_data_source.rs
index 1c50c21..c247419 100644
--- a/src/ms_data_source.rs
+++ b/src/ms_data_source.rs
@@ -1,8 +1,9 @@
 use indexmap::IndexMap;
-use log::error;
+use log::{error, warn};
 use regex::Regex;
 use serde_derive::Deserialize;
 use serde_json::Value;
+use std::sync::LazyLock;
 
 use crate::ChampInfo;
 use crate::Champion;
@@ -19,25 +20,116 @@ struct MSChampion {
     search_terms: String,
 }
 
-fn get_champ_from_name(champs: &Champion, name: String) -> Option<u32> {
-    for champ in champs.data.values() {
+// Compile regexes once for performance
+static NUMBER_REGEX: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"([0-9]+\.?[0-9]+)").unwrap());
+static ITEM_REGEX: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"/item/([0-9]+)\.").unwrap());
+
+fn get_champ_from_name(champs: &Champion, name: &str) -> Option<u32> {
+    champs.data.values().find_map(|champ| {
         if name == champ.name || name == champ.id {
-            return champ.key.parse::<u32>().ok();
+            Some(champ.key)
+        } else {
+            None
+        }
+    })
+}
+
+fn find_next_number(rest: &str) -> f32 {
+    if let Some(cap) = NUMBER_REGEX.captures(rest) {
+        if let Some(matched) = cap.get(1) {
+            return matched.as_str().parse::<f32>().unwrap_or(0.0);
+        }
+    }
+    0.0
+}
+
+fn find_matching_div(html: &str, start_pos: usize) -> Option<(usize, usize)> {
+    let mut open_divs = 0;
+    let mut i = start_pos;
+    let len = html.len();
+
+    while i < len {
+        if html[i..].starts_with("<div") {
+            open_divs += 1;
+            i += 4;
+        } else if html[i..].starts_with("</div>") {
+            open_divs -= 1;
+            i += 6;
+            if open_divs == 0 {
+                return Some((start_pos, i));
+            }
+        } else {
+            i += 1;
         }
     }
     None
 }
 
-fn find_next_number(rest: &str) -> f32 {
-    let re = Regex::new(r"([0-9]+\.?[0-9]+)");
-    if let Ok(re) = re {
-        if let Some(cap) = re.captures(rest) {
-            if let Some(matched) = cap.get(1) {
-                return matched.as_str().parse::<f32>().unwrap_or(0.0);
+fn extract_items_from_section(page: &str, section_title: &str) -> Vec<String> {
+    if let Some(h3_pos) = page.find(section_title) {
+        // Find the start of the div containing the h3
+        let div_start = page[..h3_pos].rfind(" for section '{}'",
+                section_title
+            );
         }
     }
-    0.0
+    Vec::new()
+}
+
+fn extract_skill_order_from_table(page: &str) -> String {
+    // Find the table containing the skill order (look for Q.png as anchor)
+    let table_start = page
+        .find("Q.png")
+        .and_then(|pos| page[..pos].rfind("<table"))
+        .unwrap_or(0);
+    let table_end = page[table_start..]
+        .find("</table>")
+        .map(|e| table_start + e + 8)
+        .unwrap_or(page.len());
+    let table_html = &page[table_start..table_end];
+
+    // Extract rows
+    let rows: Vec<&str> = table_html
+        .match_indices("<tr")
+        .map(|(i, _)| {
+            let end = table_html[i..]
+                .find("</tr>")
+                .map(|e| i + e + 5)
+                .unwrap_or(table_html.len());
+            &table_html[i..end]
+        })
+        .collect();
+
+    // Only process Q/W/E/R rows (skip header)
+    let skills = ["Q", "W", "E", "R"];
+    let mut order = [""; 18];
+    for (i, row) in rows.iter().skip(1).take(4).enumerate() {
+        let mut col = 0;
+        let mut pos = 0;
+        while let Some(td_start) = row[pos..].find("<td") {
+            let td_end = row[td_start..]
+                .find("</td>")
+                .map(|e| td_start + e + 5)
+                .unwrap_or(row.len());
+            let td_html = &row[td_start..td_end];
+            if td_html.contains(&format!(">{}<", skills[i])) && col < 18 {
+                order[col] = skills[i];
+            }
+            col += 1;
+            pos = td_end;
+        }
+    }
+    order.join("")
 }
 
 impl MSDataSource {
@@ -67,11 +159,14 @@ impl DataSource for MSDataSource {
             .and_then(|mut resp| resp.body_mut().read_json())
         {
             Ok(champs) => champs,
-            Err(_) => return champs,
+            Err(e) => {
+                error!("Failed to fetch champions from MetaSRC: {}", e);
+                return champs;
+            }
         };
 
         for champ in champions {
-            if let Some(id) = get_champ_from_name(champion, champ.name.to_owned()) {
+            if let Some(id) = get_champ_from_name(champion, &champ.name) {
                 let allowed_roles = ["TOP", "ADC", "SUPPORT", "JUNGLE", "MID"];
                 let roles = champ
                     .search_terms
@@ -79,9 +174,11 @@
                     .map(|s| s.to_uppercase())
                     .filter(|role| allowed_roles.contains(&role.as_str()))
                    .collect::<Vec<String>>();
-                champs.insert(id, roles);
+                if let Some(first_role) = roles.first() {
+                    champs.insert(id, vec![first_role.clone()]);
+                }
             } else {
-                error!("Could not find champ {} in champion data", champ.name);
+                warn!("Could not find champ '{}' in champion data", champ.name);
             }
         }
 
@@ -109,57 +206,48 @@
         if let Ok(mut p) = rep {
             let page = match p.body_mut().read_to_string() {
                 Ok(s) => s,
-                Err(_) => return builds,
-            };
-
-            let mut pos = page.find("Patch ");
-            let patch = if let Some(p) = pos {
-                find_next_number(&page[p..]).to_string()
-            } else {
-                String::new()
-            };
-
-            pos = page.find("Win");
-            let win_rate: f32 = if let Some(p) = pos {
-                find_next_number(&page[p..])
-            } else {
-                0.0
-            };
-
-            pos = page.find("KDA:");
-            let kda: f32 = if let Some(p) = pos {
-                find_next_number(&page[p..])
-            } else {
-                0.0
-            };
-
-            pos = page.find("Games:");
-            let games: u32 = if let Some(p) = pos {
-                find_next_number(&page[p..]) as u32
-            } else {
-                0
-            };
-
-            let mut items = vec![];
-            let mut pos: Option<usize> = page.find("/item/");
-            while let Some(mut p) = pos {
-                p += 6;
-                if let Some(dot_pos) = page[p..].find('.') {
-                    let i = &page[p..p + dot_pos];
-                    items.push(i);
+                Err(e) => {
+                    warn!("Failed to read page for champ {}: {}", champ.id, e);
+                    return builds;
                 }
+            };
 
-                let next = page[p..].find("/item/");
-                if let Some(n) = next {
-                    pos = Some(p + n);
-                } else {
-                    pos = None;
-                }
-            }
+            // Extract patch, win rate, kda, games
+            let patch = page
+                .find("Patch ")
+                .map(|p| find_next_number(&page[p..]).to_string())
+                .unwrap_or_default();
+
+            let win_rate = page
+                .find("Win")
+                .map(|p| find_next_number(&page[p..]))
+                .unwrap_or(0.0);
+
+            let kda = page
+                .find("KDA:")
+                .map(|p| find_next_number(&page[p..]))
+                .unwrap_or(0.0);
+
+            let games = page
+                .find("Games:")
+                .map(|p| find_next_number(&page[p..]) as u32)
+                .unwrap_or(0);
+
+            let items = extract_items_from_section(&page, "Item Purchase Order");
+            let starting_items = extract_items_from_section(&page, "Starting Items");
 
             builds.push((
                 positions[0].to_owned(),
-                vec![self.make_item_set(items, "Set".to_owned())],
+                vec![
+                    self.make_item_set(
+                        starting_items,
+                        format!(
+                            "Starting Items | skillOrder: {}",
+                            extract_skill_order_from_table(&page)
+                        ),
+                    ),
+                    self.make_item_set(items, "Item Purchase Order".to_owned()),
Purchase Order".to_owned()), + ], Stat { win_rate, games, @@ -167,6 +255,11 @@ impl DataSource for MSDataSource { patch, }, )); + } else { + warn!( + "Failed to fetch build page for champ {} at position {}", + champ.id, positions[0] + ); } builds