From 096ef9e2a2f21281f1b516b2de420f04df1db56e Mon Sep 17 00:00:00 2001
From: Baitinq
Date: Sun, 23 Oct 2022 18:53:49 +0200
Subject: Crawler+Indexer: Rust cleanup

Getting more familiar with the language, so I fixed some non-optimal
into_iter() usage, removed unnecessary .clone()s, and dropped an
unnecessary hack where we could just get a &mut for inserting into the
indexer URL database.
---
 crawler/src/main.rs | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

(limited to 'crawler')

diff --git a/crawler/src/main.rs b/crawler/src/main.rs
index 6161578..e8efe77 100644
--- a/crawler/src/main.rs
+++ b/crawler/src/main.rs
@@ -90,7 +90,7 @@ async fn crawl_url(http_client: &Client, url: &str) -> Result<(String, Vec
         .map(|us| {
-            us.into_iter()
+            us.iter()
                 .map(|u| {
                     //https://stackoverflow.com/questions/9646407/two-forward-slashes-in-a-url-src-href-attribute
                     if u.starts_with("//") {
@@ -98,14 +98,13 @@ async fn crawl_url(http_client: &Client, url: &str) -> Result<(String, Vec
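
For reference, a minimal sketch of the two cleanups the commit message describes. The database shape (HashMap<String, Vec<String>>), the function names add_result and normalize, and the https default for protocol-relative URLs are assumptions for illustration, not code taken from the repository.

use std::collections::HashMap;

// Indexer-side idea: assuming the URL database is roughly a map from words to
// URLs (the real type is not shown in this patch), inserting through a &mut
// avoids the clone-and-reinsert hack.
fn add_result(db: &mut HashMap<String, Vec<String>>, word: &str, url: &str) {
    // The hack: read, clone, modify, re-insert.
    //   let mut urls = db.get(word).cloned().unwrap_or_default();
    //   urls.push(url.to_string());
    //   db.insert(word.to_string(), urls);
    // The cleanup: take a &mut to the stored Vec and push in place.
    db.entry(word.to_string()).or_default().push(url.to_string());
}

// Crawler-side idea: iter() borrows the elements (&String) and leaves `urls`
// usable afterwards, while into_iter() would consume the Vec. Defaulting
// protocol-relative ("//") URLs to https is an assumption here.
fn normalize(urls: &[String]) -> Vec<String> {
    urls.iter()
        .map(|u| {
            if u.starts_with("//") {
                format!("https:{}", u)
            } else {
                u.clone()
            }
        })
        .collect()
}

fn main() {
    let mut db = HashMap::new();
    add_result(&mut db, "rust", "https://example.com/rust");
    let normalized = normalize(&["//example.com/a".to_string()]);
    println!("{:?} {:?}", db, normalized);
}

entry().or_default() hands back a &mut to the stored value, so the insert path needs no lookup, clone and reinsert round trip, and iter() keeps the original collection available for later use.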