From 4445c4d168df1ab71431da9db1a053629ed4d0d9 Mon Sep 17 00:00:00 2001
From: Baitinq
Date: Sun, 23 Oct 2022 12:06:33 +0200
Subject: Crawler: Only crawl 2 urls per url

This makes it so that we don't get rate limited from websites.
---
 crawler/src/main.rs | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/crawler/src/main.rs b/crawler/src/main.rs
index f8dc226..d1333fe 100644
--- a/crawler/src/main.rs
+++ b/crawler/src/main.rs
@@ -86,9 +86,11 @@ async fn crawl_url(http_client: &Client, url: &str) -> Result<(String, Vec<String>), String> {
             us.into_iter()
                 .map(|u| {
@@ -105,6 +107,10 @@ async fn crawl_url(http_client: &Client, url: &str) -> Result<(String, Vec<String>), String> {
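
The hunk bodies above are truncated, so the exact code added in main.rs is not recoverable from this patch. As a rough Rust sketch of the behaviour described in the subject line (keeping at most two of the URLs discovered on each crawled page so target sites are not hit too quickly), assuming a hypothetical helper name and example URLs that are not part of the original crawler:

```rust
// Hypothetical sketch (not the actual diff body): cap the number of URLs a
// single crawled page can contribute to the crawl queue.
fn limit_found_urls(found_urls: Vec<String>, max_urls: usize) -> Vec<String> {
    // Keep only the first `max_urls` discovered links and drop the rest.
    found_urls.into_iter().take(max_urls).collect()
}

fn main() {
    // Example URLs, stand-ins for whatever crawl_url actually extracts.
    let found = vec![
        "https://example.com/a".to_string(),
        "https://example.com/b".to_string(),
        "https://example.com/c".to_string(),
    ];

    // With the 2-per-url limit from the commit subject, only the first two
    // discovered URLs are kept for further crawling.
    let next_urls = limit_found_urls(found, 2);
    assert_eq!(next_urls.len(), 2);
    println!("next urls to crawl: {:?}", next_urls);
}
```

In the real crawl_url this would most likely amount to a `.take(2)`-style cut inserted into the iterator chain over the found URLs (the `us.into_iter().map(|u| ...)` fragment visible in the first hunk), but that is an inference from the truncated hunks, not the verbatim change.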