author     Baitinq <manuelpalenzuelamerino@gmail.com>  2022-10-25 20:44:20 +0200
committer  Baitinq <manuelpalenzuelamerino@gmail.com>  2022-10-25 20:44:20 +0200
commit     d606fe6a963b985f8f53be9dd54d7ef43ce5ddbf
tree       c281f95440b1d2ddda96ce8170676a601fba4e75 /crawler
parent     Crawler: Use async Client
Crawler: Fix bad error handling by using match
Diffstat (limited to 'crawler')
-rw-r--r--  crawler/src/main.rs  15
1 file changed, 9 insertions(+), 6 deletions(-)
diff --git a/crawler/src/main.rs b/crawler/src/main.rs
index d7f60c9..dce23a4 100644
--- a/crawler/src/main.rs
+++ b/crawler/src/main.rs
@@ -35,13 +35,16 @@ async fn crawler(http_client: Client, root_urls: Vec<&str>) {
         let url = rx_crawling_queue.recv().await.unwrap();
         let http_client = http_client.clone();
         tokio::spawn(async move {
-            let crawl_res = crawl_url(&http_client, url.as_str()).await;
-            if crawl_res.is_err() {
-                dbg!("Error crawling {}", url);
-                return;
-            }
+            let (content, crawled_urls) = match crawl_url(&http_client, url.as_str()).await {
+                Err(e) => {
+                    println!("Error crawling ({}): {}", url, e);
+                    return;
+                }
+                Ok(result) => result,
+            };
 
-            let (content, crawled_urls) = crawl_res.unwrap();
+            //DONT FORGET ENUMS
+            //CAN WE DO UNWRAP OR RETURN or lambda
 
             //dbg!("Content: {:?}", &content);
             dbg!("Next urls: {:?}", &crawled_urls);
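
For reference, a minimal self-contained sketch of the pattern this commit adopts: match on the Result once, bind both values in the Ok arm, and return early from the task in the Err arm, instead of checking is_err() and later calling unwrap(). fetch_page and handle below are hypothetical stand-ins for crawl_url and the spawned task body; the real code is async and takes an HTTP Client.

    // Hypothetical stand-in for crawl_url: returns page content plus discovered links.
    fn fetch_page(url: &str) -> Result<(String, Vec<String>), String> {
        if url.starts_with("https://") {
            Ok((
                "<html>...</html>".to_string(),
                vec!["https://example.com/next".to_string()],
            ))
        } else {
            Err(format!("unsupported scheme in {}", url))
        }
    }

    // Stand-in for the spawned task body: one match binds both values or bails out.
    fn handle(url: &str) {
        let (content, crawled_urls) = match fetch_page(url) {
            Err(e) => {
                println!("Error crawling ({}): {}", url, e);
                return;
            }
            Ok(result) => result,
        };
        println!("content bytes: {}, next urls: {:?}", content.len(), crawled_urls);
    }

    fn main() {
        handle("https://example.com"); // Ok path
        handle("ftp://example.com");   // Err path, logs and returns early
    }

On the in-diff TODO asking whether an "unwrap or return" form exists: a terser option is let-else, e.g. `let Ok((content, crawled_urls)) = crawl_url(&http_client, url.as_str()).await else { return; };`, but let-else only stabilized in Rust 1.65 (November 2022) and it discards the error value, so the match arm that keeps `e` around for logging is arguably the better fit here.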