diff options
author | Baitinq <manuelpalenzuelamerino@gmail.com> | 2022-10-24 12:58:57 +0200 |
---|---|---|
committer | Baitinq <manuelpalenzuelamerino@gmail.com> | 2022-10-24 12:58:57 +0200 |
commit | 2ae5d9ec67134a7c07673c8ff00ce00b4ac74d6f (patch) | |
tree | d824c9ce3924efe069cbb40dd4a3ad1f814085a3 | |
parent | Misc: Update build/run instructions (diff) | |
download | OSSE-2ae5d9ec67134a7c07673c8ff00ce00b4ac74d6f.tar.gz OSSE-2ae5d9ec67134a7c07673c8ff00ce00b4ac74d6f.tar.bz2 OSSE-2ae5d9ec67134a7c07673c8ff00ce00b4ac74d6f.zip |
Crawler: Set queue size to 2222
-rw-r--r-- | crawler/src/main.rs | 2 |
1 file changed, 1 insertion, 1 deletion
diff --git a/crawler/src/main.rs b/crawler/src/main.rs index e8efe77..d74f1f8 100644 --- a/crawler/src/main.rs +++ b/crawler/src/main.rs @@ -19,7 +19,7 @@ async fn crawler(http_client: Client, root_urls: Vec<&str>) { dbg!("Starting to crawl!"); //add root urls to queue - TODO: max q size - let (tx_crawling_queue, rx_crawling_queue) = async_channel::bounded::<String>(4444); + let (tx_crawling_queue, rx_crawling_queue) = async_channel::bounded::<String>(2222); for url in root_urls { tx_crawling_queue.send(String::from(url)).await.unwrap(); } |