| author | Baitinq <manuelpalenzuelamerino@gmail.com> | 2022-10-20 23:51:21 +0200 |
|---|---|---|
| committer | Baitinq <manuelpalenzuelamerino@gmail.com> | 2022-10-20 23:51:21 +0200 |
| commit | 79a9412dcee379aa1bf7c2a26c84d0044f7c7da2 (patch) | |
| tree | e62f67fb6ae94c5802cbb4660c7f5f42a5a026f7 | |
| parent | Crawler: Wrap crawl response in Result type (diff) | |
Crawler: Add indexer interaction skeleton
-rw-r--r-- | crawler/src/main.rs | 6 |
1 file changed, 5 insertions(+), 1 deletion(-)
diff --git a/crawler/src/main.rs b/crawler/src/main.rs
index 9ebfc23..999c0c0 100644
--- a/crawler/src/main.rs
+++ b/crawler/src/main.rs
@@ -37,6 +37,7 @@ fn crawler(root_urls: Vec<&str>) {
         println!("Next urls: {:?}", crawled_urls);
 
         //push content to index
+        _ = push_crawl_entry_to_indexer(url, _content);
 
         for url in crawled_urls {
             crawling_queue.push(url);
@@ -44,7 +45,6 @@ fn crawler(root_urls: Vec<&str>) {
     }
 }
 
-//takes url, returns content and list of urls
 fn crawl_url(url: &str) -> Result<(String, Vec<String>), ()> {
     let url = "https://".to_owned() + url;
 
@@ -89,3 +89,7 @@ fn crawl_url(url: &str) -> Result<(String, Vec<String>), ()> {
 
     Ok((response_text, next_urls))
 }
+
+fn push_crawl_entry_to_indexer(_url: String, _content: String) -> Result<(), ()> {
+    Ok(())
+}
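The new push_crawl_entry_to_indexer function is only a skeleton: it ignores both arguments and immediately returns Ok(()), and the call site discards the result with `_ =`. As a rough idea of where this interaction could go, the sketch below posts a crawled URL and its content to an indexer over HTTP. The endpoint address, the JSON body shape, and the use of reqwest's blocking client (with the "json" feature) are assumptions for illustration only; none of them are part of this commit.

```rust
use std::collections::HashMap;

// Hypothetical fleshed-out version of the stub added in this commit.
// Assumes the indexer exposes an HTTP endpoint at http://127.0.0.1:4444/resource
// and that the crawler depends on reqwest's blocking client; both are assumptions.
fn push_crawl_entry_to_indexer(url: String, content: String) -> Result<(), ()> {
    let indexer_endpoint = "http://127.0.0.1:4444/resource"; // assumed address

    // Send the crawl entry as a simple JSON object: { "url": ..., "content": ... }
    let mut body = HashMap::new();
    body.insert("url", url);
    body.insert("content", content);

    let client = reqwest::blocking::Client::new();
    client
        .post(indexer_endpoint)
        .json(&body)
        .send()
        .map(|_| ())
        .map_err(|_| ()) // collapse any transport error into the unit error used here
}
```

Keeping the Result<(), ()> signature means the call site in crawler() stays unchanged; a richer error type could replace the unit error once the indexer's API is settled.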