author | Baitinq <manuelpalenzuelamerino@gmail.com> | 2022-10-30 15:34:46 +0100
---|---|---
committer | Baitinq <manuelpalenzuelamerino@gmail.com> | 2022-10-30 16:12:30 +0100
commit | e318a38e3913f6a1c8fe46c12d8ddcdc5b0503e1 (patch) |
tree | 9aecf149086207b32dc6395c72f15b86957aa592 | /crawler
parent | Misc: Remove unneeded dependencies (diff) |
download | OSSE-e318a38e3913f6a1c8fe46c12d8ddcdc5b0503e1.tar.gz OSSE-e318a38e3913f6a1c8fe46c12d8ddcdc5b0503e1.tar.bz2 OSSE-e318a38e3913f6a1c8fe46c12d8ddcdc5b0503e1.zip |
Frontend: Move app-specific code to app.rs
Diffstat (limited to 'crawler')
-rw-r--r-- | crawler/src/main.rs | 1
1 file changed, 1 insertion, 0 deletions
```diff
diff --git a/crawler/src/main.rs b/crawler/src/main.rs
index a3dc06b..ce9943f 100644
--- a/crawler/src/main.rs
+++ b/crawler/src/main.rs
@@ -124,6 +124,7 @@ async fn crawl_url(http_client: &Client, url: &str) -> Result<(String, Vec<Strin
     //i dont understand dbg! (how to print {})
     //is there empty urls?
     //user agent?
+    //frontend: search/query and redirect
     println!("Returning next urls, {:?}", next_urls);
 
     Ok((response_text, next_urls))
```
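The hunk only shows the tail of `crawl_url`, which returns a `(response_text, next_urls)` pair. The following is a minimal, hedged sketch of how a caller might consume that return value; the error type (`String`), the `reqwest`/`tokio` setup, and the link-extraction stub are assumptions for illustration, not the repository's actual code (the real signature is truncated in the hunk header).

```rust
// Hedged sketch only: the real crawl_url lives in crawler/src/main.rs.
use reqwest::Client;

async fn crawl_url(http_client: &Client, url: &str) -> Result<(String, Vec<String>), String> {
    // Fetch the page body; link extraction is omitted in this sketch.
    let response_text = http_client
        .get(url)
        .send()
        .await
        .map_err(|e| e.to_string())?
        .text()
        .await
        .map_err(|e| e.to_string())?;
    let next_urls: Vec<String> = Vec::new();

    println!("Returning next urls, {:?}", next_urls);

    Ok((response_text, next_urls))
}

#[tokio::main]
async fn main() -> Result<(), String> {
    let http_client = Client::new();
    // A crawl loop would index response_text and queue next_urls for further crawling.
    let (response_text, next_urls) = crawl_url(&http_client, "https://example.com").await?;
    println!("fetched {} bytes, discovered {} links", response_text.len(), next_urls.len());
    Ok(())
}
```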