author     Baitinq <manuelpalenzuelamerino@gmail.com>  2022-11-05 18:28:20 +0100
committer  Baitinq <manuelpalenzuelamerino@gmail.com>  2022-11-05 18:28:22 +0100
commit     6d729376031e5d63bb4bc761f9d14d4f972e0793 (patch)
tree       1fb0a40e40a337981b3317c48349d8243abababf
parent     Indexer: Hold indexer lock for less time when in search endpoint (diff)
Indexer: Switch back to not serving frontend with actix
Serving the frontend from actix caused it to become unresponsive while the crawler was pushing results to the indexer. The frontend is now again served independently by trunk and the API by actix; since they run as separate processes, the frontend stays responsive.
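For context, here is a minimal sketch of the API-only server this change leaves behind, assuming actix-web and actix-cors as used in the diff below. The port (4444) and the permissive CORS setting are taken from the diff; the search handler body is a placeholder, not the project's real indexer logic.

```rust
// Sketch: the indexer process now exposes only the API on its own port,
// while the frontend is served separately by `trunk serve`.
use actix_cors::Cors;
use actix_web::{get, web, App, HttpResponse, HttpServer, Responder};

#[get("/api/search/{query}")]
async fn search(query: web::Path<String>) -> impl Responder {
    // Placeholder: the real indexer looks the query up in its index.
    HttpResponse::Ok().body(format!("results for {}", query.into_inner()))
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new()
            // Permissive CORS, since the browser loads the frontend from a
            // different origin (the trunk dev server).
            .wrap(Cors::permissive())
            .service(search)
    })
    .bind(("0.0.0.0", 4444))? // API only; no SPA / static file service here
    .run()
    .await
}
```

Because the frontend is now loaded from the trunk dev server, which is a different origin than the API, the permissive CORS layer is what lets the browser call the API at all.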
-rw-r--r--  README.md            |  7
-rw-r--r--  crawler/src/main.rs  |  2
-rw-r--r--  frontend/src/main.rs | 12
-rw-r--r--  indexer/src/main.rs  | 17
4 files changed, 23 insertions, 15 deletions
diff --git a/README.md b/README.md
index aee4671..910e133 100644
--- a/README.md
+++ b/README.md
@@ -23,7 +23,12 @@ Install cargo through your preferred method.
 
 ### 2. Run!
 ```
-$ trunk serve frontend/index.html
 $ cargo run --bin indexer
 $ cargo run --bin crawler
+$ trunk serve frontend/index.html
+```
+
+### 3. Use?!
+```
+ Navigate to 127.0.0.1:8080 on your favorite browser:)
 ```
diff --git a/crawler/src/main.rs b/crawler/src/main.rs
index 557ba4e..3700d65 100644
--- a/crawler/src/main.rs
+++ b/crawler/src/main.rs
@@ -55,7 +55,7 @@ async fn crawler(http_client: Client, root_urls: Vec<&str>, max_queue_size: usiz
         //push content to index
         let indexer_response = match push_crawl_entry_to_indexer(
             &http_client,
-            "http://127.0.0.1:8080/api/resource".to_string(),
+            "http://127.0.0.1:4444/api/resource".to_string(),
             url,
             content,
         )
diff --git a/frontend/src/main.rs b/frontend/src/main.rs
index 29bdd71..e449e4a 100644
--- a/frontend/src/main.rs
+++ b/frontend/src/main.rs
@@ -1,6 +1,7 @@
 mod app;
 
 use app::OSSE;
+use web_sys::window;
 use yew::prelude::*;
 use yew_router::prelude::*;
 
@@ -15,12 +16,19 @@ enum Route {
 }
 
 fn switch_routes(routes: Route) -> Html {
+    let location = window().unwrap().location();
+    let api_endpoint = format!(
+        "{}//{}:{}/api",
+        location.protocol().unwrap(),
+        location.hostname().unwrap(),
+        4444
+    );
     match routes {
         Route::OSSEHome | Route::OSSEHomeEmptySearch => html! {
-            <OSSE api_endpoint={"/api"} initial_search_query={None as Option<String>} />
+            <OSSE api_endpoint={api_endpoint} initial_search_query={None as Option<String>} />
         },
         Route::OSSESearch { query } => html! {
-            <OSSE api_endpoint={"/api"} initial_search_query={Some(query)} />
+            <OSSE api_endpoint={api_endpoint} initial_search_query={Some(query)} />
         },
     }
 }
diff --git a/indexer/src/main.rs b/indexer/src/main.rs
index 8d738cd..592a0a6 100644
--- a/indexer/src/main.rs
+++ b/indexer/src/main.rs
@@ -32,35 +32,30 @@ struct AppState {
 async fn main() -> std::io::Result<()> {
     println!("Hello, world! Im the indexer!");
 
-    serve_http_endpoint("0.0.0.0", 8080).await
+    serve_http_endpoint("0.0.0.0", 4444).await
 }
 
 async fn serve_http_endpoint(address: &str, port: u16) -> std::io::Result<()> {
     let shared_state = web::Data::new(AppState {
-        indexer: Mutex::new(Box::new(IndexerImplementation::new())),
+        indexer: Mutex::new(Box::new(IndexerImplementation::new())), //maybe mutex is not the best option
     });
     HttpServer::new(move || {
-        let cors = Cors::permissive();
         App::new()
-            .wrap(cors)
+            .wrap(Cors::permissive())
             .app_data(shared_state.clone())
             .service(add_resource)
             .service(
                 web::resource(["/api/search", "/api/search/", "/api/search/{query}"]).to(search),
             )
-            .service(
-                actix_web_lab::web::spa()
-                    .index_file("./frontend/dist/index.html")
-                    .static_resources_mount("/")
-                    .static_resources_location("./frontend/dist")
-                    .finish(),
-            ) //TODO: maybe separate gui backend from api?
     })
     .bind((address, port))?
     .run()
     .await
 }
 
+//TODO: Max description size
+//TODO: Current result below search bar updates with it
+
 //TODO: sufficiently simmilar word in search (algorithm)
 #[post("/api/resource")]
 async fn add_resource(
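For illustration, the endpoint construction added in frontend/src/main.rs could be factored into a small helper like the hypothetical `api_endpoint` below. Only the port (4444, where the indexer now listens) is hard-coded; protocol and hostname come from `window.location`. This is a sketch assuming the web-sys crate with its `Window` and `Location` features enabled, running in the browser (wasm) as the frontend does.

```rust
// Hypothetical helper mirroring the frontend change above: build the API base
// URL from the page's own origin instead of the old relative "/api" path.
use web_sys::window;

fn api_endpoint() -> String {
    let location = window().expect("no global window").location();
    format!(
        "{}//{}:{}/api",
        location.protocol().expect("no protocol"), // e.g. "http:" (includes the colon)
        location.hostname().expect("no hostname"), // e.g. "127.0.0.1"
        4444
    )
}
```

The relative "/api" path no longer works because the API is not served from the same process (or port) as the page, so the frontend has to name the indexer's port explicitly.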