use actix_web::{get, post, web, App, HttpServer, Responder};
use rand::Rng;
use serde::{Deserialize, Serialize};
use std::collections::{HashMap, HashSet};
use std::hash::{Hash, Hasher};
use std::sync::{Arc, Mutex};

#[derive(Debug, Clone, Serialize)]
struct CrawledResource {
    url: String,
    priority: u32, // how do we even calculate this
    word: Arc<String>,
}

// We implement PartialEq, Eq and Hash ourselves so that the priority field is ignored:
// two resources with the same url and word count as the same entry in a HashSet,
// even if their priorities differ.
impl PartialEq for CrawledResource {
    fn eq(&self, other: &Self) -> bool {
        self.url == other.url && self.word == other.word
    }
}

impl Eq for CrawledResource {}

impl Hash for CrawledResource {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.url.hash(state);
        self.word.hash(state);
    }
}
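
// A minimal sanity-check sketch (assuming the standard test harness; not part of the
// indexer logic itself): because equality and hashing ignore `priority`, inserting the
// same url/word pair twice should leave only one entry in a HashSet.
#[cfg(test)]
mod crawled_resource_tests {
    use super::*;

    #[test]
    fn priority_is_ignored_when_deduplicating() {
        let make = |priority| CrawledResource {
            url: "https://example.com".to_string(),
            priority,
            word: Arc::new("hello".to_string()),
        };

        let mut set = HashSet::new();
        set.insert(make(1));
        set.insert(make(2));

        // Both inserts refer to the same url/word, so the set holds a single resource.
        assert_eq!(set.len(), 1);
    }
}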

struct AppState {
    database: Mutex<HashMap<String, HashSet<CrawledResource>>>,
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    println!("Hello, world! I'm the indexer!");
    serve_http_endpoint("0.0.0.0", 4444).await
}

async fn serve_http_endpoint(address: &str, port: u16) -> std::io::Result<()> {
    let shared_state = web::Data::new(AppState {
        database: Mutex::new(HashMap::new()),
    });

    HttpServer::new(move || {
        App::new()
            .app_data(shared_state.clone())
            .service(search)
            .service(add_resource)
    })
    .bind((address, port))?
    .run()
    .await
}

#[derive(Deserialize, Debug)]
struct Resource {
    url: String,
    content: String,
}

#[post("/resource")]
async fn add_resource(data: web::Data<AppState>, resource: web::Json<Resource>) -> impl Responder {
    // Parse the HTML content into plain text.
    let text = html2text::from_read(resource.content.as_str().as_bytes(), resource.content.len());
    let split_words = text.split(' ');

    // Fix up the words: drop words containing non-alphabetic characters, drop empty words,
    // and lowercase everything.
    let fixed_words: Vec<String> = split_words
        .filter(|w| !w.chars().any(|c| !c.is_ascii_alphabetic()))
        .filter(|w| !w.is_empty() && *w != " ")
        .map(|w| w.to_ascii_lowercase())
        .collect();

    println!("Extracted words: {:?}", fixed_words);

    // For each extracted word, add the resource to the database (word -> set of resources).
    let mut database = data.database.lock().unwrap();
    for word in fixed_words {
        let resource_to_add = CrawledResource {
            url: resource.url.clone(),
            priority: calculate_word_priority(&word, resource.content.as_str()),
            word: Arc::new(word.clone()),
        };

        match database.get_mut(&word) {
            Some(resources) => _ = resources.insert(resource_to_add),
            None => _ = database.insert(word.clone(), HashSet::from([resource_to_add])),
        }
    }

    println!("Added resource! {:?}", database.len());

    format!("{:?}", resource)
}

#[get("/search/{term}")]
async fn search(data: web::Data<AppState>, term: web::Path<String>) -> impl Responder {
    let query: Vec<&str> = term.split(' ').collect();
    let database = data.database.lock().unwrap();

    // Intersect the result sets of all query words; a word with no hits means no results at all.
    let mut valid_results: Option<HashSet<CrawledResource>> = None;
    for w in query {
        let curr_word_results = match database.get(w) {
            None => return "[]".to_string(),
            Some(results) => results,
        };

        match valid_results {
            // First word: take its result set as the starting point.
            None => {
                valid_results = Some(curr_word_results.to_owned());
            }
            // Subsequent words: keep only the resources present in both sets.
            Some(results) => {
                let intersection: HashSet<CrawledResource> = curr_word_results
                    .intersection(&results)
                    .map(|s| s.to_owned())
                    .collect();
                valid_results = Some(intersection);
            }
        }
    }

    serde_json::to_string(&valid_results.unwrap()).unwrap()
}

// TODO: figure out how to actually rank a word within a page; for now return a random priority.
fn calculate_word_priority(_word: &str, _html_site: &str) -> u32 {
    rand::thread_rng().gen::<u32>()
}
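
// A rough sketch of how the two endpoints could be exercised once the server is running
// locally (assuming curl; the JSON body matches the `Resource` struct above):
//
//   curl -X POST http://localhost:4444/resource \
//        -H "Content-Type: application/json" \
//        -d '{"url": "https://example.com", "content": "<p>hello world</p>"}'
//
//   curl http://localhost:4444/search/hello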