//! ferret/core/src/main.rs — search crawler indexing service.

#[macro_use]
extern crate log;
use ammonia::clean;
use axum::{
http::StatusCode,
response::IntoResponse,
routing::{get, post},
Json, Router,
};
use scraper::{Html, Selector};
use serde::{Deserialize, Serialize};
use sqlx::sqlite::SqlitePool;
use std::env;
use std::net::SocketAddr;
use whatlang::{detect_lang, Lang};
#[tokio::main]
async fn main() {
tracing_subscriber::fmt::init();
let pool = SqlitePool::connect(&env::var("DATABASE_URL").unwrap())
.await
.unwrap();
let app = Router::new()
// `GET /` goes to `root`
.route("/", get(root));
let addr = SocketAddr::from(([127, 0, 0, 1], 3000));
tracing::debug!("listening on {}", addr);
axum::Server::bind(&addr)
.serve(app.into_make_service())
.await
.unwrap();
}
/// Handler for `GET /`: a static landing/health-check body.
///
/// axum turns a `&'static str` return into a `200 OK` plain-text response.
async fn root() -> &'static str {
    const GREETING: &str = "Hello, World!";
    GREETING
}
async fn update_index(pool: &SqlitePool) {
let mut conn = pool.acquire().await.unwrap();
let crawled = sqlx::query!(
r#"
SELECT last_fetched, url, body
FROM crawled_urls
ORDER BY last_fetched
"#
)
.fetch_all(pool)
.await
.unwrap();
for res in crawled {
let size = std::mem::size_of_val(&res.body) as u32;
let lang = detect_lang(&res.body).unwrap().code();
let document = Html::parse_document(&res.body);
let title_selector = Selector::parse("title").unwrap();
let title = match document.select(&title_selector).next() {
Some(v) => v.inner_html(),
None => res.url.clone(),
};
let desc_selector = Selector::parse("p").unwrap();
let summary = match document.select(&desc_selector).next() {
Some(v) => v.inner_html(),
None => String::new(),
};
let id = sqlx::query!(
r#"
REPLACE INTO search_index ( url, size, language, title, summary, content, last_updated )
VALUES ( ?1, ?2, ?3, ?4, ?5, ?6, ?7 )
"#,
res.url,
size,
lang,
title,
summary,
res.body,
res.last_fetched,
)
.execute(&mut *conn)
.await
.unwrap()
.last_insert_rowid();
}
}