From 993c91eb5f20195c1b54688652f01552da9caf24 Mon Sep 17 00:00:00 2001
From: OMGeeky
Date: Wed, 8 May 2024 18:49:42 +0200
Subject: [PATCH] disallow crawling and remove errors for it

---
 src/main.rs | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/src/main.rs b/src/main.rs
index e2c58d4..0fefef3 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -32,6 +32,11 @@ async fn handle_request(req: Request<Body>) -> Result<Response<Body>, Infallible>
         (&Method::GET, "/googleapi/auth") => auth_get(req).await,
         (&Method::GET, "/") => Ok(Response::new(Body::from("Hello, World!"))),
         (&Method::GET, "/favicon.ico") => Ok(Response::default()),
+        (&Method::GET, "/robots.txt") => {
+            Ok(Response::new(Body::from("User-agent: *\nDisallow: /")))
+        }
+        (&Method::GET, "/sitemap.xml") => Ok(Response::default()),
+        (&Method::GET, "/health") => Ok(Response::new(Body::from("OK"))),
        other => {
            error!("404: {:?} {:?}", other.0, other.1);
            let mut not_found = Response::default();
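
One way to exercise the new /robots.txt route is a small unit test against
handle_request, sketched below. This is not part of the patch: it assumes the
hyper 0.14-era Request/Response/Body types visible in the diff, a tokio test
runtime with the "macros" feature, and that the test module lives alongside
handle_request in src/main.rs; the test and module names are hypothetical.

    // Hypothetical smoke test; assumes hyper 0.14 and tokio with "macros".
    #[cfg(test)]
    mod tests {
        use super::*;
        use hyper::{Body, Method, Request};

        #[tokio::test]
        async fn robots_txt_disallows_all_crawling() {
            // Build a GET /robots.txt request and run it through the handler.
            let req = Request::builder()
                .method(Method::GET)
                .uri("/robots.txt")
                .body(Body::empty())
                .unwrap();
            let res = handle_request(req).await.unwrap();

            // The body should tell every user agent to stay out.
            let bytes = hyper::body::to_bytes(res.into_body()).await.unwrap();
            assert_eq!(&bytes[..], b"User-agent: *\nDisallow: /");
        }
    }

The same pattern would cover /sitemap.xml and /health, asserting an empty
default response and an "OK" body respectively.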