Begin port to actix-web 3.0

* actix-files is quite easy to use now
* uses futures 0.3 and async/await now; the streaming archiver is not
  ported yet (see the sketch below)

0.11.x versions will be dev versions until the port is done
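
A rough direction for the remaining piece (a hypothetical sketch, not code from this commit): the blocking tar writer can run on a plain thread and push chunks into a futures-0.3 channel, whose receiving half already implements Stream and can later be served by actix-web 3 with .streaming(). The ChannelWriter adapter and stream_tar_in_thread name below are placeholders, not the project's actual API.

// Hypothetical sketch only: a plain thread builds the tar archive into a
// channel-backed writer; the receiving half is a futures-0.3 Stream of Bytes.
use std::io::Write;
use std::path::PathBuf;

use actix_web::web::Bytes; // actix-web 3 re-exports bytes 0.5 here
use futures::channel::mpsc;
use futures::stream::Stream;

// io::Write adapter that forwards every written chunk into the channel.
struct ChannelWriter(mpsc::UnboundedSender<Result<Bytes, std::io::Error>>);

impl Write for ChannelWriter {
    fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
        self.0
            .unbounded_send(Ok(Bytes::copy_from_slice(buf)))
            .map_err(|_| std::io::Error::new(std::io::ErrorKind::BrokenPipe, "receiver dropped"))?;
        Ok(buf.len())
    }
    fn flush(&mut self) -> std::io::Result<()> {
        Ok(())
    }
}

// Builds the archive on a worker thread; the returned receiver implements Stream.
fn stream_tar_in_thread(path: PathBuf) -> impl Stream<Item = Result<Bytes, std::io::Error>> {
    let (tx, rx) = mpsc::unbounded();
    std::thread::spawn(move || {
        let mut builder = tar::Builder::new(ChannelWriter(tx));
        let base = path.file_name().map(PathBuf::from).unwrap_or_default();
        // a real port would forward errors through the channel instead of only logging them
        if let Err(e) = builder.append_dir_all(&base, &path) {
            log::error!("error building tar archive: {}", e);
        }
        let _ = builder.finish();
    });
    rx
}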
Дамјан Георгиевски 2020-09-12 04:28:20 +02:00
parent ec93e016bf
commit cce8560bd5
5 changed files with 1443 additions and 1236 deletions

Cargo.lock (generated), 2315 lines changed
File diff suppressed because it is too large

Cargo.toml

@@ -1,6 +1,6 @@
[package]
name = "http-server"
-version = "0.10.2"
+version = "0.11.0"
authors = ["Damjan Georgievski <gdamjan@gmail.com>"]
license = "MIT"
readme = "README.md"
@@ -8,13 +8,13 @@ homepage = "https://github.com/gdamjan/http-server-rs"
edition = "2018"

[dependencies]
-actix-web = "1.0"
-actix-files = "0.1.1"
+actix-web = "3.0"
+actix-files = "0.3.0"
bytes = "0.4"
clap = "2"
env_logger = "*"
log = "*"
-futures = "0.1"
+futures = "0.3"
tar = "0.4"
percent-encoding = "2.0"
v_htmlescape = "0.4"

src/listing.rs (new file), 178 lines

@@ -0,0 +1,178 @@
use actix_files::Directory;
use actix_web::{HttpRequest, HttpResponse};
use actix_web::dev::ServiceResponse;
use std::path::Path;
use percent_encoding::{utf8_percent_encode, CONTROLS}; // NON_ALPHANUMERIC
use v_htmlescape::escape as escape_html_entity;
use std::fmt::Write;

macro_rules! encode_file_url {
    ($path:ident) => {
        utf8_percent_encode(&$path, CONTROLS)
    };
}

// " -- &quot; & -- &amp; ' -- &#x27; < -- &lt; > -- &gt; / -- &#x2f;
macro_rules! encode_file_name {
    ($entry:ident) => {
        escape_html_entity(&$entry.file_name().to_string_lossy())
    };
}

pub fn directory_listing(
    dir: &Directory,
    req: &HttpRequest,
) -> Result<ServiceResponse, std::io::Error> {
    let index_of = format!("Index of {}", req.path());
    let mut body = String::new();
    let base = Path::new(req.path());

    for entry in dir.path.read_dir()? {
        if dir.is_visible(&entry) {
            let entry = entry.unwrap();
            let p = match entry.path().strip_prefix(&dir.path) {
                Ok(p) if cfg!(windows) => {
                    base.join(p).to_string_lossy().replace("\\", "/")
                }
                Ok(p) => base.join(p).to_string_lossy().into_owned(),
                Err(_) => continue,
            };

            // if file is a directory, add '/' to the end of the name
            if let Ok(metadata) = entry.metadata() {
                if metadata.is_dir() {
                    let _ = write!(
                        body,
                        "<li><a href=\"{}\">{}/</a></li>",
                        encode_file_url!(p),
                        encode_file_name!(entry),
                    );
                } else {
                    let _ = write!(
                        body,
                        "<li><a href=\"{}\">{}</a></li>",
                        encode_file_url!(p),
                        encode_file_name!(entry),
                    );
                }
            } else {
                continue;
            }
        }
    }

    let html = format!(
        "<html>\
         <head><title>{}</title></head>\
         <body><h1>{}</h1>\
         <ul>\
         {}\
         </ul></body>\n</html>",
        index_of, index_of, body
    );
    Ok(ServiceResponse::new(
        req.clone(),
        HttpResponse::Ok()
            .content_type("text/html; charset=utf-8")
            .body(html),
    ))
}
// fn handle_directory(
//     dir: &fs::Directory,
//     req: &HttpRequest,
// ) -> Result<ServiceResponse, std::io::Error> {
//     let rd = std::fs::read_dir(&dir.path)?;
//     fn optimistic_is_dir(entry: &std::fs::DirEntry) -> bool {
//         // consider it non directory if metadata reading fails, better than an unwrap() panic
//         entry
//             .metadata()
//             .map(|m| m.file_type().is_dir())
//             .unwrap_or(false)
//     }
//     let mut paths: Vec<_> = rd
//         .filter_map(|entry| {
//             if dir.is_visible(&entry) {
//                 entry.ok()
//             } else {
//                 None
//             }
//         })
//         .collect();
//     paths.sort_by_key(|entry| (!optimistic_is_dir(entry), entry.file_name()));
//     let tar_url = req.path().trim_end_matches('/'); // this is already encoded
//     let mut body = String::new();
//     writeln!(body, "<h1>Index of {}</h1>", req.path()).unwrap(); // FIXME: decode from url, escape for html
//     writeln!(
//         body,
//         r#"<small>[<a href="{}.tar">.tar</a> of whole directory]</small>"#,
//         tar_url
//     )
//     .unwrap();
//     writeln!(body, "<table>").unwrap();
//     writeln!(
//         body,
//         "<tr><td>📁 <a href='../'>../</a></td><td>Size</td></tr>"
//     )
//     .unwrap();
//     for entry in paths {
//         let meta = entry.metadata()?;
//         let file_url =
//             utf8_percent_encode(&entry.file_name().to_string_lossy(), NON_ALPHANUMERIC).to_string();
//         let file_name = escape_html_entity(&entry.file_name().to_string_lossy()).to_string();
//         let size = meta.len();
//         write!(body, "<tr>").unwrap();
//         if meta.file_type().is_dir() {
//             writeln!(
//                 body,
//                 r#"<td>📂 <a href="{}/">{}/</a></td>"#,
//                 file_url, file_name
//             )
//             .unwrap();
//             write!(
//                 body,
//                 r#" <td><small>[<a href="{}.tar">.tar</a>]</small></td>"#,
//                 file_url
//             )
//             .unwrap();
//         } else {
//             writeln!(
//                 body,
//                 r#"<td>🗎 <a href="{}">{}</a></td>"#,
//                 file_url, file_name
//             )
//             .unwrap();
//             write!(body, " <td>{}</td>", size).unwrap();
//         }
//         writeln!(body, "</tr>").unwrap();
//     }
//     writeln!(body, "</table>").unwrap();
//     writeln!(
//         body,
//         r#"<footer><a href="{}">{} {}</a></footer>"#,
//         env!("CARGO_PKG_HOMEPAGE"),
//         env!("CARGO_PKG_NAME"),
//         env!("CARGO_PKG_VERSION")
//     )
//     .unwrap();
//     let mut html = String::new();
//     writeln!(html, "<!DOCTYPE html>").unwrap();
//     writeln!(html, "<html><head>").unwrap();
//     writeln!(html, "<title>Index of {}</title>", req.path()).unwrap();
//     writeln!(html, "<style>\n{}</style>", include_str!("style.css")).unwrap();
//     writeln!(html, "</head>").unwrap();
//     writeln!(html, "<body>\n{}</body>", body).unwrap();
//     writeln!(html, "</html>").unwrap();
//     let resp = HttpResponse::Ok()
//         .content_type("text/html; charset=utf-8")
//         .body(html);
//     Ok(ServiceResponse::new(req.clone(), resp))
// }

src/main.rs

@@ -1,7 +1,10 @@
-mod threaded_archiver;
+// mod threaded_archiver;
+mod listing;
mod web;
-fn main() -> std::io::Result<()> {
+#[actix_web::main]
+async fn main() -> std::io::Result<()> {
    let app = clap::App::new(clap::crate_name!())
        .author(clap::crate_authors!("\n"))
        .version(clap::crate_version!())
@@ -47,5 +50,5 @@ fn main() -> std::io::Result<()> {
    let root = std::path::PathBuf::from(chdir).canonicalize()?;
    std::env::set_current_dir(&root)?;
-    web::run(&bind_addr, &root)
+    web::run(&bind_addr, &root).await
}

src/web.rs

@@ -1,155 +1,58 @@
-use actix_files as fs;
-use actix_web::dev::ServiceResponse;
-use actix_web::{error, middleware, web, App, HttpRequest, HttpResponse, HttpServer, Responder};
-use futures::Stream;
-use percent_encoding::{utf8_percent_encode, NON_ALPHANUMERIC};
-use v_htmlescape::escape as escape_html_entity;
+use actix_files::Files;
+use actix_web::{get, middleware, web, App, HttpServer, HttpResponse, Responder};
-use crate::threaded_archiver;
-use std::fmt::Write;
+// use crate::threaded_archiver;
use std::path::PathBuf;
-pub fn run(bind_addr: &str, root: &PathBuf) -> std::io::Result<()> {
-    let root = root.clone();
-    HttpServer::new(move || {
-        log::info!("Serving files from {:?}", &root);
-        let static_files = fs::Files::new("/", &root)
-            .show_files_listing()
-            .files_listing_renderer(handle_directory);
+pub async fn run(bind_addr: &str, root: &PathBuf) -> std::io::Result<()> {
+    let root_ = root.clone();
+    let s = HttpServer::new(move || {
+        let static_files = Files::new("/", &root_)
+            .show_files_listing()
+            .redirect_to_slash_directory()
+            .files_listing_renderer(crate::listing::directory_listing);
        App::new()
-            .data(root.clone())
+            .app_data(root_.clone())
            .wrap(middleware::Logger::default())
-            .service(web::resource(r"/{tail:.*}.tar").to(handle_tar))
-            .service(web::resource(r"/favicon.ico").to(favicon_ico))
+            .service(favicon_ico)
+            .service(handle_tar)
            .service(static_files)
    })
    .bind(bind_addr)?
    .workers(1)
-    .run()
+    .run();
+    log::info!("Serving files from {:?}", &root);
+    s.await
}
-fn handle_directory(
-    dir: &fs::Directory,
-    req: &HttpRequest,
-) -> Result<ServiceResponse, std::io::Error> {
-    let rd = std::fs::read_dir(&dir.path)?;
+#[get("/{tail:.*}.tar")]
+async fn handle_tar(_root: web::Data<PathBuf>, web::Path(_tail): web::Path<String>) -> impl Responder {
+    // let relpath = PathBuf::from(tail.trim_end_matches('/'));
+    // let fullpath = root.join(&relpath).canonicalize()?;
-    fn optimistic_is_dir(entry: &std::fs::DirEntry) -> bool {
-        // consider it non directory if metadata reading fails, better than an unwrap() panic
-        entry
-            .metadata()
-            .map(|m| m.file_type().is_dir())
-            .unwrap_or(false)
-    }
-    let mut paths: Vec<_> = rd
-        .filter_map(|entry| {
-            if dir.is_visible(&entry) {
-                entry.ok()
-            } else {
-                None
-            }
-        })
-        .collect();
-    paths.sort_by_key(|entry| (!optimistic_is_dir(entry), entry.file_name()));
+    // if !(fullpath.is_dir()) {
+    //     return Err(error::ErrorBadRequest("not a directory"));
+    // }
-    let tar_url = req.path().trim_end_matches('/'); // this is already encoded
-    let mut body = String::new();
-    writeln!(body, "<h1>Index of {}</h1>", req.path()).unwrap(); // FIXME: decode from url, escape for html
-    writeln!(
-        body,
-        r#"<small>[<a href="{}.tar">.tar</a> of whole directory]</small>"#,
-        tar_url
-    )
-    .unwrap();
-    writeln!(body, "<table>").unwrap();
-    writeln!(
-        body,
-        "<tr><td>📁 <a href='../'>../</a></td><td>Size</td></tr>"
-    )
-    .unwrap();
-    for entry in paths {
-        let meta = entry.metadata()?;
-        let file_url =
-            utf8_percent_encode(&entry.file_name().to_string_lossy(), NON_ALPHANUMERIC).to_string();
-        let file_name = escape_html_entity(&entry.file_name().to_string_lossy()).to_string();
-        let size = meta.len();
-        write!(body, "<tr>").unwrap();
-        if meta.file_type().is_dir() {
-            writeln!(
-                body,
-                r#"<td>📂 <a href="{}/">{}/</a></td>"#,
-                file_url, file_name
-            )
-            .unwrap();
-            write!(
-                body,
-                r#" <td><small>[<a href="{}.tar">.tar</a>]</small></td>"#,
-                file_url
-            )
-            .unwrap();
-        } else {
-            writeln!(
-                body,
-                r#"<td>🗎 <a href="{}">{}</a></td>"#,
-                file_url, file_name
-            )
-            .unwrap();
-            write!(body, " <td>{}</td>", size).unwrap();
-        }
-        writeln!(body, "</tr>").unwrap();
-    }
-    writeln!(body, "</table>").unwrap();
-    writeln!(
-        body,
-        r#"<footer><a href="{}">{} {}</a></footer>"#,
-        env!("CARGO_PKG_HOMEPAGE"),
-        env!("CARGO_PKG_NAME"),
-        env!("CARGO_PKG_VERSION")
-    )
-    .unwrap();
-    let mut html = String::new();
-    writeln!(html, "<!DOCTYPE html>").unwrap();
-    writeln!(html, "<html><head>").unwrap();
-    writeln!(html, "<title>Index of {}</title>", req.path()).unwrap();
-    writeln!(html, "<style>\n{}</style>", include_str!("style.css")).unwrap();
-    writeln!(html, "</head>").unwrap();
-    writeln!(html, "<body>\n{}</body>", body).unwrap();
-    writeln!(html, "</html>").unwrap();
-    let resp = HttpResponse::Ok()
-        .content_type("text/html; charset=utf-8")
-        .body(html);
-    Ok(ServiceResponse::new(req.clone(), resp))
+    // let stream = threaded_archiver::stream_tar_in_thread(fullpath);
+    // let resp = HttpResponse::Ok()
+    //     .content_type("application/x-tar")
+    //     .streaming(stream.map_err(|_e| error::ErrorBadRequest("stream error")));
+    // Ok(resp)
+    HttpResponse::Ok()
}
-fn handle_tar(req: HttpRequest) -> impl Responder {
-    let root = req.app_data::<PathBuf>().unwrap();
-    let tail = req.match_info().query("tail");
-    let relpath = PathBuf::from(tail.trim_end_matches('/'));
-    let fullpath = root.join(&relpath).canonicalize()?;
+const FAVICON_ICO: &'static [u8] = include_bytes!("favicon.png");
-    if !(fullpath.is_dir()) {
-        return Err(error::ErrorBadRequest("not a directory"));
-    }
-    let stream = threaded_archiver::stream_tar_in_thread(fullpath);
-    let resp = HttpResponse::Ok()
-        .content_type("application/x-tar")
-        .streaming(stream.map_err(|_e| error::ErrorBadRequest("stream error")));
-    Ok(resp)
-}
-fn favicon_ico() -> impl Responder {
+#[get("/favicon.ico")]
+async fn favicon_ico() -> impl Responder {
    HttpResponse::Ok()
        .content_type("image/png")
        .header("Cache-Control", "only-if-cached, max-age=86400")
-        .body(bytes::Bytes::from_static(include_bytes!("favicon.png")))
+        .body(FAVICON_ICO)
}
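
For reference, once a futures-0.3 archiver exists, the stubbed handle_tar above could stream the archive again. The following is a minimal sketch under that assumption; the threaded_archiver::stream_tar_in_thread helper and the Data<PathBuf> registration via .data(root.clone()) are assumptions, not part of this commit.

use std::path::PathBuf;

use actix_web::{error, get, web, HttpResponse};

// assumed: a futures-0.3 port of the archiver module (not in this commit)
use crate::threaded_archiver;

#[get("/{tail:.*}.tar")]
async fn handle_tar(
    root: web::Data<PathBuf>, // assumes the root is registered with .data(root.clone())
    web::Path(tail): web::Path<String>,
) -> actix_web::Result<HttpResponse> {
    let relpath = PathBuf::from(tail.trim_end_matches('/'));
    let fullpath = root.join(&relpath).canonicalize()?;
    if !fullpath.is_dir() {
        return Err(error::ErrorBadRequest("not a directory"));
    }
    // assumed helper returning impl Stream<Item = Result<Bytes, io::Error>>
    let stream = threaded_archiver::stream_tar_in_thread(fullpath);
    Ok(HttpResponse::Ok()
        .content_type("application/x-tar")
        .streaming(stream))
}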