Compare commits

..

5 Commits

Author SHA1 Message Date
sigoden
ec60752ba2 chore: upgrade version 2022-05-31 11:10:15 +08:00
sigoden
ed7f5e425a feat: support range requests 2022-05-31 10:58:32 +08:00
sigoden
3032052923 feat: distinct upload and delete operation 2022-05-31 09:23:35 +08:00
sigoden
be3ae2fe00 feat: delete confirm
close #1
2022-05-31 08:01:03 +08:00
sigoden
fb03f7ddb8 refactor: improve code quality 2022-05-31 07:56:05 +08:00
7 changed files with 348 additions and 212 deletions

2
Cargo.lock generated
View File

@@ -296,7 +296,7 @@ dependencies = [
[[package]]
name = "duf"
version = "0.5.0"
version = "0.6.0"
dependencies = [
"async-walkdir",
"async_zip",

View File

@@ -1,6 +1,6 @@
[package]
name = "duf"
version = "0.5.0"
version = "0.6.0"
edition = "2021"
authors = ["sigoden <sigoden@gmail.com>"]
description = "Duf is a simple file server."

View File

@@ -44,14 +44,25 @@ duf
duf folder_name
```
Only serve static files, disable editing operations such as update or delete
Allow all operations such as upload, delete
```
duf --readonly
duf --allow-all
```
Finally, run this command to see a list of all available options
Only allow upload operations
### Curl
```
duf --allow-upload
```
Use http authentication
```
duf --auth user:pass
```
### Api
Download a file
```

View File

@@ -35,10 +35,20 @@ fn app() -> clap::Command<'static> {
.help("Path to a directory for serving files"),
)
.arg(
Arg::new("readonly")
.short('r')
.long("readonly")
.help("Disable change operations such as update or delete"),
Arg::new("allow-all")
.short('A')
.long("allow-all")
.help("Allow all operations"),
)
.arg(
Arg::new("allow-upload")
.long("allow-upload")
.help("Allow upload operation"),
)
.arg(
Arg::new("allow-delete")
.long("allow-delete")
.help("Allow delete operation"),
)
.arg(
Arg::new("auth")
@@ -68,9 +78,10 @@ pub struct Args {
pub address: String,
pub port: u16,
pub path: PathBuf,
pub readonly: bool,
pub auth: Option<String>,
pub no_auth_read: bool,
pub allow_upload: bool,
pub allow_delete: bool,
pub cors: bool,
}
@@ -82,21 +93,22 @@ impl Args {
pub fn parse(matches: ArgMatches) -> BoxResult<Args> {
let address = matches.value_of("address").unwrap_or_default().to_owned();
let port = matches.value_of_t::<u16>("port")?;
let path = matches.value_of_os("path").unwrap_or_default();
let path = Args::parse_path(path)?;
let readonly = matches.is_present("readonly");
let path = Args::parse_path(matches.value_of_os("path").unwrap_or_default())?;
let cors = matches.is_present("cors");
let auth = matches.value_of("auth").map(|v| v.to_owned());
let no_auth_read = matches.is_present("no-auth-read");
let allow_upload = matches.is_present("allow-all") || matches.is_present("allow-upload");
let allow_delete = matches.is_present("allow-all") || matches.is_present("allow-delete");
Ok(Args {
address,
port,
path,
readonly,
auth,
no_auth_read,
cors,
allow_delete,
allow_upload,
})
}

View File

@@ -4,7 +4,7 @@
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width" />
__SLOB__
__SLOT__
</head>
<body>
<div class="head">

View File

@@ -1,6 +1,6 @@
var $toolbox, $tbody, $breadcrumb, $uploaders, $uploadControl;
var uploaderIdx = 0;
var baseDir;
let $tbody, $uploaders;
let uploaderIdx = 0;
let baseDir;
class Uploader {
idx = 0;
@@ -12,8 +12,8 @@ class Uploader {
}
upload() {
var { file, idx } = this;
var url = getUrl(file.name);
const { file, idx } = this;
let url = getUrl(file.name);
if (file.name == baseDir + ".zip") {
url += "?unzip";
}
@@ -24,35 +24,45 @@ class Uploader {
</div>`);
this.$elem = document.getElementById(`file${idx}`);
var ajax = new XMLHttpRequest();
const ajax = new XMLHttpRequest();
ajax.upload.addEventListener("progress", e => this.progress(e), false);
ajax.addEventListener("load", e => this.complete(e), false);
ajax.addEventListener("error", e => this.fail(e), false);
ajax.addEventListener("abort", e => this.fail(e), false);
ajax.addEventListener("readystatechange", () => {
console.log(ajax.readyState, ajax.status)
if(ajax.readyState === 4) {
if (ajax.status == 200) {
this.complete();
} else {
this.fail();
}
}
})
ajax.addEventListener("error", () => this.fail(), false);
ajax.addEventListener("abort", () => this.fail(), false);
ajax.open("PUT", url);
ajax.send(file);
}
progress(event) {
var percent = (event.loaded / event.total) * 100;
const percent = (event.loaded / event.total) * 100;
this.$elem.innerHTML = `${this.file.name} (${percent.toFixed(2)}%)`;
}
complete(event) {
complete() {
this.$elem.innerHTML = `${this.file.name}`;
}
fail(event) {
fail() {
this.$elem.innerHTML = `<strike>${this.file.name}</strike>`;
}
}
function addBreadcrumb(value) {
var parts = value.split("/").filter(v => !!v);
var len = parts.length;
var path = "";
for (var i = 0; i < len; i++) {
var name = parts[i];
const $breadcrumb = document.querySelector(".breadcrumb");
const parts = value.split("/").filter(v => !!v);
const len = parts.length;
let path = "";
for (let i = 0; i < len; i++) {
const name = parts[i];
if (i > 0) {
path += "/" + name;
}
@@ -69,9 +79,9 @@ function addBreadcrumb(value) {
}
function addPath(file, index) {
var url = getUrl(file.name)
var actionDelete = "";
var actionDownload = "";
const url = getUrl(file.name)
let actionDelete = "";
let actionDownload = "";
if (file.path_type.endsWith("Dir")) {
actionDownload = `
<div class="action-btn">
@@ -87,13 +97,13 @@ function addPath(file, index) {
</a>
</div>`;
}
if (!DATA.readonly) {
if (DATA.allow_delete) {
actionDelete = `
<div onclick="deletePath(${index})" class="action-btn" id="deleteBtn${index}" title="Delete ${file.name}">
<svg width="16" height="16" fill="currentColor" viewBox="0 0 16 16"><path d="M6.854 7.146a.5.5 0 1 0-.708.708L7.293 9l-1.147 1.146a.5.5 0 0 0 .708.708L8 9.707l1.146 1.147a.5.5 0 0 0 .708-.708L8.707 9l1.147-1.146a.5.5 0 0 0-.708-.708L8 8.293 6.854 7.146z"/><path d="M14 14V4.5L9.5 0H4a2 2 0 0 0-2 2v12a2 2 0 0 0 2 2h8a2 2 0 0 0 2-2zM9.5 3A1.5 1.5 0 0 0 11 4.5h2V14a1 1 0 0 1-1 1H4a1 1 0 0 1-1-1V2a1 1 0 0 1 1-1h5.5v2z"/></svg>
</div>`;
}
var actionCell = `
let actionCell = `
<td class="cell-actions">
${actionDownload}
${actionDelete}
@@ -112,21 +122,27 @@ ${actionCell}
}
async function deletePath(index) {
var file = DATA.paths[index];
const file = DATA.paths[index];
if (!file) return;
var ajax = new XMLHttpRequest();
ajax.open("DELETE", getUrl(file.name));
ajax.addEventListener("readystatechange", function() {
if(ajax.readyState === 4 && ajax.status === 200) {
document.getElementById(`addPath${index}`).remove();
}
if (!confirm(`Delete \`${file.name}\`?`)) return;
try {
const res = await fetch(getUrl(file.name), {
method: "DELETE",
});
ajax.send();
if (res.status === 200) {
document.getElementById(`addPath${index}`).remove();
} else {
throw new Error(await res.text())
}
} catch (err) {
alert(`Cannot delete \`${file.name}\`, ${err.message}`);
}
}
function getUrl(name) {
var url = location.href.split('?')[0];
let url = location.href.split('?')[0];
if (!url.endsWith("/")) url += "/";
url += encodeURI(name);
return url;
@@ -147,12 +163,12 @@ function getSvg(path_type) {
function formatMtime(mtime) {
if (!mtime) return ""
var date = new Date(mtime);
var year = date.getFullYear();
var month = padZero(date.getMonth() + 1, 2);
var day = padZero(date.getDate(), 2);
var hours = padZero(date.getHours(), 2);
var minutes = padZero(date.getMinutes(), 2);
const date = new Date(mtime);
const year = date.getFullYear();
const month = padZero(date.getMonth() + 1, 2);
const day = padZero(date.getDate(), 2);
const hours = padZero(date.getHours(), 2);
const minutes = padZero(date.getMinutes(), 2);
return `${year}/${month}/${day} ${hours}:${minutes}`;
}
@@ -162,28 +178,26 @@ function padZero(value, size) {
function formatSize(size) {
if (!size) return ""
var sizes = ['B', 'KB', 'MB', 'GB', 'TB'];
const sizes = ['B', 'KB', 'MB', 'GB', 'TB'];
if (size == 0) return '0 Byte';
var i = parseInt(Math.floor(Math.log(size) / Math.log(1024)));
const i = parseInt(Math.floor(Math.log(size) / Math.log(1024)));
return Math.round(size / Math.pow(1024, i), 2) + ' ' + sizes[i];
}
function ready() {
$toolbox = document.querySelector(".toolbox");
$tbody = document.querySelector(".main tbody");
$breadcrumb = document.querySelector(".breadcrumb");
$uploaders = document.querySelector(".uploaders");
addBreadcrumb(DATA.breadcrumb);
DATA.paths.forEach((file, index) => addPath(file, index));
if (!DATA.readonly) {
if (DATA.allow_upload) {
document.querySelector(".upload-control").classList.remove(["hidden"]);
document.getElementById("file").addEventListener("change", e => {
var files = e.target.files;
for (var file of files) {
const files = e.target.files;
for (let file of files) {
uploaderIdx += 1;
var uploader = new Uploader(uploaderIdx, file);
const uploader = new Uploader(uploaderIdx, file);
uploader.upload();
}
});

View File

@@ -7,8 +7,8 @@ use async_zip::Compression;
use futures::stream::StreamExt;
use futures::TryStreamExt;
use headers::{
AccessControlAllowHeaders, AccessControlAllowOrigin, ContentType, ETag, HeaderMapExt,
IfModifiedSince, IfNoneMatch, LastModified,
AccessControlAllowHeaders, AccessControlAllowOrigin, ContentRange, ContentType, ETag,
HeaderMap, HeaderMapExt, IfModifiedSince, IfNoneMatch, IfRange, LastModified, Range,
};
use hyper::header::{HeaderValue, ACCEPT, CONTENT_TYPE, ORIGIN, RANGE, WWW_AUTHENTICATE};
use hyper::service::{make_service_fn, service_fn};
@@ -16,11 +16,12 @@ use hyper::{Body, Method, StatusCode};
use percent_encoding::percent_decode;
use serde::Serialize;
use std::convert::Infallible;
use std::fs::Metadata;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::time::SystemTime;
use tokio::fs::File;
use tokio::io::AsyncWrite;
use tokio::io::{AsyncReadExt, AsyncSeekExt, AsyncWrite};
use tokio::{fs, io};
use tokio_util::codec::{BytesCodec, FramedRead};
use tokio_util::io::{ReaderStream, StreamReader};
@@ -28,20 +29,18 @@ use tokio_util::io::{ReaderStream, StreamReader};
type Request = hyper::Request<Body>;
type Response = hyper::Response<Body>;
macro_rules! status_code {
($status:expr) => {
hyper::Response::builder()
.status($status)
.body($status.canonical_reason().unwrap_or_default().into())
.unwrap()
};
}
const INDEX_HTML: &str = include_str!("assets/index.html");
const INDEX_CSS: &str = include_str!("assets/index.css");
const INDEX_JS: &str = include_str!("assets/index.js");
const BUF_SIZE: usize = 1024 * 16;
// Sets a response's status code and replaces its body with the status's
// canonical reason phrase (e.g. "Forbidden"). Expands to two statements, so
// it must be invoked in statement position.
macro_rules! status {
($res:ident, $status:expr) => {
*$res.status_mut() = $status;
*$res.body_mut() = Body::from($status.canonical_reason().unwrap_or_default());
};
}
pub async fn serve(args: Args) -> BoxResult<()> {
let address = args.address()?;
let inner = Arc::new(InnerService::new(args));
@@ -76,11 +75,20 @@ impl InnerService {
let method = req.method().clone();
let uri = req.uri().clone();
let cors = self.args.cors;
let mut res = self
.handle(req)
.await
.unwrap_or_else(|_| status_code!(StatusCode::INTERNAL_SERVER_ERROR));
let mut res = match self.handle(req).await {
Ok(res) => {
info!(r#""{} {}" - {}"#, method, uri, res.status());
res
}
Err(err) => {
let mut res = Response::default();
status!(res, StatusCode::INTERNAL_SERVER_ERROR);
error!(r#""{} {}" - {} {}"#, method, uri, res.status(), err);
res
}
};
if cors {
add_cors(&mut res);
}
@@ -88,74 +96,87 @@ impl InnerService {
}
pub async fn handle(self: Arc<Self>, req: Request) -> BoxResult<Response> {
if !self.auth_guard(&req).unwrap_or_default() {
let mut res = status_code!(StatusCode::UNAUTHORIZED);
res.headers_mut()
.insert(WWW_AUTHENTICATE, HeaderValue::from_static("Basic"));
let mut res = Response::default();
if !self.auth_guard(&req, &mut res) {
return Ok(res);
}
let path = req.uri().path();
let filepath = match self.extract_path(path) {
Some(v) => v,
None => {
status!(res, StatusCode::FORBIDDEN);
return Ok(res);
}
};
let filepath = filepath.as_path();
let query = req.uri().query().unwrap_or_default();
let meta = fs::metadata(filepath).await.ok();
let is_miss = meta.is_none();
let is_dir = meta.map(|v| v.is_dir()).unwrap_or_default();
let is_file = !is_miss && !is_dir;
let allow_upload = self.args.allow_upload;
let allow_delete = self.args.allow_delete;
match *req.method() {
Method::GET => self.handle_static(req).await,
Method::PUT => {
if self.args.readonly {
return Ok(status_code!(StatusCode::FORBIDDEN));
Method::GET if is_dir && query == "zip" => {
self.handle_zip_dir(filepath, &mut res).await?
}
self.handle_upload(req).await
Method::GET if is_dir && query.starts_with("q=") => {
self.handle_query_dir(filepath, &query[3..], &mut res)
.await?
}
Method::OPTIONS => Ok(status_code!(StatusCode::NO_CONTENT)),
Method::DELETE => self.handle_delete(req).await,
_ => Ok(status_code!(StatusCode::NOT_FOUND)),
Method::GET if is_file => {
self.handle_send_file(filepath, req.headers(), &mut res)
.await?
}
Method::GET if is_miss && path.ends_with('/') => {
self.handle_ls_dir(filepath, false, &mut res).await?
}
Method::GET => self.handle_ls_dir(filepath, true, &mut res).await?,
Method::OPTIONS => {
status!(res, StatusCode::NO_CONTENT);
}
Method::PUT if !allow_upload || (!allow_delete && is_file) => {
status!(res, StatusCode::FORBIDDEN);
}
Method::PUT => self.handle_upload(filepath, req, &mut res).await?,
Method::DELETE if !allow_delete => {
status!(res, StatusCode::FORBIDDEN);
}
Method::DELETE if !is_miss => self.handle_delete(filepath, is_dir).await?,
_ => {
status!(res, StatusCode::NOT_FOUND);
}
}
async fn handle_static(&self, req: Request) -> BoxResult<Response> {
let req_path = req.uri().path();
let path = match self.get_file_path(req_path)? {
Some(path) => path,
None => return Ok(status_code!(StatusCode::FORBIDDEN)),
};
match fs::metadata(&path).await {
Ok(meta) => {
if meta.is_dir() {
let req_query = req.uri().query().unwrap_or_default();
if req_query == "zip" {
return self.handle_send_dir_zip(path.as_path()).await;
}
if let Some(q) = req_query.strip_prefix("q=") {
return self.handle_query_dir(path.as_path(), q).await;
}
self.handle_ls_dir(path.as_path(), true).await
} else {
self.handle_send_file(&req, path.as_path()).await
}
}
Err(_) => {
if req_path.ends_with('/') {
self.handle_ls_dir(path.as_path(), false).await
} else {
Ok(status_code!(StatusCode::NOT_FOUND))
}
}
}
Ok(res)
}
async fn handle_upload(&self, mut req: Request) -> BoxResult<Response> {
let forbidden = status_code!(StatusCode::FORBIDDEN);
let path = match self.get_file_path(req.uri().path())? {
Some(path) => path,
None => return Ok(forbidden),
};
match path.parent() {
async fn handle_upload(
&self,
path: &Path,
mut req: Request,
res: &mut Response,
) -> BoxResult<()> {
let ensure_parent = match path.parent() {
Some(parent) => match fs::metadata(parent).await {
Ok(meta) => {
if !meta.is_dir() {
return Ok(forbidden);
Ok(meta) => meta.is_dir(),
Err(_) => {
fs::create_dir_all(parent).await?;
true
}
}
Err(_) => fs::create_dir_all(parent).await?,
},
None => return Ok(forbidden),
None => false,
};
if !ensure_parent {
status!(res, StatusCode::FORBIDDEN);
return Ok(());
}
let mut file = fs::File::create(&path).await?;
@@ -194,39 +215,37 @@ impl InnerService {
fs::remove_file(&path).await?;
}
return Ok(status_code!(StatusCode::OK));
Ok(())
}
async fn handle_delete(&self, req: Request) -> BoxResult<Response> {
let path = match self.get_file_path(req.uri().path())? {
Some(path) => path,
None => return Ok(status_code!(StatusCode::FORBIDDEN)),
};
let meta = fs::metadata(&path).await?;
if meta.is_file() {
fs::remove_file(path).await?;
} else {
fs::remove_dir_all(path).await?;
async fn handle_delete(&self, path: &Path, is_dir: bool) -> BoxResult<()> {
match is_dir {
true => fs::remove_dir_all(path).await?,
false => fs::remove_file(path).await?,
}
Ok(status_code!(StatusCode::OK))
Ok(())
}
async fn handle_ls_dir(&self, path: &Path, exist: bool) -> BoxResult<Response> {
async fn handle_ls_dir(&self, path: &Path, exist: bool, res: &mut Response) -> BoxResult<()> {
let mut paths: Vec<PathItem> = vec![];
if exist {
let mut rd = fs::read_dir(path).await?;
while let Some(entry) = rd.next_entry().await? {
let entry_path = entry.path();
if let Ok(item) = get_path_item(entry_path, path.to_path_buf()).await {
if let Ok(item) = to_pathitem(entry_path, path.to_path_buf()).await {
paths.push(item);
}
}
}
self.send_index(path, paths)
self.send_index(path, paths, res)
}
async fn handle_query_dir(&self, path: &Path, q: &str) -> BoxResult<Response> {
async fn handle_query_dir(
&self,
path: &Path,
query: &str,
res: &mut Response,
) -> BoxResult<()> {
let mut paths: Vec<PathItem> = vec![];
let mut walkdir = WalkDir::new(path);
while let Some(entry) = walkdir.next().await {
@@ -235,22 +254,22 @@ impl InnerService {
.file_name()
.to_string_lossy()
.to_lowercase()
.contains(&q.to_lowercase())
.contains(&query.to_lowercase())
{
continue;
}
if fs::symlink_metadata(entry.path()).await.is_err() {
continue;
}
if let Ok(item) = get_path_item(entry.path(), path.to_path_buf()).await {
if let Ok(item) = to_pathitem(entry.path(), path.to_path_buf()).await {
paths.push(item);
}
}
}
self.send_index(path, paths)
self.send_index(path, paths, res)
}
async fn handle_send_dir_zip(&self, path: &Path) -> BoxResult<Response> {
async fn handle_zip_dir(&self, path: &Path, res: &mut Response) -> BoxResult<()> {
let (mut writer, reader) = tokio::io::duplex(BUF_SIZE);
let path = path.to_owned();
tokio::spawn(async move {
@@ -259,49 +278,81 @@ impl InnerService {
}
});
let stream = ReaderStream::new(reader);
let body = Body::wrap_stream(stream);
Ok(Response::new(body))
*res.body_mut() = Body::wrap_stream(stream);
Ok(())
}
async fn handle_send_file(&self, req: &Request, path: &Path) -> BoxResult<Response> {
async fn handle_send_file(
&self,
path: &Path,
headers: &HeaderMap<HeaderValue>,
res: &mut Response,
) -> BoxResult<()> {
let (file, meta) = tokio::join!(fs::File::open(path), fs::metadata(path),);
let (file, meta) = (file?, meta?);
let mut res = Response::default();
if let Ok(mtime) = meta.modified() {
let timestamp = get_timestamp(&mtime);
let size = meta.len();
let etag = format!(r#""{}-{}""#, timestamp, size)
.parse::<ETag>()
.unwrap();
let last_modified = LastModified::from(mtime);
let fresh = {
// `If-None-Match` takes presedence over `If-Modified-Since`.
if let Some(if_none_match) = req.headers().typed_get::<IfNoneMatch>() {
let (mut file, meta) = (file?, meta?);
let mut maybe_range = true;
if let Some((etag, last_modified)) = extract_cache_headers(&meta) {
let cached = {
if let Some(if_none_match) = headers.typed_get::<IfNoneMatch>() {
!if_none_match.precondition_passes(&etag)
} else if let Some(if_modified_since) = req.headers().typed_get::<IfModifiedSince>()
{
!if_modified_since.is_modified(mtime)
} else if let Some(if_modified_since) = headers.typed_get::<IfModifiedSince>() {
!if_modified_since.is_modified(last_modified.into())
} else {
false
}
};
res.headers_mut().typed_insert(last_modified);
res.headers_mut().typed_insert(etag);
if fresh {
*res.status_mut() = StatusCode::NOT_MODIFIED;
return Ok(res);
res.headers_mut().typed_insert(etag.clone());
if cached {
status!(res, StatusCode::NOT_MODIFIED);
return Ok(());
}
if headers.typed_get::<Range>().is_some() {
maybe_range = headers
.typed_get::<IfRange>()
.map(|if_range| !if_range.is_modified(Some(&etag), Some(&last_modified)))
// Always be fresh if there is no validators
.unwrap_or(true);
} else {
maybe_range = false;
}
}
let file_range = if maybe_range {
if let Some(content_range) = headers
.typed_get::<Range>()
.and_then(|range| to_content_range(&range, meta.len()))
{
res.headers_mut().typed_insert(content_range.clone());
*res.status_mut() = StatusCode::PARTIAL_CONTENT;
content_range.bytes_range()
} else {
None
}
} else {
None
};
if let Some(mime) = mime_guess::from_path(&path).first() {
res.headers_mut().typed_insert(ContentType::from(mime));
}
let body = if let Some((begin, end)) = file_range {
file.seek(io::SeekFrom::Start(begin)).await?;
let stream = FramedRead::new(file.take(end - begin + 1), BytesCodec::new());
Body::wrap_stream(stream)
} else {
let stream = FramedRead::new(file, BytesCodec::new());
let body = Body::wrap_stream(stream);
Body::wrap_stream(stream)
};
*res.body_mut() = body;
Ok(res)
Ok(())
}
fn send_index(&self, path: &Path, mut paths: Vec<PathItem>) -> BoxResult<Response> {
fn send_index(
&self,
path: &Path,
mut paths: Vec<PathItem>,
res: &mut Response,
) -> BoxResult<()> {
paths.sort_unstable();
let rel_path = match self.args.path.parent() {
Some(p) => path.strip_prefix(p).unwrap(),
@@ -310,14 +361,15 @@ impl InnerService {
let data = IndexData {
breadcrumb: normalize_path(rel_path),
paths,
readonly: self.args.readonly,
allow_upload: self.args.allow_upload,
allow_delete: self.args.allow_delete,
};
let data = serde_json::to_string(&data).unwrap();
let output = INDEX_HTML.replace(
"__SLOB__",
"__SLOT__",
&format!(
r#"
<title>Files in {} - Duf/</title>
<title>Files in {}/ - Duf</title>
<style>{}</style>
<script>var DATA = {}; {}</script>
"#,
@@ -327,44 +379,51 @@ impl InnerService {
INDEX_JS
),
);
*res.body_mut() = output.into();
Ok(Response::new(output.into()))
Ok(())
}
fn auth_guard(&self, req: &Request) -> BoxResult<bool> {
if let Some(auth) = &self.args.auth {
if let Some(value) = req.headers().get("Authorization") {
let value = value.to_str()?;
let value = if value.contains("Basic ") {
&value[6..]
} else {
return Ok(false);
fn auth_guard(&self, req: &Request, res: &mut Response) -> bool {
let pass = {
match &self.args.auth {
None => true,
Some(auth) => match req.headers().get("Authorization") {
Some(value) => match value.to_str().ok().map(|v| {
let mut it = v.split(' ');
(it.next(), it.next())
}) {
Some((Some("Basic"), Some(tail))) => base64::decode(tail)
.ok()
.and_then(|v| String::from_utf8(v).ok())
.map(|v| v.as_str() == auth)
.unwrap_or_default(),
_ => false,
},
None => self.args.no_auth_read && req.method() == Method::GET,
},
}
};
let value = base64::decode(value)?;
let value = std::str::from_utf8(&value)?;
return Ok(value == auth);
} else {
if self.args.no_auth_read && req.method() == Method::GET {
return Ok(true);
if !pass {
status!(res, StatusCode::UNAUTHORIZED);
res.headers_mut()
.insert(WWW_AUTHENTICATE, HeaderValue::from_static("Basic"));
}
return Ok(false);
}
}
Ok(true)
pass
}
fn get_file_path(&self, path: &str) -> BoxResult<Option<PathBuf>> {
let decoded_path = percent_decode(path[1..].as_bytes()).decode_utf8()?;
fn extract_path(&self, path: &str) -> Option<PathBuf> {
let decoded_path = percent_decode(path[1..].as_bytes()).decode_utf8().ok()?;
let slashes_switched = if cfg!(windows) {
decoded_path.replace('/', "\\")
} else {
decoded_path.into_owned()
};
let path = self.args.path.join(&slashes_switched);
if path.starts_with(&self.args.path) {
Ok(Some(path))
let full_path = self.args.path.join(&slashes_switched);
if full_path.starts_with(&self.args.path) {
Some(full_path)
} else {
Ok(None)
None
}
}
}
@@ -373,7 +432,8 @@ impl InnerService {
struct IndexData {
breadcrumb: String,
paths: Vec<PathItem>,
readonly: bool,
allow_upload: bool,
allow_delete: bool,
}
#[derive(Debug, Serialize, Eq, PartialEq, Ord, PartialOrd)]
@@ -392,7 +452,7 @@ enum PathType {
SymlinkFile,
}
async fn get_path_item<P: AsRef<Path>>(path: P, base_path: P) -> BoxResult<PathItem> {
async fn to_pathitem<P: AsRef<Path>>(path: P, base_path: P) -> BoxResult<PathItem> {
let path = path.as_ref();
let rel_path = path.strip_prefix(base_path).unwrap();
let (meta, meta2) = tokio::join!(fs::metadata(&path), fs::symlink_metadata(&path));
@@ -405,7 +465,7 @@ async fn get_path_item<P: AsRef<Path>>(path: P, base_path: P) -> BoxResult<PathI
(true, false) => PathType::SymlinkFile,
(false, false) => PathType::File,
};
let mtime = get_timestamp(&meta.modified()?);
let mtime = to_timestamp(&meta.modified()?);
let size = match path_type {
PathType::Dir | PathType::SymlinkDir => None,
PathType::File | PathType::SymlinkFile => Some(meta.len()),
@@ -419,7 +479,7 @@ async fn get_path_item<P: AsRef<Path>>(path: P, base_path: P) -> BoxResult<PathI
})
}
fn get_timestamp(time: &SystemTime) -> u64 {
fn to_timestamp(time: &SystemTime) -> u64 {
time.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_millis() as u64
@@ -470,3 +530,42 @@ async fn dir_zip<W: AsyncWrite + Unpin>(writer: &mut W, dir: &Path) -> BoxResult
writer.close().await?;
Ok(())
}
/// Derives the HTTP cache validators (`ETag`, `Last-Modified`) for a file
/// from its filesystem metadata.
///
/// Returns `None` when the platform cannot report a modification time.
fn extract_cache_headers(meta: &Metadata) -> Option<(ETag, LastModified)> {
    let mtime = meta.modified().ok()?;
    // Validator combines mtime (ms since epoch) and size: `"<timestamp>-<size>"`.
    let tag = format!(r#""{}-{}""#, to_timestamp(&mtime), meta.len());
    // The formatted value is always a valid quoted entity-tag, so parsing
    // cannot fail here.
    let etag = tag.parse::<ETag>().unwrap();
    Some((etag, LastModified::from(mtime)))
}
// Translates a client `Range` request header into the `Content-Range`
// response header for a resource of `complete_length` bytes.
//
// Returns `None` when the request is multi-part, unsatisfiable, or otherwise
// unusable — the caller then falls back to serving the full body with 200.
fn to_content_range(range: &Range, complete_length: u64) -> Option<ContentRange> {
use core::ops::Bound::{Included, Unbounded};
let mut iter = range.iter();
let bounds = iter.next();
if iter.next().is_some() {
// Found multiple byte-range-spec. Drop.
return None;
}
bounds.and_then(|b| match b {
// `bytes=start-end`: clamp `end` to the last valid byte index.
(Included(start), Included(end)) if start <= end && start < complete_length => {
ContentRange::bytes(
start..=end.min(complete_length.saturating_sub(1)),
complete_length,
)
.ok()
}
// `bytes=start-`: everything from `start` through end of file.
(Included(start), Unbounded) if start < complete_length => {
ContentRange::bytes(start.., complete_length).ok()
}
// `bytes=-N`: suffix range, the final `end` bytes of the file.
(Unbounded, Included(end)) if end > 0 => {
ContentRange::bytes(complete_length.saturating_sub(end).., complete_length).ok()
}
_ => None,
})
}