Compare commits

...

25 Commits

Author SHA1 Message Date
sigoden
6cc8a18a3d chore: release v0.44.0 (#606) 2025-08-14 09:05:35 +08:00
Matthias Möller
a387d727b2 chore: removes clippy warning with rust 1.89 (#609) 2025-08-08 06:50:30 +08:00
sigoden
19d65a5aa4 refactor: fix typos (#605) 2025-08-02 17:04:20 +08:00
sigoden
d37762d2b9 refactor: update deps (#604) 2025-08-02 16:56:07 +08:00
sigoden
9c9fca75d3 feat: support downloading via token auth (#603) 2025-08-02 14:37:49 +08:00
sigoden
089d30c5a5 feat: support noscript fallback (#602) 2025-08-02 09:50:00 +08:00
Matthias Möller
459a4d4f4a refactor: removes clippy warnings (#601) 2025-07-30 18:33:00 +08:00
sigoden
f8b69f4df8 fix: unexpected public auth asking for login info (#583) 2025-05-12 08:03:23 +08:00
Matthias Möller
53f064c73b fix: incorrect separator for zip archives under windows (#577) 2025-04-25 08:14:21 +08:00
Falko Galperin
8a92a0cf1a fix: follow symlinks when searching/archiving (#572)
Specifically, this will always follow symlinks when they lead to a path
below the dufs root, and will follow other symlinks when
`--allow-symlink` is set.

I refactored some common functionality out of `zip_dir` and
`handle_search_dir` as well.
2025-04-12 09:49:19 +08:00
sigoden
59685da06e fix: webui formatDirSize (#568) 2025-04-07 07:36:49 +08:00
sigoden
09200860b4 chore: update deps and clippy (#569) 2025-04-07 07:27:43 +08:00
sigoden
4fbdec2878 feat: tolerate the absence of mtime (#559) 2025-03-20 08:46:26 +08:00
sigoden
d0453b7591 feat: limit sub directory item counting (#556) 2025-03-14 08:53:11 +08:00
45gfg9
eda9769b2a feat: support multipart ranges (#535) 2025-02-01 08:28:34 +08:00
sigoden
d255f1376a fix: incorrect dir size due to hidden files (#529) 2025-01-18 07:20:34 +08:00
sigoden
669c4f8811 feat: add cache-control:no-cache while sending file and index (#528) 2025-01-17 21:45:41 +08:00
sigoden
e576ddcbea feat: higher perm auth path shadows lower one (#521)
In `/:rw;/path1:ro`, `/:rw` has the higher permission; it shadows `/path1:ro`, so `/path1` is granted read-write permissions.
2025-01-02 09:00:28 +08:00
sigoden
af95ea1cd7 fix: webui can't handle hash property of URL well (#515) 2024-12-28 09:53:59 +08:00
sigoden
cbc620481d refactor: change description for --allow-archive (#511) 2024-12-24 18:58:03 +08:00
sigoden
f1c9776962 chore: update readme 2024-12-20 09:52:51 +08:00
sigoden
ac15ae4e8e Merge pull request #497 from sigoden/fix 2024-12-11 09:04:58 +08:00
sigoden
ab4ef06cb8 fix: no authentication check if no auth users 2024-12-11 08:57:30 +08:00
sigoden
bc6c573acb chore: adjust timeout for wait_for_port 2024-11-16 18:26:10 +08:00
sigoden
f27f9e997f chore: update readme about hashed password 2024-11-16 17:11:42 +08:00
18 changed files with 1821 additions and 773 deletions

View File

@@ -2,6 +2,35 @@
All notable changes to this project will be documented in this file.
## [0.44.0] - 2025-08-02
### Bug Fixes
- No authentication check if no auth users ([#497](https://github.com/sigoden/dufs/issues/497))
- Webui can't handle hash property of URL well ([#515](https://github.com/sigoden/dufs/issues/515))
- Incorrect dir size due to hidden files ([#529](https://github.com/sigoden/dufs/issues/529))
- Webui formatDirSize ([#568](https://github.com/sigoden/dufs/issues/568))
- Follow symlinks when searching/archiving ([#572](https://github.com/sigoden/dufs/issues/572))
- Incorrect separator for zip archives under windows ([#577](https://github.com/sigoden/dufs/issues/577))
- Unexpected public auth asking for login info ([#583](https://github.com/sigoden/dufs/issues/583))
### Features
- Higher perm auth path shadows lower one ([#521](https://github.com/sigoden/dufs/issues/521))
- Add cache-control:no-cache while sending file and index ([#528](https://github.com/sigoden/dufs/issues/528))
- Support multipart ranges ([#535](https://github.com/sigoden/dufs/issues/535))
- Limit sub directory item counting ([#556](https://github.com/sigoden/dufs/issues/556))
- Tolerate the absence of mtime ([#559](https://github.com/sigoden/dufs/issues/559))
- Support noscript fallback ([#602](https://github.com/sigoden/dufs/issues/602))
- Support downloading via token auth ([#603](https://github.com/sigoden/dufs/issues/603))
### Refactor
- Change description for `--allow-archive` ([#511](https://github.com/sigoden/dufs/issues/511))
- Removes clippy warnings ([#601](https://github.com/sigoden/dufs/issues/601))
- Update deps ([#604](https://github.com/sigoden/dufs/issues/604))
- Fix typos ([#605](https://github.com/sigoden/dufs/issues/605))
## [0.43.0] - 2024-11-04
### Bug Fixes

Cargo.lock (generated, 1509 lines changed)

File diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
[package]
name = "dufs"
version = "0.43.0"
version = "0.44.0"
edition = "2021"
authors = ["sigoden <sigoden@gmail.com>"]
description = "Dufs is a distinctive utility file server"
@@ -14,26 +14,26 @@ keywords = ["static", "file", "server", "webdav", "cli"]
clap = { version = "4.5", features = ["wrap_help", "env"] }
clap_complete = "4.5"
chrono = { version = "0.4", default-features = false, features = ["clock"] }
tokio = { version = "1", features = ["rt-multi-thread", "macros", "fs", "io-util", "signal"]}
tokio = { version = "1", features = ["rt-multi-thread", "macros", "fs", "io-util", "signal", "net"]}
tokio-util = { version = "0.7", features = ["io-util", "compat"] }
hyper = { version = "1", features = ["http1", "server"] }
percent-encoding = "2.3"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
futures-util = { version = "0.3", default-features = false, features = ["alloc"] }
async_zip = { version = "0.0.17", default-features = false, features = ["deflate", "bzip2", "xz", "chrono", "tokio"] }
async_zip = { version = "0.0.18", default-features = false, features = ["deflate", "bzip2", "xz", "chrono", "tokio"] }
headers = "0.4"
mime_guess = "2.0"
if-addrs = "0.13"
if-addrs = "0.14"
rustls-pemfile = { version = "2.0", optional = true }
tokio-rustls = { version = "0.26", optional = true, default-features = false, features = ["ring", "tls12"]}
md5 = "0.7"
md5 = "0.8"
lazy_static = "1.4"
uuid = { version = "1.7", features = ["v4", "fast-rng"] }
urlencoding = "2.1"
xml-rs = "0.8"
log = { version = "0.4", features = ["std"] }
socket2 = "0.5"
socket2 = "0.6"
async-stream = "0.3"
walkdir = "2.3"
form_urlencoded = "1.2"
@@ -53,6 +53,8 @@ http-body-util = "0.1"
bytes = "1.5"
pin-project-lite = "0.2"
sha2 = "0.10.8"
ed25519-dalek = "2.2.0"
hex = "0.4.3"
[features]
default = ["tls"]
@@ -62,8 +64,8 @@ tls = ["rustls-pemfile", "tokio-rustls"]
assert_cmd = "2"
reqwest = { version = "0.12", features = ["blocking", "multipart", "rustls-tls"], default-features = false }
assert_fs = "1"
port_check = "0.2"
rstest = "0.23"
port_check = "0.3"
rstest = "0.26.1"
regex = "1"
url = "2"
predicates = "3"

View File

@@ -66,7 +66,7 @@ Options:
--allow-delete Allow delete files/folders
--allow-search Allow search files/folders
--allow-symlink Allow symlink to files/folders outside root directory
--allow-archive Allow zip archive generation
--allow-archive Allow download folders as archive file
--enable-cors Enable CORS, sets `Access-Control-Allow-Origin: *`
--render-index Serve index.html when requesting a directory, returns 404 if not found index.html
--render-try-index Serve index.html when requesting a directory, returns directory listing if not found index.html
@@ -244,23 +244,25 @@ dufs -a user:pass@/:rw,/dir1 -a @/
- `-a user:pass@/:rw,/dir1`: `user` has read-write permissions for `/*` and read-only permissions for `/dir1/*`.
- `-a @/`: All paths are publicly accessible; everyone can view/download them.
> There are no restrictions on using ':' and '@' characters in a password. For example, `user:pa:ss@1@/:rw` is valid, the password is `pa:ss@1`.
**Auth permissions are restricted by dufs global permissions.** If dufs does not enable upload permissions via `--allow-upload`, then the account will not have upload permissions even if it is granted `read-write`(`:rw`) permissions.
#### Hashed Password
DUFS supports the use of sha-512 hashed password.
Create hashed password
Create hashed password:
```
$ mkpasswd -m sha-512 123456
```sh
$ openssl passwd -6 123456 # or `mkpasswd -m sha-512 123456`
$6$tWMB51u6Kb2ui3wd$5gVHP92V9kZcMwQeKTjyTRgySsYJu471Jb1I6iHQ8iZ6s07GgCIO69KcPBRuwPE5tDq05xMAzye0NxVKuJdYs/
```
Use hashed password
```
Use hashed password:
```sh
dufs -a 'admin:$6$tWMB51u6Kb2ui3wd$5gVHP92V9kZcMwQeKTjyTRgySsYJu471Jb1I6iHQ8iZ6s07GgCIO69KcPBRuwPE5tDq05xMAzye0NxVKuJdYs/@/:rw'
```
> The hashed password contains `$6`, which can expand to a variable in some shells, so you have to use **single quotes** to wrap it.
Two important things for hashed passwords:

View File

@@ -4,6 +4,9 @@
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width" />
<noscript>
<meta http-equiv="refresh" content="0; url=?noscript">
</noscript>
<link rel="icon" type="image/x-icon" href="__ASSETS_PREFIX__favicon.ico">
<link rel="stylesheet" href="__ASSETS_PREFIX__index.css">
</head>

View File

@@ -50,6 +50,8 @@ const IFRAME_FORMATS = [
".mp3", ".ogg", ".wav", ".m4a",
];
const MAX_SUBPATHS_COUNT = 1000;
const ICONS = {
dir: `<svg height="16" viewBox="0 0 14 16" width="14"><path fill-rule="evenodd" d="M13 4H7V3c0-.66-.31-1-1-1H1c-.55 0-1 .45-1 1v10c0 .55.45 1 1 1h12c.55 0 1-.45 1-1V5c0-.55-.45-1-1-1zM6 4H1V3h5v1z"></path></svg>`,
symlinkFile: `<svg height="16" viewBox="0 0 12 16" width="12"><path fill-rule="evenodd" d="M8.5 1H1c-.55 0-1 .45-1 1v12c0 .55.45 1 1 1h10c.55 0 1-.45 1-1V4.5L8.5 1zM11 14H1V2h7l3 3v9zM6 4.5l4 3-4 3v-2c-.98-.02-1.84.22-2.55.7-.71.48-1.19 1.25-1.45 2.3.02-1.64.39-2.88 1.13-3.73.73-.84 1.69-1.27 2.88-1.27v-2H6z"></path></svg>`,
@@ -248,7 +250,7 @@ class Uploader {
progress(event) {
const now = Date.now();
const speed = (event.loaded - this.uploaded) / (now - this.lastUptime) * 1000;
const [speedValue, speedUnit] = formatSize(speed);
const [speedValue, speedUnit] = formatFileSize(speed);
const speedText = `${speedValue} ${speedUnit}/s`;
const progress = formatPercent(((event.loaded + this.uploadOffset) / this.file.size) * 100);
const duration = formatDuration((event.total - event.loaded) / speed);
@@ -345,6 +347,7 @@ async function setupIndexPage() {
const $download = document.querySelector(".download");
$download.href = baseUrl() + "?zip";
$download.title = "Download folder as a .zip file";
$download.classList.add("dlwt");
$download.classList.remove("hidden");
}
@@ -365,6 +368,10 @@ async function setupIndexPage() {
renderPathsTableHead();
renderPathsTableBody();
if (DATA.user) {
setupDownloadWithToken();
}
}
/**
@@ -447,13 +454,13 @@ function addPath(file, index) {
if (DATA.allow_archive) {
actionDownload = `
<div class="action-btn">
<a href="${url}?zip" title="Download folder as a .zip file">${ICONS.download}</a>
<a class="dlwt" href="${url}?zip" title="Download folder as a .zip file" download>${ICONS.download}</a>
</div>`;
}
} else {
actionDownload = `
<div class="action-btn" >
<a href="${url}" title="Download file" download>${ICONS.download}</a>
<a class="dlwt" href="${url}" title="Download file" download>${ICONS.download}</a>
</div>`;
}
if (DATA.allow_delete) {
@@ -478,7 +485,7 @@ function addPath(file, index) {
${actionEdit}
</td>`;
let sizeDisplay = isDir ? `${file.size} ${file.size === 1 ? "item" : "items"}` : formatSize(file.size).join(" ");
let sizeDisplay = isDir ? formatDirSize(file.size) : formatFileSize(file.size).join(" ");
$pathsTableBody.insertAdjacentHTML("beforeend", `
<tr id="addPath${index}">
@@ -528,12 +535,39 @@ async function setupAuth() {
$loginBtn.addEventListener("click", async () => {
try {
await checkAuth();
} catch {}
} catch { }
location.reload();
});
}
}
function setupDownloadWithToken() {
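// For each download link (`a.dlwt`): fetch a signed token from `?tokengen` using the current credentials, then trigger the download via a temporary `<a download>` pointing at the same URL with `?token=<hex>` appended, so no Authorization header is needed.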
document.querySelectorAll("a.dlwt").forEach(link => {
link.addEventListener("click", async e => {
e.preventDefault();
try {
const link = e.currentTarget || e.target;
const originalHref = link.getAttribute("href");
const tokengenUrl = new URL(originalHref);
tokengenUrl.searchParams.set("tokengen", "");
const res = await fetch(tokengenUrl);
if (!res.ok) throw new Error("Failed to fetch token");
const token = await res.text();
const downloadUrl = new URL(originalHref);
downloadUrl.searchParams.set("token", token);
const tempA = document.createElement("a");
tempA.href = downloadUrl.toString();
tempA.download = "";
document.body.appendChild(tempA);
tempA.click();
document.body.removeChild(tempA);
} catch (err) {
alert(`Failed to download, ${err.message}`);
}
});
});
}
function setupSearch() {
const $searchbar = document.querySelector(".searchbar");
$searchbar.classList.remove("hidden");
@@ -644,7 +678,7 @@ async function setupEditorPage() {
$editor.value = decoder.decode(dataView);
}
} catch (err) {
alert(`Failed get file, ${err.message}`);
alert(`Failed to get file, ${err.message}`);
}
}
@@ -833,7 +867,7 @@ function newUrl(name) {
}
function baseUrl() {
return location.href.split('?')[0];
return location.href.split(/[?#]/)[0];
}
function baseName(url) {
@@ -878,7 +912,13 @@ function padZero(value, size) {
return ("0".repeat(size) + value).slice(-1 * size);
}
function formatSize(size) {
function formatDirSize(size) {
const unit = size === 1 ? "item" : "items";
const num = size >= MAX_SUBPATHS_COUNT ? `>${MAX_SUBPATHS_COUNT - 1}` : `${size}`;
return ` ${num} ${unit}`;
}
function formatFileSize(size) {
if (size == null) return [0, "B"];
const sizes = ['B', 'KB', 'MB', 'GB', 'TB'];
if (size == 0) return [0, "B"];

View File

@@ -146,7 +146,7 @@ pub fn build_cli() -> Command {
.hide_env(true)
.long("allow-archive")
.action(ArgAction::SetTrue)
.help("Allow zip archive generation"),
.help("Allow download folders as archive file"),
)
.arg(
Arg::new("enable-cors")

View File

@@ -2,11 +2,13 @@ use crate::{args::Args, server::Response, utils::unix_now};
use anyhow::{anyhow, bail, Result};
use base64::{engine::general_purpose::STANDARD, Engine as _};
use ed25519_dalek::{ed25519::signature::SignerMut, Signature, SigningKey};
use headers::HeaderValue;
use hyper::{header::WWW_AUTHENTICATE, Method};
use indexmap::IndexMap;
use lazy_static::lazy_static;
use md5::Context;
use sha2::{Digest, Sha256};
use std::{
collections::HashMap,
path::{Path, PathBuf},
@@ -14,7 +16,8 @@ use std::{
use uuid::Uuid;
const REALM: &str = "DUFS";
const DIGEST_AUTH_TIMEOUT: u32 = 604800; // 7 days
const DIGEST_AUTH_TIMEOUT: u32 = 60 * 60 * 24 * 7; // 7 days
const TOKEN_EXPIRATION: u64 = 1000 * 60 * 60 * 24 * 3; // 3 days
lazy_static! {
static ref NONCESTARTHASH: Context = {
@@ -69,15 +72,20 @@ impl AccessControl {
let mut anonymous = None;
if let Some(paths) = annoy_paths {
let mut access_paths = AccessPaths::default();
access_paths.merge(paths);
access_paths
.merge(paths)
.ok_or_else(|| anyhow!("Invalid auth value `@{paths}"))?;
anonymous = Some(access_paths);
}
let mut users = IndexMap::new();
for (user, pass, paths) in account_paths_pairs.into_iter() {
let mut access_paths = anonymous.clone().unwrap_or_default();
let mut access_paths = AccessPaths::default();
access_paths
.merge(paths)
.ok_or_else(|| anyhow!("Invalid auth `{user}:{pass}@{paths}"))?;
.ok_or_else(|| anyhow!("Invalid auth value `{user}:{pass}@{paths}"))?;
if let Some(paths) = annoy_paths {
access_paths.merge(paths);
}
if pass.starts_with("$6$") {
use_hashed_password = true;
}
@@ -100,16 +108,29 @@ impl AccessControl {
path: &str,
method: &Method,
authorization: Option<&HeaderValue>,
token: Option<&String>,
guard_options: bool,
) -> (Option<String>, Option<AccessPaths>) {
if self.users.is_empty() {
return (None, Some(AccessPaths::new(AccessPerm::ReadWrite)));
}
if method == Method::GET {
if let Some(token) = token {
if let Ok((user, ap)) = self.verify_token(token, path) {
return (Some(user), ap.guard(path, method));
}
}
}
if let Some(authorization) = authorization {
if let Some(user) = get_auth_user(authorization) {
if let Some((pass, paths)) = self.users.get(&user) {
if let Some((pass, ap)) = self.users.get(&user) {
if method == Method::OPTIONS {
return (Some(user), Some(AccessPaths::new(AccessPerm::ReadOnly)));
}
if check_auth(authorization, method.as_str(), &user, pass).is_some() {
return (Some(user), paths.find(path, !is_readonly_method(method)));
return (Some(user), ap.guard(path, method));
}
}
}
@@ -121,12 +142,55 @@ impl AccessControl {
return (None, Some(AccessPaths::new(AccessPerm::ReadOnly)));
}
if let Some(paths) = self.anonymous.as_ref() {
return (None, paths.find(path, !is_readonly_method(method)));
if let Some(ap) = self.anonymous.as_ref() {
return (None, ap.guard(path, method));
}
(None, None)
}
pub fn generate_token(&self, path: &str, user: &str) -> Result<String> {
let (pass, _) = self
.users
.get(user)
.ok_or_else(|| anyhow!("Not found user '{user}'"))?;
let exp = unix_now().as_millis() as u64 + TOKEN_EXPIRATION;
let message = format!("{path}:{exp}");
let mut signing_key = derive_secret_key(user, pass);
let sig = signing_key.sign(message.as_bytes()).to_bytes();
let mut raw = Vec::with_capacity(64 + 8 + user.len());
raw.extend_from_slice(&sig);
raw.extend_from_slice(&exp.to_be_bytes());
raw.extend_from_slice(user.as_bytes());
Ok(hex::encode(raw))
}
fn verify_token<'a>(&'a self, token: &str, path: &str) -> Result<(String, &'a AccessPaths)> {
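// Token layout (hex-encoded): 64-byte ed25519 signature | 8-byte big-endian expiry in ms since epoch | UTF-8 username.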
let raw = hex::decode(token)?;
let sig_bytes = &raw[..64];
let exp_bytes = &raw[64..72];
let user_bytes = &raw[72..];
let exp = u64::from_be_bytes(exp_bytes.try_into()?);
if unix_now().as_millis() as u64 > exp {
bail!("Token expired");
}
let user = std::str::from_utf8(user_bytes)?;
let (pass, ap) = self
.users
.get(user)
.ok_or_else(|| anyhow!("Not found user '{user}'"))?;
let sig = Signature::from_bytes(&<[u8; 64]>::try_from(sig_bytes)?);
let message = format!("{path}:{exp}");
derive_secret_key(user, pass).verify(message.as_bytes(), &sig)?;
Ok((user.to_string(), ap))
}
}
#[derive(Debug, Default, Clone, PartialEq, Eq)]
@@ -148,8 +212,9 @@ impl AccessPaths {
}
pub fn set_perm(&mut self, perm: AccessPerm) {
if !perm.indexonly() {
if self.perm < perm {
self.perm = perm;
self.recursively_purge_children(perm);
}
}
@@ -166,6 +231,25 @@ impl AccessPaths {
Some(())
}
pub fn guard(&self, path: &str, method: &Method) -> Option<Self> {
let target = self.find(path)?;
if !is_readonly_method(method) && !target.perm().readwrite() {
return None;
}
Some(target)
}
fn recursively_purge_children(&mut self, perm: AccessPerm) {
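// Drop child rules whose permission does not exceed the new parent permission; recurse into the children that remain.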
self.children.retain(|_, child| {
if child.perm <= perm {
false
} else {
child.recursively_purge_children(perm);
true
}
});
}
fn add(&mut self, path: &str, perm: AccessPerm) {
let path = path.trim_matches('/');
if path.is_empty() {
@@ -182,21 +266,20 @@ impl AccessPaths {
self.set_perm(perm);
return;
}
if self.perm >= perm {
return;
}
let child = self.children.entry(parts[0].to_string()).or_default();
child.add_impl(&parts[1..], perm)
}
pub fn find(&self, path: &str, writable: bool) -> Option<AccessPaths> {
pub fn find(&self, path: &str) -> Option<AccessPaths> {
let parts: Vec<&str> = path
.trim_matches('/')
.split('/')
.filter(|v| !v.is_empty())
.collect();
let target = self.find_impl(&parts, self.perm)?;
if writable && !target.perm().readwrite() {
return None;
}
Some(target)
self.find_impl(&parts, self.perm)
}
fn find_impl(&self, parts: &[&str], perm: AccessPerm) -> Option<AccessPaths> {
@@ -229,20 +312,20 @@ impl AccessPaths {
self.children.keys().collect()
}
pub fn child_paths(&self, base: &Path) -> Vec<PathBuf> {
pub fn entry_paths(&self, base: &Path) -> Vec<PathBuf> {
if !self.perm().indexonly() {
return vec![base.to_path_buf()];
}
let mut output = vec![];
self.child_paths_impl(&mut output, base);
self.entry_paths_impl(&mut output, base);
output
}
fn child_paths_impl(&self, output: &mut Vec<PathBuf>, base: &Path) {
fn entry_paths_impl(&self, output: &mut Vec<PathBuf>, base: &Path) {
for (name, child) in self.children.iter() {
let base = base.join(name);
if child.perm().indexonly() {
child.child_paths_impl(output, &base);
child.entry_paths_impl(output, &base);
} else {
output.push(base)
}
@@ -270,15 +353,14 @@ impl AccessPerm {
pub fn www_authenticate(res: &mut Response, args: &Args) -> Result<()> {
if args.auth.use_hashed_password {
let basic = HeaderValue::from_str(&format!("Basic realm=\"{}\"", REALM))?;
let basic = HeaderValue::from_str(&format!("Basic realm=\"{REALM}\""))?;
res.headers_mut().insert(WWW_AUTHENTICATE, basic);
} else {
let nonce = create_nonce()?;
let digest = HeaderValue::from_str(&format!(
"Digest realm=\"{}\", nonce=\"{}\", qop=\"auth\"",
REALM, nonce
"Digest realm=\"{REALM}\", nonce=\"{nonce}\", qop=\"auth\""
))?;
let basic = HeaderValue::from_str(&format!("Basic realm=\"{}\"", REALM))?;
let basic = HeaderValue::from_str(&format!("Basic realm=\"{REALM}\""))?;
res.headers_mut().append(WWW_AUTHENTICATE, digest);
res.headers_mut().append(WWW_AUTHENTICATE, basic);
}
@@ -340,8 +422,8 @@ pub fn check_auth(
}
let mut h = Context::new();
h.consume(format!("{}:{}:{}", auth_user, REALM, auth_pass).as_bytes());
let auth_pass = format!("{:x}", h.compute());
h.consume(format!("{auth_user}:{REALM}:{auth_pass}").as_bytes());
let auth_pass = format!("{:x}", h.finalize());
let mut ha = Context::new();
ha.consume(method);
@@ -349,7 +431,7 @@ pub fn check_auth(
if let Some(uri) = digest_map.get(b"uri".as_ref()) {
ha.consume(uri);
}
let ha = format!("{:x}", ha.compute());
let ha = format!("{:x}", ha.finalize());
let mut correct_response = None;
if let Some(qop) = digest_map.get(b"qop".as_ref()) {
if qop == &b"auth".as_ref() || qop == &b"auth-int".as_ref() {
@@ -370,7 +452,7 @@ pub fn check_auth(
c.consume(qop);
c.consume(b":");
c.consume(&*ha);
format!("{:x}", c.compute())
format!("{:x}", c.finalize())
});
}
}
@@ -383,7 +465,7 @@ pub fn check_auth(
c.consume(nonce);
c.consume(b":");
c.consume(&*ha);
format!("{:x}", c.compute())
format!("{:x}", c.finalize())
}
};
if correct_response.as_bytes() == *user_response {
@@ -396,6 +478,13 @@ pub fn check_auth(
}
}
fn derive_secret_key(user: &str, pass: &str) -> SigningKey {
let mut hasher = Sha256::new();
hasher.update(format!("{user}:{pass}").as_bytes());
let hash = hasher.finalize();
SigningKey::from_bytes(&hash.into())
}
/// Check if a nonce is still valid.
/// Return an error if it was never valid
fn validate_nonce(nonce: &[u8]) -> Result<bool> {
@@ -407,14 +496,14 @@ fn validate_nonce(nonce: &[u8]) -> Result<bool> {
//get time
if let Ok(secs_nonce) = u32::from_str_radix(&n[..8], 16) {
//check time
let now = unix_now()?;
let now = unix_now();
let secs_now = now.as_secs() as u32;
if let Some(dur) = secs_now.checked_sub(secs_nonce) {
//check hash
let mut h = NONCESTARTHASH.clone();
h.consume(secs_nonce.to_be_bytes());
let h = format!("{:x}", h.compute());
let h = format!("{:x}", h.finalize());
if h[..26] == n[8..34] {
return Ok(dur < DIGEST_AUTH_TIMEOUT);
}
@@ -487,12 +576,12 @@ fn to_headermap(header: &[u8]) -> Result<HashMap<&[u8], &[u8]>, ()> {
}
fn create_nonce() -> Result<String> {
let now = unix_now()?;
let now = unix_now();
let secs = now.as_secs() as u32;
let mut h = NONCESTARTHASH.clone();
h.consume(secs.to_be_bytes());
let n = format!("{:08x}{:032x}", secs, h.compute());
let n = format!("{:08x}{:032x}", secs, h.finalize());
Ok(n[..34].to_string())
}
@@ -574,7 +663,7 @@ mod tests {
paths.add("/dir2/dir22/dir221", AccessPerm::ReadWrite);
paths.add("/dir2/dir23/dir231", AccessPerm::ReadWrite);
assert_eq!(
paths.child_paths(Path::new("/tmp")),
paths.entry_paths(Path::new("/tmp")),
[
"/tmp/dir1",
"/tmp/dir2/dir21",
@@ -587,8 +676,8 @@ mod tests {
);
assert_eq!(
paths
.find("dir2", false)
.map(|v| v.child_paths(Path::new("/tmp/dir2"))),
.find("dir2")
.map(|v| v.entry_paths(Path::new("/tmp/dir2"))),
Some(
[
"/tmp/dir2/dir21",
@@ -600,19 +689,30 @@ mod tests {
.collect::<Vec<_>>()
)
);
assert_eq!(paths.find("dir2", true), None);
assert_eq!(
paths.find("dir1/file", true),
paths.find("dir1/file"),
Some(AccessPaths::new(AccessPerm::ReadWrite))
);
assert_eq!(
paths.find("dir2/dir21/file", true),
paths.find("dir2/dir21/file"),
Some(AccessPaths::new(AccessPerm::ReadWrite))
);
assert_eq!(
paths.find("dir2/dir21/dir211/file", false),
paths.find("dir2/dir21/dir211/file"),
Some(AccessPaths::new(AccessPerm::ReadWrite))
);
assert_eq!(
paths.find("dir2/dir22/file"),
Some(AccessPaths::new(AccessPerm::ReadOnly))
);
assert_eq!(paths.find("dir2/dir21/dir211/file", true), None);
assert_eq!(
paths.find("dir2/dir22/dir221/file"),
Some(AccessPaths::new(AccessPerm::ReadWrite))
);
assert_eq!(paths.find("dir2/dir23/file"), None);
assert_eq!(
paths.find("dir2/dir23//dir231/file"),
Some(AccessPaths::new(AccessPerm::ReadWrite))
);
}
}
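As a usage illustration of the token flow added above, here is a minimal client-side sketch. It mirrors what the webui's `setupDownloadWithToken` and the new `token_auth` test do: request `?tokengen` with credentials, then fetch the same path anonymously with `?token=<hex>`. Assumptions (not from the source): a dufs instance at `http://127.0.0.1:5000` started with `-a user:pass@/`, and the blocking `reqwest` client that the test suite already depends on.

```rust
use reqwest::blocking::Client;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Hypothetical server and path; adjust to your setup.
    let url = "http://127.0.0.1:5000/dir1/file.bin";
    let client = Client::new();

    // 1. An authenticated GET with `?tokengen` returns a signed token bound to this path.
    let token = client
        .get(format!("{url}?tokengen"))
        .basic_auth("user", Some("pass"))
        .send()?
        .error_for_status()?
        .text()?;

    // 2. The token alone authorizes a plain GET of that path (GET only; expires after 3 days).
    let resp = client.get(format!("{url}?token={token}")).send()?;
    assert!(resp.status().is_success());
    Ok(())
}
```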

View File

@@ -64,8 +64,8 @@ impl HttpLogger {
}
}
match err {
Some(err) => error!("{} {}", output, err),
None => info!("{}", output),
Some(err) => error!("{output} {err}"),
None => info!("{output}"),
}
}
}

View File

@@ -3,6 +3,7 @@ mod auth;
mod http_logger;
mod http_utils;
mod logger;
mod noscript;
mod server;
mod utils;
@@ -57,7 +58,7 @@ async fn main() -> Result<()> {
ret = join_all(handles) => {
for r in ret {
if let Err(e) = r {
error!("{}", e);
error!("{e}");
}
}
Ok(())
@@ -154,7 +155,7 @@ fn serve(args: Args, running: Arc<AtomicBool>) -> Result<Vec<JoinHandle<()>>> {
path.into()
};
let listener = tokio::net::UnixListener::bind(socket_path)
.with_context(|| format!("Failed to bind `{}`", path))?;
.with_context(|| format!("Failed to bind `{path}`"))?;
let handle = tokio::spawn(async move {
loop {
let Ok((stream, _addr)) = listener.accept().await else {

src/noscript.rs (new file, 100 lines)
View File

@@ -0,0 +1,100 @@
use crate::{
server::{IndexData, PathItem, PathType, MAX_SUBPATHS_COUNT},
utils::encode_uri,
};
use anyhow::Result;
use chrono::{DateTime, Utc};
use xml::escape::escape_str_pcdata;
pub fn detect_noscript(user_agent: &str) -> bool {
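// The server lowercases the user agent before calling this, so lowercase prefixes are sufficient.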
[
"lynx/", "w3m/", "links ", "elinks/", "curl/", "wget/", "httpie/", "aria2/",
]
.iter()
.any(|v| user_agent.starts_with(v))
}
pub fn generate_noscript_html(data: &IndexData) -> Result<String> {
let mut html = String::new();
let title = format!("Index of {}", escape_str_pcdata(&data.href));
html.push_str("<html>\n");
html.push_str("<head>\n");
html.push_str(&format!("<title>{title}</title>\n"));
html.push_str(
r#"<style>
td {
padding: 0.2rem;
text-align: left;
}
td:nth-child(3) {
text-align: right;
}
</style>
"#,
);
html.push_str("</head>\n");
html.push_str("<body>\n");
html.push_str(&format!("<h1>{title}</h1>\n"));
html.push_str("<table>\n");
html.push_str(" <tbody>\n");
html.push_str(&format!(" {}\n", render_parent()));
for path in &data.paths {
html.push_str(&format!(" {}\n", render_path_item(path)));
}
html.push_str(" </tbody>\n");
html.push_str("</table>\n");
html.push_str("</body>\n");
Ok(html)
}
fn render_parent() -> String {
let value = "../";
format!("<tr><td><a href=\"{value}\">{value}</a></td><td></td><td></td></tr>")
}
fn render_path_item(path: &PathItem) -> String {
let href = encode_uri(&path.name);
let suffix = if path.path_type.is_dir() { "/" } else { "" };
let name = escape_str_pcdata(&path.name);
let mtime = format_mtime(path.mtime).unwrap_or_default();
let size = format_size(path.size, path.path_type);
format!("<tr><td><a href=\"{href}{suffix}\">{name}{suffix}</a></td><td>{mtime}</td><td>{size}</td></tr>")
}
fn format_mtime(mtime: u64) -> Option<String> {
let datetime = DateTime::<Utc>::from_timestamp_millis(mtime as _)?;
Some(datetime.format("%Y-%m-%dT%H:%M:%S.%3fZ").to_string())
}
fn format_size(size: u64, path_type: PathType) -> String {
if path_type.is_dir() {
let unit = if size == 1 { "item" } else { "items" };
let num = match size >= MAX_SUBPATHS_COUNT {
true => format!(">{}", MAX_SUBPATHS_COUNT - 1),
false => size.to_string(),
};
format!("{num} {unit}")
} else {
if size == 0 {
return "0 B".to_string();
}
const UNITS: [&str; 5] = ["B", "KB", "MB", "GB", "TB"];
let i = (size as f64).log2() / 10.0;
let i = i.floor() as usize;
if i >= UNITS.len() {
// Handle extremely large numbers beyond Terabytes
return format!("{:.2} PB", size as f64 / 1024.0f64.powi(5));
}
let size = size as f64 / 1024.0f64.powi(i as i32);
format!("{:.2} {}", size, UNITS[i])
}
}

View File

@@ -2,6 +2,7 @@
use crate::auth::{www_authenticate, AccessPaths, AccessPerm};
use crate::http_utils::{body_full, IncomingStream, LengthLimitedStream};
use crate::noscript::{detect_noscript, generate_noscript_html};
use crate::utils::{
decode_uri, encode_uri, get_file_mtime_and_mode, get_file_name, glob, parse_range,
try_get_file_name,
@@ -37,7 +38,7 @@ use std::collections::HashMap;
use std::fs::Metadata;
use std::io::SeekFrom;
use std::net::SocketAddr;
use std::path::{Component, Path, PathBuf};
use std::path::{Component, Path, PathBuf, MAIN_SEPARATOR};
use std::sync::atomic::{self, AtomicBool};
use std::sync::Arc;
use std::time::SystemTime;
@@ -48,7 +49,7 @@ use tokio::{fs, io};
use tokio_util::compat::FuturesAsyncWriteCompatExt;
use tokio_util::io::{ReaderStream, StreamReader};
use uuid::Uuid;
use walkdir::WalkDir;
use walkdir::{DirEntry, WalkDir};
use xml::escape::escape_str_pcdata;
pub type Request = hyper::Request<Incoming>;
@@ -63,6 +64,7 @@ const BUF_SIZE: usize = 65536;
const EDITABLE_TEXT_MAX_SIZE: u64 = 4194304; // 4M
const RESUMABLE_UPLOAD_MIN_SIZE: u64 = 20971520; // 20M
const HEALTH_CHECK_PATH: &str = "__dufs__/health";
pub const MAX_SUBPATHS_COUNT: u64 = 1000;
pub struct Server {
args: Args,
@@ -109,18 +111,12 @@ impl Server {
let uri = req.uri().clone();
let assets_prefix = &self.assets_prefix;
let enable_cors = self.args.enable_cors;
let is_microsoft_webdav = req
.headers()
.get("user-agent")
.and_then(|v| v.to_str().ok())
.map(|v| v.starts_with("Microsoft-WebDAV-MiniRedir/"))
.unwrap_or_default();
let mut http_log_data = self.args.http_logger.data(&req);
if let Some(addr) = addr {
http_log_data.insert("remote_addr".to_string(), addr.ip().to_string());
}
let mut res = match self.clone().handle(req, is_microsoft_webdav).await {
let mut res = match self.clone().handle(req).await {
Ok(res) => {
http_log_data.insert("status".to_string(), res.status().as_u16().to_string());
if !uri.path().starts_with(assets_prefix) {
@@ -140,22 +136,13 @@ impl Server {
}
};
if is_microsoft_webdav {
// microsoft webdav requires this.
res.headers_mut()
.insert(CONNECTION, HeaderValue::from_static("close"));
}
if enable_cors {
add_cors(&mut res);
}
Ok(res)
}
pub async fn handle(
self: Arc<Self>,
req: Request,
is_microsoft_webdav: bool,
) -> Result<Response> {
pub async fn handle(self: Arc<Self>, req: Request) -> Result<Response> {
let mut res = Response::default();
let req_path = req.uri().path();
@@ -178,11 +165,34 @@ impl Server {
return Ok(res);
}
let user_agent = headers
.get("user-agent")
.and_then(|v| v.to_str().ok())
.map(|v| v.to_lowercase())
.unwrap_or_default();
let is_microsoft_webdav = user_agent.starts_with("microsoft-webdav-miniredir/");
if is_microsoft_webdav {
// microsoft webdav requires this.
res.headers_mut()
.insert(CONNECTION, HeaderValue::from_static("close"));
}
let authorization = headers.get(AUTHORIZATION);
let guard =
self.args
.auth
.guard(&relative_path, &method, authorization, is_microsoft_webdav);
let query = req.uri().query().unwrap_or_default();
let mut query_params: HashMap<String, String> = form_urlencoded::parse(query.as_bytes())
.map(|(k, v)| (k.to_string(), v.to_string()))
.collect();
let guard = self.args.auth.guard(
&relative_path,
&method,
authorization,
query_params.get("token"),
is_microsoft_webdav,
);
let (user, access_paths) = match guard {
(None, None) => {
@@ -196,24 +206,23 @@ impl Server {
(x, Some(y)) => (x, y),
};
let query = req.uri().query().unwrap_or_default();
let query_params: HashMap<String, String> = form_urlencoded::parse(query.as_bytes())
.map(|(k, v)| (k.to_string(), v.to_string()))
.collect();
if detect_noscript(&user_agent) {
query_params.insert("noscript".to_string(), String::new());
}
if method.as_str() == "CHECKAUTH" {
match user.clone() {
Some(user) => {
*res.body_mut() = body_full(user);
}
None => self.auth_reject(&mut res)?,
}
*res.body_mut() = body_full(user.clone().unwrap_or_default());
return Ok(res);
} else if method.as_str() == "LOGOUT" {
self.auth_reject(&mut res)?;
return Ok(res);
}
if has_query_flag(&query_params, "tokengen") {
self.handle_tokengen(&relative_path, user, &mut res).await?;
return Ok(res);
}
let head_only = method == Method::HEAD;
if self.args.path_is_file {
@@ -506,7 +515,7 @@ impl Server {
};
let stream = IncomingStream::new(req.into_body());
let body_with_io_error = stream.map_err(|err| io::Error::new(io::ErrorKind::Other, err));
let body_with_io_error = stream.map_err(io::Error::other);
let body_reader = StreamReader::new(body_with_io_error);
pin_mut!(body_reader);
@@ -591,47 +600,20 @@ impl Server {
} else {
let path_buf = path.to_path_buf();
let hidden = Arc::new(self.args.hidden.to_vec());
let hidden = hidden.clone();
let running = self.running.clone();
let search = search.clone();
let access_paths = access_paths.clone();
let search_paths = tokio::task::spawn_blocking(move || {
let mut paths: Vec<PathBuf> = vec![];
for dir in access_paths.child_paths(&path_buf) {
let mut it = WalkDir::new(&dir).into_iter();
it.next();
while let Some(Ok(entry)) = it.next() {
if !running.load(atomic::Ordering::SeqCst) {
break;
}
let entry_path = entry.path();
let base_name = get_file_name(entry_path);
let file_type = entry.file_type();
let mut is_dir_type: bool = file_type.is_dir();
if file_type.is_symlink() {
match std::fs::symlink_metadata(entry_path) {
Ok(meta) => {
is_dir_type = meta.is_dir();
}
Err(_) => {
continue;
}
}
}
if is_hidden(&hidden, base_name, is_dir_type) {
if file_type.is_dir() {
it.skip_current_dir();
}
continue;
}
if !base_name.to_lowercase().contains(&search) {
continue;
}
paths.push(entry_path.to_path_buf());
}
}
paths
})
let search_paths = tokio::spawn(collect_dir_entries(
access_paths,
self.running.clone(),
path_buf,
hidden,
self.args.allow_symlink,
self.args.serve_path.clone(),
move |x| get_file_name(x.path()).to_lowercase().contains(&search),
))
.await?;
for search_path in search_paths.into_iter() {
if let Ok(Some(item)) = self.to_pathitem(search_path, path.to_path_buf()).await {
paths.push(item);
@@ -659,7 +641,7 @@ impl Server {
) -> Result<()> {
let (mut writer, reader) = tokio::io::duplex(BUF_SIZE);
let filename = try_get_file_name(path)?;
set_content_disposition(res, false, &format!("{}.zip", filename))?;
set_content_disposition(res, false, &format!("{filename}.zip"))?;
res.headers_mut()
.insert("content-type", HeaderValue::from_static("application/zip"));
if head_only {
@@ -669,6 +651,8 @@ impl Server {
let hidden = self.args.hidden.clone();
let running = self.running.clone();
let compression = self.args.compress.to_compression();
let follow_symlinks = self.args.allow_symlink;
let serve_path = self.args.serve_path.clone();
tokio::spawn(async move {
if let Err(e) = zip_dir(
&mut writer,
@@ -676,11 +660,13 @@ impl Server {
access_paths,
&hidden,
compression,
follow_symlinks,
serve_path,
running,
)
.await
{
error!("Failed to zip {}, {}", path.display(), e);
error!("Failed to zip {}, {e}", path.display());
}
});
let reader_stream = ReaderStream::with_capacity(reader, BUF_SIZE);
@@ -838,6 +824,8 @@ impl Server {
}
}
res.headers_mut()
.typed_insert(CacheControl::new().with_no_cache());
res.headers_mut().typed_insert(last_modified);
res.headers_mut().typed_insert(etag.clone());
@@ -852,7 +840,7 @@ impl Server {
}
}
let range = if use_range {
let ranges = if use_range {
headers.get(RANGE).map(|range| {
range
.to_str()
@@ -873,12 +861,14 @@ impl Server {
res.headers_mut().typed_insert(AcceptRanges::bytes());
if let Some(range) = range {
if let Some((start, end)) = range {
if let Some(ranges) = ranges {
if let Some(ranges) = ranges {
if ranges.len() == 1 {
let (start, end) = ranges[0];
file.seek(SeekFrom::Start(start)).await?;
let range_size = end - start + 1;
*res.status_mut() = StatusCode::PARTIAL_CONTENT;
let content_range = format!("bytes {}-{}/{}", start, end, size);
let content_range = format!("bytes {start}-{end}/{size}");
res.headers_mut()
.insert(CONTENT_RANGE, content_range.parse()?);
res.headers_mut()
@@ -894,6 +884,36 @@ impl Server {
);
let boxed_body = stream_body.boxed();
*res.body_mut() = boxed_body;
} else {
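// Multiple ranges: build a multipart/byteranges body in memory, one part per range, separated by a random UUID boundary.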
*res.status_mut() = StatusCode::PARTIAL_CONTENT;
let boundary = Uuid::new_v4();
let mut body = Vec::new();
let content_type = get_content_type(path).await?;
for (start, end) in ranges {
file.seek(SeekFrom::Start(start)).await?;
let range_size = end - start + 1;
let content_range = format!("bytes {start}-{end}/{size}");
let part_header = format!(
"--{boundary}\r\nContent-Type: {content_type}\r\nContent-Range: {content_range}\r\n\r\n",
);
body.extend_from_slice(part_header.as_bytes());
let mut buffer = vec![0; range_size as usize];
file.read_exact(&mut buffer).await?;
body.extend_from_slice(&buffer);
body.extend_from_slice(b"\r\n");
}
body.extend_from_slice(format!("--{boundary}--\r\n").as_bytes());
res.headers_mut().insert(
CONTENT_TYPE,
format!("multipart/byteranges; boundary={boundary}").parse()?,
);
res.headers_mut()
.insert(CONTENT_LENGTH, format!("{}", body.len()).parse()?);
if head_only {
return Ok(());
}
*res.body_mut() = body_full(body);
}
} else {
*res.status_mut() = StatusCode::RANGE_NOT_SATISFIABLE;
res.headers_mut()
@@ -957,7 +977,9 @@ impl Server {
)
.replace("__INDEX_DATA__", &index_data);
res.headers_mut()
.typed_insert(ContentLength(output.as_bytes().len() as u64));
.typed_insert(ContentLength(output.len() as u64));
res.headers_mut()
.typed_insert(CacheControl::new().with_no_cache());
if head_only {
return Ok(());
}
@@ -975,7 +997,7 @@ impl Server {
res.headers_mut()
.typed_insert(ContentType::from(mime_guess::mime::TEXT_HTML_UTF_8));
res.headers_mut()
.typed_insert(ContentLength(output.as_bytes().len() as u64));
.typed_insert(ContentLength(output.len() as u64));
if head_only {
return Ok(());
}
@@ -983,6 +1005,24 @@ impl Server {
Ok(())
}
async fn handle_tokengen(
&self,
relative_path: &str,
user: Option<String>,
res: &mut Response,
) -> Result<()> {
let output = self
.args
.auth
.generate_token(relative_path, &user.unwrap_or_default())?;
res.headers_mut()
.typed_insert(ContentType::from(mime_guess::mime::TEXT_PLAIN_UTF_8));
res.headers_mut()
.typed_insert(ContentLength(output.len() as u64));
*res.body_mut() = body_full(output);
Ok(())
}
async fn handle_propfind_dir(
&self,
path: &Path,
@@ -1165,7 +1205,7 @@ impl Server {
res.headers_mut()
.typed_insert(ContentType::from(mime_guess::mime::TEXT_HTML_UTF_8));
res.headers_mut()
.typed_insert(ContentLength(output.as_bytes().len() as u64));
.typed_insert(ContentLength(output.len() as u64));
*res.body_mut() = body_full(output);
if head_only {
return Ok(());
@@ -1194,6 +1234,10 @@ impl Server {
res.headers_mut()
.typed_insert(ContentType::from(mime_guess::mime::APPLICATION_JSON));
serde_json::to_string_pretty(&data)?
} else if has_query_flag(query_params, "noscript") {
res.headers_mut()
.typed_insert(ContentType::from(mime_guess::mime::TEXT_HTML_UTF_8));
generate_noscript_html(&data)?
} else {
res.headers_mut()
.typed_insert(ContentType::from(mime_guess::mime::TEXT_HTML_UTF_8));
@@ -1207,7 +1251,7 @@ impl Server {
.replace("__INDEX_DATA__", &index_data)
};
res.headers_mut()
.typed_insert(ContentLength(output.as_bytes().len() as u64));
.typed_insert(ContentLength(output.len() as u64));
res.headers_mut()
.typed_insert(CacheControl::new().with_no_cache());
res.headers_mut().insert(
@@ -1254,7 +1298,7 @@ impl Server {
let guard = self
.args
.auth
.guard(&dest_path, req.method(), authorization, false);
.guard(&dest_path, req.method(), authorization, None, false);
match guard {
(_, Some(_)) => {}
@@ -1368,13 +1412,29 @@ impl Server {
(true, false) => PathType::SymlinkFile,
(false, false) => PathType::File,
};
let mtime = to_timestamp(&meta.modified()?);
let mtime = match meta.modified().ok().or_else(|| meta.created().ok()) {
Some(v) => to_timestamp(&v),
None => 0,
};
let size = match path_type {
PathType::Dir | PathType::SymlinkDir => {
let mut count = 0;
let mut entries = tokio::fs::read_dir(&path).await?;
while entries.next_entry().await?.is_some() {
while let Some(entry) = entries.next_entry().await? {
let entry_path = entry.path();
let base_name = get_file_name(&entry_path);
let is_dir = entry
.file_type()
.await
.map(|v| v.is_dir())
.unwrap_or_default();
if is_hidden(&self.args.hidden, base_name, is_dir) {
continue;
}
count += 1;
if count >= MAX_SUBPATHS_COUNT {
break;
}
}
count
}
@@ -1392,45 +1452,33 @@ impl Server {
}
#[derive(Debug, Serialize, PartialEq)]
enum DataKind {
pub enum DataKind {
Index,
Edit,
View,
}
#[derive(Debug, Serialize)]
struct IndexData {
href: String,
kind: DataKind,
uri_prefix: String,
allow_upload: bool,
allow_delete: bool,
allow_search: bool,
allow_archive: bool,
dir_exists: bool,
auth: bool,
user: Option<String>,
paths: Vec<PathItem>,
}
#[derive(Debug, Serialize)]
struct EditData {
href: String,
kind: DataKind,
uri_prefix: String,
allow_upload: bool,
allow_delete: bool,
auth: bool,
user: Option<String>,
editable: bool,
pub struct IndexData {
pub href: String,
pub kind: DataKind,
pub uri_prefix: String,
pub allow_upload: bool,
pub allow_delete: bool,
pub allow_search: bool,
pub allow_archive: bool,
pub dir_exists: bool,
pub auth: bool,
pub user: Option<String>,
pub paths: Vec<PathItem>,
}
#[derive(Debug, Serialize, Eq, PartialEq, Ord, PartialOrd)]
struct PathItem {
path_type: PathType,
name: String,
mtime: u64,
size: u64,
pub struct PathItem {
pub path_type: PathType,
pub name: String,
pub mtime: u64,
pub size: u64,
}
impl PathItem {
@@ -1481,7 +1529,7 @@ impl PathItem {
}
pub fn base_name(&self) -> &str {
self.name.split('/').last().unwrap_or_default()
self.name.split('/').next_back().unwrap_or_default()
}
pub fn sort_by_name(&self, other: &Self) -> Ordering {
@@ -1508,14 +1556,20 @@ impl PathItem {
}
}
#[derive(Debug, Serialize, Eq, PartialEq)]
enum PathType {
#[derive(Debug, Serialize, Clone, Copy, Eq, PartialEq)]
pub enum PathType {
Dir,
SymlinkDir,
File,
SymlinkFile,
}
impl PathType {
pub fn is_dir(&self) -> bool {
matches!(self, Self::Dir | Self::SymlinkDir)
}
}
impl Ord for PathType {
fn cmp(&self, other: &Self) -> Ordering {
let to_value = |t: &Self| -> u8 {
@@ -1534,6 +1588,18 @@ impl PartialOrd for PathType {
}
}
#[derive(Debug, Serialize)]
struct EditData {
href: String,
kind: DataKind,
uri_prefix: String,
allow_upload: bool,
allow_delete: bool,
auth: bool,
user: Option<String>,
editable: bool,
}
fn to_timestamp(time: &SystemTime) -> u64 {
time.duration_since(SystemTime::UNIX_EPOCH)
.unwrap_or_default()
@@ -1597,54 +1663,29 @@ async fn zip_dir<W: AsyncWrite + Unpin>(
access_paths: AccessPaths,
hidden: &[String],
compression: Compression,
follow_symlinks: bool,
serve_path: PathBuf,
running: Arc<AtomicBool>,
) -> Result<()> {
let mut writer = ZipFileWriter::with_tokio(writer);
let hidden = Arc::new(hidden.to_vec());
let dir_clone = dir.to_path_buf();
let zip_paths = tokio::task::spawn_blocking(move || {
let mut paths: Vec<PathBuf> = vec![];
for dir in access_paths.child_paths(&dir_clone) {
let mut it = WalkDir::new(&dir).into_iter();
it.next();
while let Some(Ok(entry)) = it.next() {
if !running.load(atomic::Ordering::SeqCst) {
break;
}
let entry_path = entry.path();
let base_name = get_file_name(entry_path);
let file_type = entry.file_type();
let mut is_dir_type: bool = file_type.is_dir();
if file_type.is_symlink() {
match std::fs::symlink_metadata(entry_path) {
Ok(meta) => {
is_dir_type = meta.is_dir();
}
Err(_) => {
continue;
}
}
}
if is_hidden(&hidden, base_name, is_dir_type) {
if file_type.is_dir() {
it.skip_current_dir();
}
continue;
}
if entry.path().symlink_metadata().is_err() {
continue;
}
if !file_type.is_file() {
continue;
}
paths.push(entry_path.to_path_buf());
}
}
paths
})
let zip_paths = tokio::task::spawn(collect_dir_entries(
access_paths,
running,
dir.to_path_buf(),
hidden,
follow_symlinks,
serve_path,
move |x| x.path().symlink_metadata().is_ok() && x.file_type().is_file(),
))
.await?;
for zip_path in zip_paths.into_iter() {
let filename = match zip_path.strip_prefix(dir).ok().and_then(|v| v.to_str()) {
let filename = match zip_path
.strip_prefix(dir)
.ok()
.and_then(|v| v.to_str())
.map(|v| v.replace(MAIN_SEPARATOR, "/"))
{
Some(v) => v,
None => continue,
};
@@ -1662,7 +1703,7 @@ async fn zip_dir<W: AsyncWrite + Unpin>(
}
fn extract_cache_headers(meta: &Metadata) -> Option<(ETag, LastModified)> {
let mtime = meta.modified().ok()?;
let mtime = meta.modified().ok().or_else(|| meta.created().ok())?;
let timestamp = to_timestamp(&mtime);
let size = meta.len();
let etag = format!(r#""{timestamp}-{size}""#).parse::<ETag>().ok()?;
@@ -1704,7 +1745,7 @@ fn set_content_disposition(res: &mut Response, inline: bool, filename: &str) ->
})
.collect();
let value = if filename.is_ascii() {
HeaderValue::from_str(&format!("{kind}; filename=\"{}\"", filename,))?
HeaderValue::from_str(&format!("{kind}; filename=\"{filename}\"",))?
} else {
HeaderValue::from_str(&format!(
"{kind}; filename=\"{}\"; filename*=UTF-8''{}",
@@ -1716,9 +1757,9 @@ fn set_content_disposition(res: &mut Response, inline: bool, filename: &str) ->
Ok(())
}
fn is_hidden(hidden: &[String], file_name: &str, is_dir_type: bool) -> bool {
fn is_hidden(hidden: &[String], file_name: &str, is_dir: bool) -> bool {
hidden.iter().any(|v| {
if is_dir_type {
if is_dir {
if let Some(x) = v.strip_suffix('/') {
return glob(x, file_name);
}
@@ -1779,8 +1820,10 @@ fn parse_upload_offset(headers: &HeaderMap<HeaderValue>, size: u64) -> Result<Op
if value == "append" {
return Ok(Some(size));
}
let (start, _) = parse_range(value, size).ok_or_else(err)?;
Ok(Some(start))
// use the first range
let ranges = parse_range(value, size).ok_or_else(err)?;
let (start, _) = ranges.first().ok_or_else(err)?;
Ok(Some(*start))
}
async fn sha256_file(path: &Path) -> Result<String> {
@@ -1797,7 +1840,7 @@ async fn sha256_file(path: &Path) -> Result<String> {
}
let result = hasher.finalize();
Ok(format!("{:x}", result))
Ok(format!("{result:x}"))
}
fn has_query_flag(query_params: &HashMap<String, String>, name: &str) -> bool {
@@ -1806,3 +1849,57 @@ fn has_query_flag(query_params: &HashMap<String, String>, name: &str) -> bool {
.map(|v| v.is_empty())
.unwrap_or_default()
}
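// Shared by search and zip archiving: walks each permitted entry path, following symlinks, but skips entries that resolve outside the serve root unless `--allow-symlink` (follow_symlinks) is set.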
async fn collect_dir_entries<F>(
access_paths: AccessPaths,
running: Arc<AtomicBool>,
path: PathBuf,
hidden: Arc<Vec<String>>,
follow_symlinks: bool,
serve_path: PathBuf,
include_entry: F,
) -> Vec<PathBuf>
where
F: Fn(&DirEntry) -> bool,
{
let mut paths: Vec<PathBuf> = vec![];
for dir in access_paths.entry_paths(&path) {
let mut it = WalkDir::new(&dir).follow_links(true).into_iter();
it.next();
while let Some(Ok(entry)) = it.next() {
if !running.load(atomic::Ordering::SeqCst) {
break;
}
let entry_path = entry.path();
let base_name = get_file_name(entry_path);
let is_dir = entry.file_type().is_dir();
if is_hidden(&hidden, base_name, is_dir) {
if is_dir {
it.skip_current_dir();
}
continue;
}
if !follow_symlinks
&& !fs::canonicalize(entry_path)
.await
.ok()
.map(|v| v.starts_with(&serve_path))
.unwrap_or_default()
{
// We walked outside the server's root. This could only have
// happened if we followed a symlink, and hence we only allow it
// if allow_symlink is enabled, otherwise we skip this entry.
if is_dir {
it.skip_current_dir();
}
continue;
}
if !include_entry(&entry) {
continue;
}
paths.push(entry_path.to_path_buf());
}
}
paths
}

View File

@@ -8,10 +8,10 @@ use std::{
time::{Duration, SystemTime, UNIX_EPOCH},
};
pub fn unix_now() -> Result<Duration> {
pub fn unix_now() -> Duration {
SystemTime::now()
.duration_since(UNIX_EPOCH)
.with_context(|| "Invalid system time")
.expect("Unable to get unix epoch time")
}
pub fn encode_uri(v: &str) -> String {
@@ -19,7 +19,7 @@ pub fn encode_uri(v: &str) -> String {
parts.join("/")
}
pub fn decode_uri(v: &str) -> Option<Cow<str>> {
pub fn decode_uri(v: &str) -> Option<Cow<'_, str>> {
percent_encoding::percent_decode(v.as_bytes())
.decode_utf8()
.ok()
@@ -100,36 +100,42 @@ pub fn load_private_key<T: AsRef<Path>>(filename: T) -> Result<PrivateKeyDer<'st
anyhow::bail!("No supported private key in file");
}
pub fn parse_range(range: &str, size: u64) -> Option<(u64, u64)> {
let (unit, range) = range.split_once('=')?;
if unit != "bytes" || range.contains(',') {
pub fn parse_range(range: &str, size: u64) -> Option<Vec<(u64, u64)>> {
let (unit, ranges) = range.split_once('=')?;
if unit != "bytes" {
return None;
}
let (start, end) = range.split_once('-')?;
let mut result = Vec::new();
for range in ranges.split(',') {
let (start, end) = range.trim().split_once('-')?;
if start.is_empty() {
let offset = end.parse::<u64>().ok()?;
if offset <= size {
Some((size - offset, size - 1))
result.push((size - offset, size - 1));
} else {
None
return None;
}
} else {
let start = start.parse::<u64>().ok()?;
if start < size {
if end.is_empty() {
Some((start, size - 1))
result.push((start, size - 1));
} else {
let end = end.parse::<u64>().ok()?;
if end < size {
Some((start, end))
result.push((start, end));
} else {
None
return None;
}
}
} else {
None
return None;
}
}
}
Some(result)
}
#[cfg(test)]
@@ -162,13 +168,19 @@ mod tests {
#[test]
fn test_parse_range() {
assert_eq!(parse_range("bytes=0-499", 500), Some((0, 499)));
assert_eq!(parse_range("bytes=0-", 500), Some((0, 499)));
assert_eq!(parse_range("bytes=299-", 500), Some((299, 499)));
assert_eq!(parse_range("bytes=-500", 500), Some((0, 499)));
assert_eq!(parse_range("bytes=-300", 500), Some((200, 499)));
assert_eq!(parse_range("bytes=0-499", 500), Some(vec![(0, 499)]));
assert_eq!(parse_range("bytes=0-", 500), Some(vec![(0, 499)]));
assert_eq!(parse_range("bytes=299-", 500), Some(vec![(299, 499)]));
assert_eq!(parse_range("bytes=-500", 500), Some(vec![(0, 499)]));
assert_eq!(parse_range("bytes=-300", 500), Some(vec![(200, 499)]));
assert_eq!(
parse_range("bytes=0-199, 100-399, 400-, -200", 500),
Some(vec![(0, 199), (100, 399), (400, 499), (300, 499)])
);
assert_eq!(parse_range("bytes=500-", 500), None);
assert_eq!(parse_range("bytes=-501", 500), None);
assert_eq!(parse_range("bytes=0-500", 500), None);
assert_eq!(parse_range("bytes=0-199,", 500), None);
assert_eq!(parse_range("bytes=0-199, 500-", 500), None);
}
}

View File

@@ -115,6 +115,16 @@ fn auth_skip_on_options_method(
Ok(())
}
#[rstest]
fn auth_skip_if_no_auth_user(server: TestServer) -> Result<(), Error> {
let url = format!("{}index.html", server.url());
let resp = fetch!(b"GET", &url)
.basic_auth("user", Some("pass"))
.send()?;
assert_eq!(resp.status(), 200);
Ok(())
}
#[rstest]
fn auth_check(
#[with(&["--auth", "user:pass@/:rw", "--auth", "user2:pass2@/", "-A"])] server: TestServer,
@@ -326,16 +336,31 @@ fn auth_data(
}
#[rstest]
fn auth_precedence(
#[with(&["--auth", "user:pass@/dir1:rw,/dir1/test.txt", "-A"])] server: TestServer,
fn auth_shadow(
#[with(&["--auth", "user:pass@/:rw", "-a", "@/dir1", "-A"])] server: TestServer,
) -> Result<(), Error> {
let url = format!("{}dir1/test.txt", server.url());
let resp = send_with_digest_auth(fetch!(b"PUT", &url).body(b"abc".to_vec()), "user", "pass")?;
assert_eq!(resp.status(), 403);
let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
assert_eq!(resp.status(), 401);
let url = format!("{}dir1/file1", server.url());
let resp = send_with_digest_auth(fetch!(b"PUT", &url).body(b"abc".to_vec()), "user", "pass")?;
assert_eq!(resp.status(), 201);
Ok(())
}
#[rstest]
fn token_auth(#[with(&["-a", "user:pass@/"])] server: TestServer) -> Result<(), Error> {
let url = format!("{}index.html", server.url());
let resp = fetch!(b"GET", &url).send()?;
assert_eq!(resp.status(), 401);
let url = format!("{}index.html?tokengen", server.url());
let resp = fetch!(b"GET", &url)
.basic_auth("user", Some("pass"))
.send()?;
let token = resp.text()?;
let url = format!("{}index.html?token={token}", server.url());
let resp = fetch!(b"GET", &url).send()?;
assert_eq!(resp.status(), 200);
Ok(())
}

View File

@@ -49,7 +49,7 @@ fn same_etag(etag: &str) -> String {
}
fn different_etag(etag: &str) -> String {
format!("{}1234", etag)
format!("{etag}1234")
}
#[rstest]

View File

@@ -146,14 +146,14 @@ where
TestServer::new(port, tmpdir, child, is_tls)
}
/// Wait a max of 1s for the port to become available.
/// Wait a max of 2s for the port to become available.
pub fn wait_for_port(port: u16) {
let start_wait = Instant::now();
while !port_check::is_port_reachable(format!("localhost:{port}")) {
sleep(Duration::from_millis(100));
sleep(Duration::from_millis(250));
if start_wait.elapsed().as_secs() > 1 {
if start_wait.elapsed().as_secs() > 2 {
panic!("timeout waiting for port {port}");
}
}

View File

@@ -82,6 +82,19 @@ fn get_dir_simple(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
Ok(())
}
#[rstest]
fn get_dir_noscript(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(format!("{}?noscript", server.url()))?;
assert_eq!(resp.status(), 200);
assert_eq!(
resp.headers().get("content-type").unwrap(),
"text/html; charset=utf-8"
);
let text = resp.text().unwrap();
assert!(text.contains(r#"<td><a href="index.html">index.html</a></td>"#));
Ok(())
}
#[rstest]
fn head_dir_zip(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
let resp = fetch!(b"HEAD", format!("{}?zip", server.url())).send()?;

View File

@@ -2,7 +2,7 @@ mod fixtures;
mod utils;
use fixtures::{server, Error, TestServer};
use reqwest::header::HeaderValue;
use reqwest::header::{HeaderMap, HeaderName, HeaderValue};
use rstest::rstest;
#[rstest]
@@ -39,3 +39,68 @@ fn get_file_range_invalid(server: TestServer) -> Result<(), Error> {
assert_eq!(resp.headers().get("content-range").unwrap(), "bytes */18");
Ok(())
}
fn parse_multipart_body<'a>(body: &'a str, boundary: &str) -> Vec<(HeaderMap, &'a str)> {
body.split(&format!("--{boundary}"))
.filter(|part| !part.is_empty() && *part != "--\r\n")
.map(|part| {
let (head, body) = part.trim_ascii().split_once("\r\n\r\n").unwrap();
let headers = head
.split("\r\n")
.fold(HeaderMap::new(), |mut headers, header| {
let (key, value) = header.split_once(":").unwrap();
let key = HeaderName::from_bytes(key.as_bytes()).unwrap();
let value = HeaderValue::from_str(value.trim_ascii_start()).unwrap();
headers.insert(key, value);
headers
});
(headers, body)
})
.collect()
}
#[rstest]
fn get_file_multipart_range(server: TestServer) -> Result<(), Error> {
let resp = fetch!(b"GET", format!("{}index.html", server.url()))
.header("range", HeaderValue::from_static("bytes=0-11, 6-17"))
.send()?;
assert_eq!(resp.status(), 206);
assert_eq!(resp.headers().get("accept-ranges").unwrap(), "bytes");
let content_type = resp
.headers()
.get("content-type")
.unwrap()
.to_str()?
.to_string();
assert!(content_type.starts_with("multipart/byteranges; boundary="));
let boundary = content_type.split_once('=').unwrap().1.trim_ascii_start();
assert!(!boundary.is_empty());
let body = resp.text()?;
let parts = parse_multipart_body(&body, boundary);
assert_eq!(parts.len(), 2);
let (headers, body) = &parts[0];
assert_eq!(headers.get("content-range").unwrap(), "bytes 0-11/18");
assert_eq!(*body, "This is inde");
let (headers, body) = &parts[1];
assert_eq!(headers.get("content-range").unwrap(), "bytes 6-17/18");
assert_eq!(*body, "s index.html");
Ok(())
}
#[rstest]
fn get_file_multipart_range_invalid(server: TestServer) -> Result<(), Error> {
let resp = fetch!(b"GET", format!("{}index.html", server.url()))
.header("range", HeaderValue::from_static("bytes=0-6, 20-30"))
.send()?;
assert_eq!(resp.status(), 416);
assert_eq!(resp.headers().get("content-range").unwrap(), "bytes */18");
assert_eq!(resp.headers().get("accept-ranges").unwrap(), "bytes");
assert_eq!(resp.headers().get("content-length").unwrap(), "0");
Ok(())
}
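For reference, the multipart response exercised by `get_file_multipart_range` above has roughly this shape on the wire, per the body-building code in `src/server.rs` (the boundary is a random UUID and the part content type is whatever the server reports for the file; the values shown here are illustrative, using the 18-byte `index.html` fixture):

```
HTTP/1.1 206 Partial Content
Content-Type: multipart/byteranges; boundary=<uuid>
Content-Length: <body length>

--<uuid>
Content-Type: text/html
Content-Range: bytes 0-11/18

This is inde
--<uuid>
Content-Type: text/html
Content-Range: bytes 6-17/18

s index.html
--<uuid>--
```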