Compare commits

...

62 Commits

Author SHA1 Message Date
sigoden
58a46f7c3a chore: release v0.39.0 (#345) 2024-01-11 16:50:25 +08:00
sigoden
ef757281b3 chore: release v0.39.0 2024-01-11 08:31:56 +00:00
sigoden
de0614816a refactor: propfind with auth no need to list all (#344) 2024-01-11 16:10:10 +08:00
sigoden
81d2c49e3f chore: update bug_report issue template 2024-01-11 07:04:44 +00:00
sigoden
ee21894452 feat: supports resumable uploads (#343) 2024-01-11 14:56:30 +08:00
sigoden
0ac0c048ec fix: corrupted zip when downloading large folders (#337) 2024-01-07 10:50:15 +08:00
sigoden
17063454d3 chore: update bug_report issue template 2024-01-05 00:37:41 +00:00
sigoden
af347f9cf0 feat: auth supports forbidden permissions (#329) 2023-12-23 18:36:46 +08:00
sigoden
006e03ed30 fix: serve files with names containing newline char (#328) 2023-12-23 15:40:41 +08:00
sigoden
77f86a4c60 fix: auth precedence (#325) 2023-12-21 17:28:13 +08:00
sigoden
a66f95b39f chore: log error during connection 2023-12-21 08:08:15 +00:00
sigoden
52506bc01f refactor: optimize http range parsing and handling (#323) 2023-12-21 15:46:55 +08:00
sigoden
270cc0cba2 feat: upgrade to hyper 1.0 (#321) 2023-12-21 14:24:20 +08:00
sigoden
5988442d5c chore: remove debug print 2023-12-14 11:08:10 +00:00
sigoden
3873f4794a feat: add --compress option (#319) 2023-12-14 18:59:28 +08:00
plantatorbob
cd84dff87f fix: upload more than 100 files in directory (#317) 2023-12-11 18:28:11 +08:00
sigoden
8590f3e841 chore: improve readme 2023-12-09 09:17:36 +00:00
sigoden
44a4ddf973 refactor: change the value name of --config (#313) 2023-12-07 15:14:41 +08:00
sigoden
37800f630d refactor: change the format of www-authenticate (#312) 2023-12-07 15:04:14 +08:00
sigoden
5c850256f4 feat: empty search ?q= list all paths (#311) 2023-12-07 06:55:17 +08:00
sigoden
0cec573579 chore: release v0.38.0 2023-11-29 07:49:50 +08:00
sigoden
073b098111 feat: ui supports view file (#301) 2023-11-28 07:14:53 +08:00
sigoden
6ff8b29b69 feat: more flexible config values (#299) 2023-11-27 04:24:25 +08:00
sigoden
7584fe3d08 feat: deprecate the use of | to separate auth rules (#298) 2023-11-26 22:15:49 +08:00
sigoden
653cd167d0 feat: password can contain : @ | (#297) 2023-11-26 20:47:57 +08:00
sigoden
ab29e39148 chore: trivial updates 2023-11-26 15:04:12 +08:00
sigoden
f8d6859354 refactor: ui improve uploading progress (#296) 2023-11-26 10:23:37 +08:00
sigoden
130435c387 chore: update readme 2023-11-25 19:07:37 +08:00
sigoden
afdfde01f0 fix: unable to start if config file omit bind/port fields (#294) 2023-11-25 18:54:36 +08:00
sigoden
ae97c714d6 refactor: ui change the cursor for upload-btn to a pointer (#291) 2023-11-21 16:24:59 +08:00
sigoden
c352dab470 refactor: take improvements from the edge browser (#289) 2023-11-15 19:44:44 +08:00
sigoden
743db47f90 chore: release v0.37.1 2023-11-08 11:11:36 +08:00
sigoden
a476c15a09 fix: use DUFS_CONFIG to specify the config file path (#286) 2023-11-08 11:10:47 +08:00
sigoden
0d74fa3ec5 chore: release v0.37.0 2023-11-08 10:41:24 +08:00
sigoden
b83cc6938b chore: update readme 2023-11-07 22:45:53 +08:00
sigoden
a187b14885 chore: update deps and ci (#284) 2023-11-04 19:47:13 +08:00
sigoden
d3de3db0d9 feat: support hashed password (#283) 2023-11-04 18:12:58 +08:00
sigoden
80ac9afe68 refactor: improve code quality (#282)
- rename LogHttp to HttpLogger
2023-11-04 17:10:38 +08:00
sigoden
4ef07737e1 feat: support config file with --config option (#281) 2023-11-04 16:58:19 +08:00
sigoden
5782c5f413 chore: update description for --auth 2023-11-03 21:08:05 +08:00
sigoden
8b4cab1e69 fix: auto delete half-uploaded files (#280) 2023-11-03 20:58:53 +08:00
sigoden
70300b133c feat: deprecate --auth-method, as both options are available (#279)
* feat: deprecate `--auth-method`, both are available

* send one www-authenticate with two schemes
2023-11-03 20:36:23 +08:00
sigoden
7ea4bb808d refactor: optimize tests 2023-11-03 15:25:20 +08:00
sigoden
6766e0d437 fix: ui show user-name next to the user-icon (#278) 2023-11-03 14:55:07 +08:00
tieway59
53c9bc8bea refactor: remove one clone on assets_prefix (#270)
This clone is not consistent with the usage of `assets_prefix` in following
code and it's unnecessary.

Signed-off-by: TieWay59 <tieway59@foxmail.com>
2023-10-05 08:50:24 +08:00
sigoden
60df3b473c fix: sort path ignore case (#264) 2023-09-06 23:25:04 +08:00
sigoden
6510ae8be9 chore: release v0.36.0 2023-08-24 18:46:30 +08:00
sigoden
9545fb6e37 fix: ui readonly if no write perm (#258) 2023-08-24 18:32:34 +08:00
sigoden
0fd0f11298 chore: update deps 2023-08-24 16:46:38 +08:00
figsoda
46aa8fcc02 test: remove dependency on native tls (#255) 2023-08-15 11:01:25 +08:00
sigoden
09bb738866 chore: update changelog 2023-08-15 07:29:02 +08:00
sigoden
3612ef10d1 chore: release 0.35.0 (#254)
* chore: release 0.35.0

* update release profile
2023-08-15 07:24:22 +08:00
sigoden
7ac2039a36 chore: update deps 2023-08-14 17:31:52 +08:00
sigoden
7f83de765a fix: typo __ASSERTS_PREFIX__ (#252) 2023-08-13 15:05:45 +08:00
sigoden
9b3779b13a chore: update readme
close #247
2023-07-20 06:33:17 +08:00
sigoden
11a52f29c4 chore: fix release ci (#244) 2023-07-15 16:34:22 +08:00
sigoden
10204c723f chore: fix clippy (#245) 2023-07-15 16:27:13 +08:00
sigoden
204421643d chore: update ci (#242) 2023-07-04 10:25:49 +08:00
sigoden
d9706d75ef feat: sort by type first, then sort by name/mtime/size (#241) 2023-07-04 10:10:48 +08:00
sigoden
40df0bd2f9 chore: update readme 2023-06-18 08:55:42 +08:00
sigoden
a53411b4d6 fix: search should ignore entry path (#235) 2023-06-15 08:28:21 +08:00
ElmTran
609017b2f5 chore: Update README.md (#233)
update examples on new auth.
2023-06-13 08:23:05 +08:00
31 changed files with 3032 additions and 1841 deletions

View File

@@ -9,9 +9,10 @@ about: Create a report to help us improve
**Log** **Log**
If applicable, add logs to help explain your problem. The dufs log is crucial for locating the problem, so please do not omit it.
**Environment:** **Environment:**
- Dufs version: - Dufs version:
- Browser/Webdav Info: - Browser/Webdav info:
- OS Info: - OS info:
- Proxy server: e.g. nginx, cloudflare

View File

@@ -29,16 +29,12 @@ jobs:
RUSTFLAGS: --deny warnings RUSTFLAGS: --deny warnings
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v3
- name: Install Rust Toolchain Components - name: Install Rust Toolchain Components
uses: actions-rs/toolchain@v1 uses: dtolnay/rust-toolchain@stable
with:
components: clippy, rustfmt
override: true
toolchain: stable
- uses: Swatinem/rust-cache@v1 - uses: Swatinem/rust-cache@v2
- name: Test - name: Test
run: cargo test --all run: cargo test --all

View File

@@ -27,7 +27,7 @@ jobs:
- target: aarch64-pc-windows-msvc - target: aarch64-pc-windows-msvc
os: windows-latest os: windows-latest
use-cross: true use-cross: true
cargo-flags: "--no-default-features" cargo-flags: ""
- target: x86_64-apple-darwin - target: x86_64-apple-darwin
os: macos-latest os: macos-latest
cargo-flags: "" cargo-flags: ""
@@ -71,30 +71,42 @@ jobs:
use-cross: true use-cross: true
cargo-flags: "--no-default-features" cargo-flags: "--no-default-features"
runs-on: ${{matrix.os}} runs-on: ${{matrix.os}}
env:
BUILD_CMD: cargo
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v3
- name: Check Tag - name: Check Tag
id: check-tag id: check-tag
shell: bash shell: bash
run: | run: |
tag=${GITHUB_REF##*/} ver=${GITHUB_REF##*/}
echo "::set-output name=version::$tag" echo "version=$ver" >> $GITHUB_OUTPUT
if [[ "$tag" =~ [0-9]+.[0-9]+.[0-9]+$ ]]; then if [[ "$ver" =~ [0-9]+.[0-9]+.[0-9]+$ ]]; then
echo "::set-output name=rc::false" echo "rc=false" >> $GITHUB_OUTPUT
else else
echo "::set-output name=rc::true" echo "rc=true" >> $GITHUB_OUTPUT
fi fi
- name: Install Rust Toolchain Components - name: Install Rust Toolchain Components
uses: actions-rs/toolchain@v1 uses: dtolnay/rust-toolchain@stable
with: with:
override: true targets: ${{ matrix.target }}
target: ${{ matrix.target }} # Since rust 1.72, mips platforms are tier 3
toolchain: stable toolchain: 1.71
profile: minimal # minimal component installation (ie, no documentation)
- name: Install cross
if: matrix.use-cross
uses: taiki-e/install-action@v2
with:
tool: cross
- name: Overwrite build command env variable
if: matrix.use-cross
shell: bash
run: echo "BUILD_CMD=cross" >> $GITHUB_ENV
- name: Show Version Information (Rust, cargo, GCC) - name: Show Version Information (Rust, cargo, GCC)
shell: bash shell: bash
@@ -107,11 +119,8 @@ jobs:
rustc -V rustc -V
- name: Build - name: Build
uses: actions-rs/cargo@v1 shell: bash
with: run: $BUILD_CMD build --locked --release --target=${{ matrix.target }} ${{ matrix.cargo-flags }}
use-cross: ${{ matrix.use-cross }}
command: build
args: --locked --release --target=${{ matrix.target }} ${{ matrix.cargo-flags }}
- name: Build Archive - name: Build Archive
shell: bash shell: bash
@@ -123,8 +132,7 @@ jobs:
set -euxo pipefail set -euxo pipefail
bin=${GITHUB_REPOSITORY##*/} bin=${GITHUB_REPOSITORY##*/}
src=`pwd` dist_dir=`pwd`/dist
dist=$src/dist
name=$bin-$version-$target name=$bin-$version-$target
executable=target/$target/release/$bin executable=target/$target/release/$bin
@@ -132,22 +140,22 @@ jobs:
executable=$executable.exe executable=$executable.exe
fi fi
mkdir $dist mkdir $dist_dir
cp $executable $dist cp $executable $dist_dir
cd $dist cd $dist_dir
if [[ "$RUNNER_OS" == "Windows" ]]; then if [[ "$RUNNER_OS" == "Windows" ]]; then
archive=$dist/$name.zip archive=$dist_dir/$name.zip
7z a $archive * 7z a $archive *
echo "::set-output name=archive::`pwd -W`/$name.zip" echo "archive=dist/$name.zip" >> $GITHUB_OUTPUT
else else
archive=$dist/$name.tar.gz archive=$dist_dir/$name.tar.gz
tar czf $archive * tar -czf $archive *
echo "::set-output name=archive::$archive" echo "archive=dist/$name.tar.gz" >> $GITHUB_OUTPUT
fi fi
- name: Publish Archive - name: Publish Archive
uses: softprops/action-gh-release@v0.1.5 uses: softprops/action-gh-release@v1
if: ${{ startsWith(github.ref, 'refs/tags/') }} if: ${{ startsWith(github.ref, 'refs/tags/') }}
with: with:
draft: false draft: false
@@ -163,16 +171,16 @@ jobs:
needs: release needs: release
steps: steps:
- name: Set up QEMU - name: Set up QEMU
uses: docker/setup-qemu-action@v1 uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx - name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1 uses: docker/setup-buildx-action@v2
- name: Login to DockerHub - name: Login to DockerHub
uses: docker/login-action@v1 uses: docker/login-action@v2
with: with:
username: ${{ secrets.DOCKERHUB_USERNAME }} username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }} password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push - name: Build and push
uses: docker/build-push-action@v2 uses: docker/build-push-action@v4
with: with:
build-args: | build-args: |
REPO=${{ github.repository }} REPO=${{ github.repository }}
@@ -191,13 +199,11 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
needs: release needs: release
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v3
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
- name: Publish
- uses: dtolnay/rust-toolchain@stable
- name: Publish
env: env:
CARGO_REGISTRY_TOKEN: ${{ secrets.CRATES_IO_API_TOKEN }} CARGO_REGISTRY_TOKEN: ${{ secrets.CRATES_IO_API_TOKEN }}
run: cargo publish run: cargo publish

View File

@@ -2,6 +2,96 @@
All notable changes to this project will be documented in this file. All notable changes to this project will be documented in this file.
## [0.39.0] - 2024-01-11
### Bug Fixes
- Upload more than 100 files in directory ([#317](https://github.com/sigoden/dufs/issues/317))
- Auth precedence ([#325](https://github.com/sigoden/dufs/issues/325))
- Serve files with names containing newline char ([#328](https://github.com/sigoden/dufs/issues/328))
- Corrupted zip when downloading large folders ([#337](https://github.com/sigoden/dufs/issues/337))
### Features
- Empty search `?q=` list all paths ([#311](https://github.com/sigoden/dufs/issues/311))
- Add `--compress` option ([#319](https://github.com/sigoden/dufs/issues/319))
- Upgrade to hyper 1.0 ([#321](https://github.com/sigoden/dufs/issues/321))
- Auth supports forbidden permissions ([#329](https://github.com/sigoden/dufs/issues/329))
- Supports resumable uploads ([#343](https://github.com/sigoden/dufs/issues/343))
### Refactor
- Change the format of www-authenticate ([#312](https://github.com/sigoden/dufs/issues/312))
- Change the value name of `--config` ([#313](https://github.com/sigoden/dufs/issues/313))
- Optimize http range parsing and handling ([#323](https://github.com/sigoden/dufs/issues/323))
- Propfind with auth no need to list all ([#344](https://github.com/sigoden/dufs/issues/344))
## [0.38.0] - 2023-11-28
### Bug Fixes
- Unable to start if config file omit bind/port fields ([#294](https://github.com/sigoden/dufs/issues/294))
### Features
- Password can contain `:` `@` `|` ([#297](https://github.com/sigoden/dufs/issues/297))
- Deprecate the use of `|` to separate auth rules ([#298](https://github.com/sigoden/dufs/issues/298))
- More flexible config values ([#299](https://github.com/sigoden/dufs/issues/299))
- Ui supports view file ([#301](https://github.com/sigoden/dufs/issues/301))
### Refactor
- Take improvements from the edge browser ([#289](https://github.com/sigoden/dufs/issues/289))
- Ui change the cursor for upload-btn to a pointer ([#291](https://github.com/sigoden/dufs/issues/291))
- Ui improve uploading progress ([#296](https://github.com/sigoden/dufs/issues/296))
## [0.37.1] - 2023-11-08
### Bug Fixes
- Use DUFS_CONFIG to specify the config file path ([#286](https://github.com/sigoden/dufs/issues/286))
## [0.37.0] - 2023-11-08
### Bug Fixes
- Sort path ignore case ([#264](https://github.com/sigoden/dufs/issues/264))
- Ui show user-name next to the user-icon ([#278](https://github.com/sigoden/dufs/issues/278))
- Auto delete half-uploaded files ([#280](https://github.com/sigoden/dufs/issues/280))
### Features
- Deprecate `--auth-method`, as both options are available ([#279](https://github.com/sigoden/dufs/issues/279))
- Support config file with `--config` option ([#281](https://github.com/sigoden/dufs/issues/281))
- Support hashed password ([#283](https://github.com/sigoden/dufs/issues/283))
### Refactor
- Remove one clone on `assets_prefix` ([#270](https://github.com/sigoden/dufs/issues/270))
- Optimize tests
- Improve code quality ([#282](https://github.com/sigoden/dufs/issues/282))
## [0.36.0] - 2023-08-24
### Bug Fixes
- Ui readonly if no write perm ([#258](https://github.com/sigoden/dufs/issues/258))
### Testing
- Remove dependency on native tls ([#255](https://github.com/sigoden/dufs/issues/255))
## [0.35.0] - 2023-08-14
### Bug Fixes
- Search should ignore entry path ([#235](https://github.com/sigoden/dufs/issues/235))
- Typo __ASSERTS_PREFIX__ ([#252](https://github.com/sigoden/dufs/issues/252))
### Features
- Sort by type first, then sort by name/mtime/size ([#241](https://github.com/sigoden/dufs/issues/241))
## [0.34.2] - 2023-06-05 ## [0.34.2] - 2023-06-05
### Bug Fixes ### Bug Fixes

1450
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,6 @@
[package] [package]
name = "dufs" name = "dufs"
version = "0.34.2" version = "0.39.0"
edition = "2021" edition = "2021"
authors = ["sigoden <sigoden@gmail.com>"] authors = ["sigoden <sigoden@gmail.com>"]
description = "Dufs is a distinctive utility file server" description = "Dufs is a distinctive utility file server"
@@ -13,55 +13,64 @@ keywords = ["static", "file", "server", "webdav", "cli"]
[dependencies] [dependencies]
clap = { version = "4", features = ["wrap_help", "env"] } clap = { version = "4", features = ["wrap_help", "env"] }
clap_complete = "4" clap_complete = "4"
chrono = "0.4" chrono = { version = "0.4", default-features = false, features = ["clock"] }
tokio = { version = "1", features = ["rt-multi-thread", "macros", "fs", "io-util", "signal"]} tokio = { version = "1", features = ["rt-multi-thread", "macros", "fs", "io-util", "signal"]}
tokio-util = { version = "0.7", features = ["io-util", "compat"] } tokio-util = { version = "0.7", features = ["io-util", "compat"] }
hyper = { version = "0.14", features = ["http1", "server", "tcp", "stream"] } hyper = { version = "1.0", features = ["http1", "server"] }
percent-encoding = "2.1" percent-encoding = "2.3"
serde = { version = "1", features = ["derive"] } serde = { version = "1", features = ["derive"] }
serde_json = "1" serde_json = "1"
futures = "0.3" futures-util = { version = "0.3", default-features = false, features = ["alloc"] }
base64 = "0.21" async_zip = { version = "0.0.16", default-features = false, features = ["deflate", "bzip2", "xz", "chrono", "tokio"] }
async_zip = { version = "0.0.15", default-features = false, features = ["deflate", "chrono", "tokio"] } headers = "0.4"
headers = "0.3"
mime_guess = "2.0" mime_guess = "2.0"
if-addrs = "0.10.1" if-addrs = "0.11"
rustls = { version = "0.21", default-features = false, features = ["tls12"], optional = true } rustls-pemfile = { version = "2.0", optional = true }
rustls-pemfile = { version = "1", optional = true } tokio-rustls = { version = "0.25", optional = true }
tokio-rustls = { version = "0.24", optional = true }
md5 = "0.7" md5 = "0.7"
lazy_static = "1.4" lazy_static = "1.4"
uuid = { version = "1.1", features = ["v4", "fast-rng"] } uuid = { version = "1.4", features = ["v4", "fast-rng"] }
urlencoding = "2.1" urlencoding = "2.1"
xml-rs = "0.8" xml-rs = "0.8"
log = "0.4" log = "0.4"
socket2 = "0.5" socket2 = "0.5"
async-stream = "0.3" async-stream = "0.3"
walkdir = "2.3" walkdir = "2.3"
form_urlencoded = "1.0" form_urlencoded = "1.2"
alphanumeric-sort = "1.4" alphanumeric-sort = "1.4"
content_inspector = "0.2" content_inspector = "0.2"
anyhow = "1.0" anyhow = "1.0"
chardetng = "0.1" chardetng = "0.1"
glob = "0.3.1" glob = "0.3"
indexmap = "1.9" indexmap = "2.0"
serde_yaml = "0.9"
sha-crypt = "0.5"
base64 = "0.21"
smart-default = "0.7"
rustls-pki-types = "1.0"
hyper-util = { version = "0.1", features = ["server-auto", "tokio"] }
http-body-util = "0.1"
bytes = "1.5"
pin-project-lite = "0.2"
[features] [features]
default = ["tls"] default = ["tls"]
tls = ["rustls", "rustls-pemfile", "tokio-rustls"] tls = ["rustls-pemfile", "tokio-rustls"]
[dev-dependencies] [dev-dependencies]
assert_cmd = "2" assert_cmd = "2"
reqwest = { version = "0.11", features = ["blocking", "multipart", "rustls-tls"], default-features = false } reqwest = { version = "0.11", features = ["blocking", "multipart", "rustls-tls"], default-features = false }
assert_fs = "1" assert_fs = "1"
port_check = "0.1" port_check = "0.1"
rstest = "0.17" rstest = "0.18"
regex = "1" regex = "1"
url = "2" url = "2"
diqwest = { version = "1", features = ["blocking"] } diqwest = { version = "2.0", features = ["blocking"], default-features = false }
predicates = "3" predicates = "3"
[profile.release] [profile.release]
opt-level = 3
lto = true lto = true
strip = true codegen-units = 1
opt-level = "z" panic = "abort"
strip = "symbols"

189
README.md
View File

@@ -13,7 +13,7 @@ Dufs is a distinctive utility file server that supports static serving, uploadin
- Download folder as zip file - Download folder as zip file
- Upload files and folders (Drag & Drop) - Upload files and folders (Drag & Drop)
- Create/Edit/Search files - Create/Edit/Search files
- Partial responses (Parallel/Resume download) - Resumable/partial uploads/downloads
- Access control - Access control
- Support https - Support https
- Support webdav - Support webdav
@@ -48,18 +48,18 @@ Download from [Github Releases](https://github.com/sigoden/dufs/releases), unzip
``` ```
Dufs is a distinctive utility file server - https://github.com/sigoden/dufs Dufs is a distinctive utility file server - https://github.com/sigoden/dufs
Usage: dufs [OPTIONS] [serve_path] Usage: dufs [OPTIONS] [serve-path]
Arguments: Arguments:
[serve_path] Specific path to serve [default: .] [serve-path] Specific path to serve [default: .]
Options: Options:
-c, --config <file> Specify configuration file
-b, --bind <addrs> Specify bind address or unix socket -b, --bind <addrs> Specify bind address or unix socket
-p, --port <port> Specify port to listen on [default: 5000] -p, --port <port> Specify port to listen on [default: 5000]
--path-prefix <path> Specify a path prefix --path-prefix <path> Specify a path prefix
--hidden <value> Hide paths from directory listings, separated by `,` --hidden <value> Hide paths from directory listings, e.g. tmp,*.log,*.lock
-a, --auth <rules> Add auth role -a, --auth <rules> Add auth roles, e.g. user:pass@/dir1:rw,/dir2
--auth-method <value> Select auth method [default: digest] [possible values: basic, digest]
-A, --allow-all Allow all operations -A, --allow-all Allow all operations
--allow-upload Allow upload files/folders --allow-upload Allow upload files/folders
--allow-delete Allow delete files/folders --allow-delete Allow delete files/folders
@@ -70,18 +70,19 @@ Options:
--render-index Serve index.html when requesting a directory, returns 404 if not found index.html --render-index Serve index.html when requesting a directory, returns 404 if not found index.html
--render-try-index Serve index.html when requesting a directory, returns directory listing if not found index.html --render-try-index Serve index.html when requesting a directory, returns directory listing if not found index.html
--render-spa Serve SPA(Single Page Application) --render-spa Serve SPA(Single Page Application)
--assets <path> Use custom assets to override builtin assets --assets <path> Set the path to the assets directory for overriding the built-in assets
--log-format <format> Customize http log format
--compress <level> Set zip compress level [default: low] [possible values: none, low, medium, high]
--completions <shell> Print shell completion script for <shell> [possible values: bash, elvish, fish, powershell, zsh]
--tls-cert <path> Path to an SSL/TLS certificate to serve with HTTPS --tls-cert <path> Path to an SSL/TLS certificate to serve with HTTPS
--tls-key <path> Path to the SSL/TLS certificate's private key --tls-key <path> Path to the SSL/TLS certificate's private key
--log-format <format> Customize http log format
--completions <shell> Print shell completion script for <shell> [possible values: bash, elvish, fish, powershell, zsh]
-h, --help Print help -h, --help Print help
-V, --version Print version -V, --version Print version
``` ```
## Examples ## Examples
Serve current working directory in readonly mode Serve current working directory in read-only mode
``` ```
dufs dufs
@@ -126,7 +127,7 @@ dufs --render-index
Require username/password Require username/password
``` ```
dufs -a /@admin:123 dufs -a admin:123@/:rw
``` ```
Listen on specific host:ip Listen on specific host:ip
@@ -186,16 +187,30 @@ curl -X MOVE https://127.0.0.1:5000/path -H "Destination: https://127.0.0.1:5000
List/search directory contents List/search directory contents
``` ```
curl http://127.0.0.1:5000?simple # output names only, just like `ls -1` curl http://127.0.0.1:5000?q=Dockerfile # search for files, similar to `find -name Dockerfile`
curl http://127.0.0.1:5000?simple # output names only, similar to `ls -1`
curl http://127.0.0.1:5000?json # output paths in json format curl http://127.0.0.1:5000?json # output paths in json format
curl http://127.0.0.1:5000?q=Dockerfile&simple # search for files, just like `find -name Dockerfile`
``` ```
With authorization With authorization
``` ```
curl --user user:pass --digest http://192.168.8.10:5000/file # digest auth curl http://127.0.0.1:5000/file --user user:pass # basic auth
curl --user user:pass http://192.168.8.10:5000/file # basic auth curl http://127.0.0.1:5000/file --user user:pass --digest # digest auth
```
Resumable downloads
```
curl -C- -o file http://127.0.0.1:5000/file
```
Resumable uploads
```
upload_offset=$(curl -I -s http://127.0.0.1:5000/file | tr -d '\r' | sed -n 's/content-length: //p')
dd skip=$upload_offset if=file status=none ibs=1 | \
curl -X PATCH -H "X-Update-Range: append" --data-binary @- http://127.0.0.1:5000/file
``` ```
<details> <details>
@@ -206,44 +221,44 @@ curl --user user:pass http://192.168.8.10:5000/file # basic auth
Dufs supports account based access control. You can control who can do what on which path with `--auth`/`-a`. Dufs supports account based access control. You can control who can do what on which path with `--auth`/`-a`.
``` ```
dufs -a [user:pass]@path[:rw][,path[:rw]...][|...] dufs -a admin:admin@/:rw -a guest:guest@/
dufs -a user:pass@/:rw,/dir1,/dir2:- -a @/
``` ```
1: Multiple rules are separated by "|"
2: User and pass are the account name and password, if omitted, it is an anonymous user 1. Use `@` to separate the account and paths. No account means anonymous user.
3: One rule can set multiple paths, separated by "," 2. Use `:` to separate the username and password of the account.
4: Add `:rw` after the path to indicate that the path has read and write permissions, otherwise the path has readonly permissions. 3. Use `,` to separate paths.
4. Use path suffix `:rw`, `:ro`, `:-` to set permissions: `read-write`, `read-only`, `forbidden`. `:ro` can be omitted.
- `-a admin:admin@/:rw`: `admin` has complete permissions for all paths.
- `-a guest:guest@/`: `guest` has read-only permissions for all paths.
- `-a user:pass@/:rw,/dir1,/dir2:-`: `user` has read-write permissions for `/*`, has read-only permissions for `/dir1/*`, but is forbidden for `/dir2/*`.
- `-a @/`: All paths are publicly accessible; everyone can view/download them.
> There are no restrictions on using ':' and '@' characters in a password. For example, `user:pa:ss@1@/:rw` is valid, the password is `pa:ss@1`.
#### Hashed Password
DUFS supports the use of sha-512 hashed password.
Create hashed password
``` ```
dufs -A -a admin:admin@/:rw $ mkpasswd -m sha-512 -s
Password: 123456
$6$qCAVUG7yn7t/hH4d$BWm8r5MoDywNmDP/J3V2S2a6flmKHC1IpblfoqZfuK.LtLBZ0KFXP9QIfJP8RqL8MCw4isdheoAMTuwOz.pAO/
``` ```
`admin` has all permissions for all paths.
Use hashed password
``` ```
dufs -A -a admin:admin@/:rw -a guest:guest@/ dufs -a 'admin:$6$qCAVUG7yn7t/hH4d$BWm8r5MoDywNmDP/J3V2S2a6flmKHC1IpblfoqZfuK.LtLBZ0KFXP9QIfJP8RqL8MCw4isdheoAMTuwOz.pAO/@/:rw'
``` ```
`guest` has readonly permissions for all paths.
``` Two important things for hashed passwords:
dufs -A -a admin:admin@/:rw -a @/
```
All paths is public, everyone can view/download it.
``` 1. Dufs only supports sha-512 hashed passwords, so ensure that the password string always starts with `$6$`.
dufs -A -a admin:admin@/:rw -a user1:pass1@/user1:rw -a user2:pass2@/user2 2. Digest authentication does not function properly with hashed passwords.
dufs -A -a "admin:admin@/:rw|user1:pass1@/user1:rw|user2:pass2@/user2"
```
`user1` has all permissions for `/user1/*` path.
`user2` has all permissions for `/user2/*` path.
```
dufs -A -a user:pass@/dir1:rw,/dir2:rw,dir3
```
`user` has all permissions for `/dir1/*` and `/dir2/*`, has readonly permissions for `/dir3/`.
```
dufs -a admin:admin@/
```
Since dufs only allows viewing/downloading, `admin` can only view/download files.
### Hide Paths ### Hide Paths
@@ -256,9 +271,10 @@ dufs --hidden .git,.DS_Store,tmp
> The glob used in --hidden only matches file and directory names, not paths. So `--hidden dir1/file` is invalid. > The glob used in --hidden only matches file and directory names, not paths. So `--hidden dir1/file` is invalid.
```sh ```sh
dufs --hidden '.*' # hidden dotfiles dufs --hidden '.*' # hidden dotfiles
dufs --hidden '*/' # hidden all folders dufs --hidden '*/' # hidden all folders
dufs --hidden '*.log,*.lock' # hidden by exts dufs --hidden '*.log,*.lock' # hidden by exts
dufs --hidden '*.log' --hidden '*.lock'
``` ```
### Log Format ### Log Format
@@ -307,27 +323,64 @@ dufs --log-format '$remote_addr $remote_user "$request" $status' -a /@admin:admi
All options can be set using environment variables prefixed with `DUFS_`. All options can be set using environment variables prefixed with `DUFS_`.
``` ```
[ROOT_DIR] DUFS_ROOT_DIR=/dir [serve-path] DUFS_SERVE_PATH="."
-b, --bind <addrs> DUFS_BIND=0.0.0.0 --config <file> DUFS_CONFIG=config.yaml
-p, --port <port> DUFS_PORT=5000 -b, --bind <addrs> DUFS_BIND=0.0.0.0
--path-prefix <path> DUFS_PATH_RREFIX=/path -p, --port <port> DUFS_PORT=5000
--hidden <value> DUFS_HIDDEN=*.log --path-prefix <path> DUFS_PATH_PREFIX=/static
-a, --auth <rules> DUFS_AUTH="admin:admin@/:rw|@/" --hidden <value> DUFS_HIDDEN=tmp,*.log,*.lock
--auth-method <value> DUFS_AUTH_METHOD=basic -a, --auth <rules> DUFS_AUTH="admin:admin@/:rw|@/"
-A, --allow-all DUFS_ALLOW_ALL=true -A, --allow-all DUFS_ALLOW_ALL=true
--allow-upload DUFS_ALLOW_UPLOAD=true --allow-upload DUFS_ALLOW_UPLOAD=true
--allow-delete DUFS_ALLOW_DELETE=true --allow-delete DUFS_ALLOW_DELETE=true
--allow-search DUFS_ALLOW_SEARCH=true --allow-search DUFS_ALLOW_SEARCH=true
--allow-symlink DUFS_ALLOW_SYMLINK=true --allow-symlink DUFS_ALLOW_SYMLINK=true
--allow-archive DUFS_ALLOW_ARCHIVE=true --allow-archive DUFS_ALLOW_ARCHIVE=true
--enable-cors DUFS_ENABLE_CORS=true --enable-cors DUFS_ENABLE_CORS=true
--render-index DUFS_RENDER_INDEX=true --render-index DUFS_RENDER_INDEX=true
--render-try-index DUFS_RENDER_TRY_INDEX=true --render-try-index DUFS_RENDER_TRY_INDEX=true
--render-spa DUFS_RENDER_SPA=true --render-spa DUFS_RENDER_SPA=true
--assets <path> DUFS_ASSETS=/assets --assets <path> DUFS_ASSETS=/assets
--tls-cert <path> DUFS_TLS_CERT=cert.pem --log-format <format> DUFS_LOG_FORMAT=""
--tls-key <path> DUFS_TLS_KEY=key.pem --compress <compress> DUFS_COMPRESS="low"
--log-format <format> DUFS_LOG_FORMAT="" --tls-cert <path> DUFS_TLS_CERT=cert.pem
--tls-key <path> DUFS_TLS_KEY=key.pem
```
## Configuration File
You can specify and use the configuration file by selecting the option `--config <path-to-config.yaml>`.
The following are the configuration items:
```yaml
serve-path: '.'
bind: 0.0.0.0
port: 5000
path-prefix: /dufs
hidden:
- tmp
- '*.log'
- '*.lock'
auth:
- admin:admin@/:rw
- user:pass@/src:rw,/share
- '@/' # According to the YAML spec, quoting is required.
allow-all: false
allow-upload: true
allow-delete: true
allow-search: true
allow-symlink: true
allow-archive: true
enable-cors: true
render-index: true
render-try-index: true
render-spa: true
assets: ./assets/
log-format: '$remote_addr "$request" $status $http_user_agent'
compress: low
tls-cert: tests/data/cert.pem
tls-key: tests/data/key_pkcs1.pem
``` ```
### Customize UI ### Customize UI
@@ -343,7 +396,7 @@ Your assets folder must contains a `index.html` file.
`index.html` can use the following placeholder variables to retrieve internal data. `index.html` can use the following placeholder variables to retrieve internal data.
- `__INDEX_DATA__`: directory listing data - `__INDEX_DATA__`: directory listing data
- `__ASSERTS_PREFIX__`: assets url prefix - `__ASSETS_PREFIX__`: assets url prefix
</details> </details>

View File

@@ -73,6 +73,10 @@ body {
display: none; display: none;
} }
.upload-file label {
cursor: pointer;
}
.searchbar { .searchbar {
display: flex; display: flex;
flex-wrap: nowrap; flex-wrap: nowrap;
@@ -103,11 +107,6 @@ body {
cursor: pointer; cursor: pointer;
} }
.upload-status span {
width: 70px;
display: inline-block;
}
.main { .main {
padding: 3.3em 1em 0; padding: 3.3em 1em 0;
} }
@@ -134,6 +133,10 @@ body {
padding-left: 0.6em; padding-left: 0.6em;
} }
.cell-status span {
display: inline-block;
}
.paths-table thead a { .paths-table thead a {
color: unset; color: unset;
text-decoration: none; text-decoration: none;
@@ -208,22 +211,37 @@ body {
height: calc(100vh - 5rem); height: calc(100vh - 5rem);
border: 1px solid #ced4da; border: 1px solid #ced4da;
outline: none; outline: none;
padding: 5px;
} }
.toolbox2 { .toolbox-right {
margin-left: auto; margin-left: auto;
margin-right: 2em; margin-right: 2em;
} }
.save-btn { .save-btn {
cursor: pointer; cursor: pointer;
-webkit-user-select: none;
user-select: none; user-select: none;
} }
.user-btn {
display: flex;
align-items: center;
}
.user-name {
padding-left: 3px;
}
.not-editable { .not-editable {
font-style: italic; font-style: italic;
} }
.retry-btn {
cursor: pointer;
}
@media (min-width: 768px) { @media (min-width: 768px) {
.path a { .path a {
min-width: 400px; min-width: 400px;

View File

@@ -1,15 +1,15 @@
<!DOCTYPE html> <!DOCTYPE html>
<html> <html lang="en-US">
<head> <head>
<meta charset="utf-8" /> <meta charset="utf-8" />
<meta name="viewport" content="width=device-width" /> <meta name="viewport" content="width=device-width" />
<link rel="icon" type="image/x-icon" href="__ASSERTS_PREFIX__favicon.ico"> <link rel="icon" type="image/x-icon" href="__ASSETS_PREFIX__favicon.ico">
<link rel="stylesheet" href="__ASSERTS_PREFIX__index.css"> <link rel="stylesheet" href="__ASSETS_PREFIX__index.css">
<script> <script>
DATA = __INDEX_DATA__ DATA = __INDEX_DATA__
</script> </script>
<script src="__ASSERTS_PREFIX__index.js"></script> <script src="__ASSETS_PREFIX__index.js"></script>
</head> </head>
<body> <body>
@@ -48,7 +48,7 @@
d="M7.646 1.146a.5.5 0 0 1 .708 0l3 3a.5.5 0 0 1-.708.708L8.5 2.707V11.5a.5.5 0 0 1-1 0V2.707L5.354 4.854a.5.5 0 1 1-.708-.708l3-3z" /> d="M7.646 1.146a.5.5 0 0 1 .708 0l3 3a.5.5 0 0 1-.708.708L8.5 2.707V11.5a.5.5 0 0 1-1 0V2.707L5.354 4.854a.5.5 0 1 1-.708-.708l3-3z" />
</svg> </svg>
</label> </label>
<input type="file" id="file" name="file" multiple> <input type="file" id="file" title="Upload files" name="file" multiple>
</div> </div>
<div class="control new-folder hidden" title="New folder"> <div class="control new-folder hidden" title="New folder">
<svg width="16" height="16" viewBox="0 0 16 16"> <svg width="16" height="16" viewBox="0 0 16 16">
@@ -74,10 +74,10 @@
d="M11.742 10.344a6.5 6.5 0 1 0-1.397 1.398h-.001c.03.04.062.078.098.115l3.85 3.85a1 1 0 0 0 1.415-1.414l-3.85-3.85a1.007 1.007 0 0 0-.115-.1zM12 6.5a5.5 5.5 0 1 1-11 0 5.5 5.5 0 0 1 11 0z" /> d="M11.742 10.344a6.5 6.5 0 1 0-1.397 1.398h-.001c.03.04.062.078.098.115l3.85 3.85a1 1 0 0 0 1.415-1.414l-3.85-3.85a1.007 1.007 0 0 0-.115-.1zM12 6.5a5.5 5.5 0 1 1-11 0 5.5 5.5 0 0 1 11 0z" />
</svg> </svg>
</div> </div>
<input id="search" name="q" type="text" maxlength="128" autocomplete="off" tabindex="1"> <input id="search" title="Searching for folders or files" name="q" type="text" maxlength="128" autocomplete="off" tabindex="1">
<input type="submit" hidden /> <input type="submit" hidden />
</form> </form>
<div class="toolbox2"> <div class="toolbox-right">
<div class="login-btn hidden" title="Login for upload/move/delete/edit permissions"> <div class="login-btn hidden" title="Login for upload/move/delete/edit permissions">
<svg width="16" height="16" viewBox="0 0 16 16"> <svg width="16" height="16" viewBox="0 0 16 16">
<path fill-rule="evenodd" <path fill-rule="evenodd"
@@ -91,6 +91,7 @@
<path <path
d="M8 8a3 3 0 1 0 0-6 3 3 0 0 0 0 6Zm2-3a2 2 0 1 1-4 0 2 2 0 0 1 4 0Zm4 8c0 1-1 1-1 1H3s-1 0-1-1 1-4 6-4 6 3 6 4Zm-1-.004c-.001-.246-.154-.986-.832-1.664C11.516 10.68 10.289 10 8 10c-2.29 0-3.516.68-4.168 1.332-.678.678-.83 1.418-.832 1.664h10Z" /> d="M8 8a3 3 0 1 0 0-6 3 3 0 0 0 0 6Zm2-3a2 2 0 1 1-4 0 2 2 0 0 1 4 0Zm4 8c0 1-1 1-1 1H3s-1 0-1-1 1-4 6-4 6 3 6 4Zm-1-.004c-.001-.246-.154-.986-.832-1.664C11.516 10.68 10.289 10 8 10c-2.29 0-3.516.68-4.168 1.332-.678.678-.83 1.418-.832 1.664h10Z" />
</svg> </svg>
<span class="user-name"></span>
</div> </div>
<div class="save-btn hidden" title="Save file"> <div class="save-btn hidden" title="Save file">
<svg viewBox="0 0 1024 1024" width="24" height="24"> <svg viewBox="0 0 1024 1024" width="24" height="24">
@@ -121,7 +122,7 @@
</div> </div>
<div class="editor-page hidden"> <div class="editor-page hidden">
<div class="not-editable hidden"></div> <div class="not-editable hidden"></div>
<textarea class="editor hidden" cols="10"></textarea> <textarea id="editor" class="editor hidden" aria-label="Editor" cols="10"></textarea>
</div> </div>
</div> </div>
<script> <script>

View File

@@ -10,7 +10,7 @@
* @typedef {object} DATA * @typedef {object} DATA
* @property {string} href * @property {string} href
* @property {string} uri_prefix * @property {string} uri_prefix
* @property {"Index" | "Edit"} kind * @property {"Index" | "Edit" | "View"} kind
* @property {PathItem[]} paths * @property {PathItem[]} paths
* @property {boolean} allow_upload * @property {boolean} allow_upload
* @property {boolean} allow_delete * @property {boolean} allow_delete
@@ -55,9 +55,15 @@ const ICONS = {
download: `<svg width="16" height="16" viewBox="0 0 16 16"><path d="M.5 9.9a.5.5 0 0 1 .5.5v2.5a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-2.5a.5.5 0 0 1 1 0v2.5a2 2 0 0 1-2 2H2a2 2 0 0 1-2-2v-2.5a.5.5 0 0 1 .5-.5z"/><path d="M7.646 11.854a.5.5 0 0 0 .708 0l3-3a.5.5 0 0 0-.708-.708L8.5 10.293V1.5a.5.5 0 0 0-1 0v8.793L5.354 8.146a.5.5 0 1 0-.708.708l3 3z"/></svg>`, download: `<svg width="16" height="16" viewBox="0 0 16 16"><path d="M.5 9.9a.5.5 0 0 1 .5.5v2.5a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-2.5a.5.5 0 0 1 1 0v2.5a2 2 0 0 1-2 2H2a2 2 0 0 1-2-2v-2.5a.5.5 0 0 1 .5-.5z"/><path d="M7.646 11.854a.5.5 0 0 0 .708 0l3-3a.5.5 0 0 0-.708-.708L8.5 10.293V1.5a.5.5 0 0 0-1 0v8.793L5.354 8.146a.5.5 0 1 0-.708.708l3 3z"/></svg>`,
move: `<svg width="16" height="16" viewBox="0 0 16 16"><path fill-rule="evenodd" d="M1.5 1.5A.5.5 0 0 0 1 2v4.8a2.5 2.5 0 0 0 2.5 2.5h9.793l-3.347 3.346a.5.5 0 0 0 .708.708l4.2-4.2a.5.5 0 0 0 0-.708l-4-4a.5.5 0 0 0-.708.708L13.293 8.3H3.5A1.5 1.5 0 0 1 2 6.8V2a.5.5 0 0 0-.5-.5z"/></svg>`, move: `<svg width="16" height="16" viewBox="0 0 16 16"><path fill-rule="evenodd" d="M1.5 1.5A.5.5 0 0 0 1 2v4.8a2.5 2.5 0 0 0 2.5 2.5h9.793l-3.347 3.346a.5.5 0 0 0 .708.708l4.2-4.2a.5.5 0 0 0 0-.708l-4-4a.5.5 0 0 0-.708.708L13.293 8.3H3.5A1.5 1.5 0 0 1 2 6.8V2a.5.5 0 0 0-.5-.5z"/></svg>`,
edit: `<svg width="16" height="16" viewBox="0 0 16 16"><path d="M12.146.146a.5.5 0 0 1 .708 0l3 3a.5.5 0 0 1 0 .708l-10 10a.5.5 0 0 1-.168.11l-5 2a.5.5 0 0 1-.65-.65l2-5a.5.5 0 0 1 .11-.168l10-10zM11.207 2.5 13.5 4.793 14.793 3.5 12.5 1.207 11.207 2.5zm1.586 3L10.5 3.207 4 9.707V10h.5a.5.5 0 0 1 .5.5v.5h.5a.5.5 0 0 1 .5.5v.5h.293l6.5-6.5zm-9.761 5.175-.106.106-1.528 3.821 3.821-1.528.106-.106A.5.5 0 0 1 5 12.5V12h-.5a.5.5 0 0 1-.5-.5V11h-.5a.5.5 0 0 1-.468-.325z"/></svg>`, edit: `<svg width="16" height="16" viewBox="0 0 16 16"><path d="M12.146.146a.5.5 0 0 1 .708 0l3 3a.5.5 0 0 1 0 .708l-10 10a.5.5 0 0 1-.168.11l-5 2a.5.5 0 0 1-.65-.65l2-5a.5.5 0 0 1 .11-.168l10-10zM11.207 2.5 13.5 4.793 14.793 3.5 12.5 1.207 11.207 2.5zm1.586 3L10.5 3.207 4 9.707V10h.5a.5.5 0 0 1 .5.5v.5h.5a.5.5 0 0 1 .5.5v.5h.293l6.5-6.5zm-9.761 5.175-.106.106-1.528 3.821 3.821-1.528.106-.106A.5.5 0 0 1 5 12.5V12h-.5a.5.5 0 0 1-.5-.5V11h-.5a.5.5 0 0 1-.468-.325z"/></svg>`,
delete: `<svg width="16" height="16" fill="currentColor"viewBox="0 0 16 16"><path d="M6.854 7.146a.5.5 0 1 0-.708.708L7.293 9l-1.147 1.146a.5.5 0 0 0 .708.708L8 9.707l1.146 1.147a.5.5 0 0 0 .708-.708L8.707 9l1.147-1.146a.5.5 0 0 0-.708-.708L8 8.293 6.854 7.146z"/><path d="M14 14V4.5L9.5 0H4a2 2 0 0 0-2 2v12a2 2 0 0 0 2 2h8a2 2 0 0 0 2-2zM9.5 3A1.5 1.5 0 0 0 11 4.5h2V14a1 1 0 0 1-1 1H4a1 1 0 0 1-1-1V2a1 1 0 0 1 1-1h5.5v2z"/></svg>`, delete: `<svg width="16" height="16" viewBox="0 0 16 16"><path d="M6.854 7.146a.5.5 0 1 0-.708.708L7.293 9l-1.147 1.146a.5.5 0 0 0 .708.708L8 9.707l1.146 1.147a.5.5 0 0 0 .708-.708L8.707 9l1.147-1.146a.5.5 0 0 0-.708-.708L8 8.293 6.854 7.146z"/><path d="M14 14V4.5L9.5 0H4a2 2 0 0 0-2 2v12a2 2 0 0 0 2 2h8a2 2 0 0 0 2-2zM9.5 3A1.5 1.5 0 0 0 11 4.5h2V14a1 1 0 0 1-1 1H4a1 1 0 0 1-1-1V2a1 1 0 0 1 1-1h5.5v2z"/></svg>`,
view: `<svg width="16" height="16" viewBox="0 0 16 16"><path d="M4 0a2 2 0 0 0-2 2v12a2 2 0 0 0 2 2h8a2 2 0 0 0 2-2V2a2 2 0 0 0-2-2zm0 1h8a1 1 0 0 1 1 1v12a1 1 0 0 1-1 1H4a1 1 0 0 1-1-1V2a1 1 0 0 1 1-1"/></svg>`,
} }
/**
* @type Map<string, Uploader>
*/
const failUploaders = new Map();
/** /**
* @type Element * @type Element
*/ */
@@ -86,6 +92,10 @@ let $editor;
* @type Element * @type Element
*/ */
let $userBtn; let $userBtn;
/**
* @type Element
*/
let $userName;
function ready() { function ready() {
$pathsTable = document.querySelector(".paths-table") $pathsTable = document.querySelector(".paths-table")
@@ -95,6 +105,7 @@ function ready() {
$emptyFolder = document.querySelector(".empty-folder"); $emptyFolder = document.querySelector(".empty-folder");
$editor = document.querySelector(".editor"); $editor = document.querySelector(".editor");
$userBtn = document.querySelector(".user-btn"); $userBtn = document.querySelector(".user-btn");
$userName = document.querySelector(".user-name");
addBreadcrumb(DATA.href, DATA.uri_prefix); addBreadcrumb(DATA.href, DATA.uri_prefix);
@@ -108,7 +119,12 @@ function ready() {
document.title = `Edit ${DATA.href} - Dufs`; document.title = `Edit ${DATA.href} - Dufs`;
document.querySelector(".editor-page").classList.remove("hidden");; document.querySelector(".editor-page").classList.remove("hidden");;
setupEditPage(); setupEditorPage();
} else if (DATA.kind == "View") {
document.title = `View ${DATA.href} - Dufs`;
document.querySelector(".editor-page").classList.remove("hidden");;
setupEditorPage();
} }
} }
@@ -117,23 +133,24 @@ class Uploader {
/** /**
* *
* @param {File} file * @param {File} file
* @param {string[]} dirs * @param {string[]} pathParts
*/ */
constructor(file, dirs) { constructor(file, pathParts) {
/** /**
* @type Element * @type Element
*/ */
this.$uploadStatus = null this.$uploadStatus = null
this.uploaded = 0; this.uploaded = 0;
this.uploadOffset = 0;
this.lastUptime = 0; this.lastUptime = 0;
this.name = [...dirs, file.name].join("/"); this.name = [...pathParts, file.name].join("/");
this.idx = Uploader.globalIdx++; this.idx = Uploader.globalIdx++;
this.file = file; this.file = file;
this.url = newUrl(this.name);
} }
upload() { upload() {
const { idx, name } = this; const { idx, name, url } = this;
const url = newUrl(name);
const encodedName = encodedStr(name); const encodedName = encodedStr(name);
$uploadersTable.insertAdjacentHTML("beforeend", ` $uploadersTable.insertAdjacentHTML("beforeend", `
<tr id="upload${idx}" class="uploader"> <tr id="upload${idx}" class="uploader">
@@ -149,13 +166,25 @@ class Uploader {
$emptyFolder.classList.add("hidden"); $emptyFolder.classList.add("hidden");
this.$uploadStatus = document.getElementById(`uploadStatus${idx}`); this.$uploadStatus = document.getElementById(`uploadStatus${idx}`);
this.$uploadStatus.innerHTML = '-'; this.$uploadStatus.innerHTML = '-';
this.$uploadStatus.addEventListener("click", e => {
const nodeId = e.target.id;
const matches = /^retry(\d+)$/.exec(nodeId);
if (matches) {
const id = parseInt(matches[1]);
let uploader = failUploaders.get(id);
if (uploader) uploader.retry();
}
});
Uploader.queues.push(this); Uploader.queues.push(this);
Uploader.runQueue(); Uploader.runQueue();
} }
ajax() { ajax() {
const url = newUrl(this.name); const { url } = this;
this.uploaded = 0;
this.lastUptime = Date.now(); this.lastUptime = Date.now();
const ajax = new XMLHttpRequest(); const ajax = new XMLHttpRequest();
ajax.upload.addEventListener("progress", e => this.progress(e), false); ajax.upload.addEventListener("progress", e => this.progress(e), false);
ajax.addEventListener("readystatechange", () => { ajax.addEventListener("readystatechange", () => {
@@ -163,37 +192,64 @@ class Uploader {
if (ajax.status >= 200 && ajax.status < 300) { if (ajax.status >= 200 && ajax.status < 300) {
this.complete(); this.complete();
} else { } else {
this.fail(); if (ajax.status != 0) {
this.fail(`${ajax.status} ${ajax.statusText}`);
}
} }
} }
}) })
ajax.addEventListener("error", () => this.fail(), false); ajax.addEventListener("error", () => this.fail(), false);
ajax.addEventListener("abort", () => this.fail(), false); ajax.addEventListener("abort", () => this.fail(), false);
ajax.open("PUT", url); if (this.uploadOffset > 0) {
ajax.send(this.file); ajax.open("PATCH", url);
ajax.setRequestHeader("X-Update-Range", "append");
ajax.send(this.file.slice(this.uploadOffset));
} else {
ajax.open("PUT", url);
ajax.send(this.file);
// setTimeout(() => ajax.abort(), 3000);
}
} }
async retry() {
const { url } = this;
let res = await fetch(url, {
method: "HEAD",
});
let uploadOffset = 0;
if (res.status == 200) {
let value = res.headers.get("content-length");
uploadOffset = parseInt(value) || 0;
}
this.uploadOffset = uploadOffset;
this.ajax()
}
progress(event) { progress(event) {
const now = Date.now(); const now = Date.now();
const speed = (event.loaded - this.uploaded) / (now - this.lastUptime) * 1000; const speed = (event.loaded - this.uploaded) / (now - this.lastUptime) * 1000;
const [speedValue, speedUnit] = formatSize(speed); const [speedValue, speedUnit] = formatSize(speed);
const speedText = `${speedValue}${speedUnit.toLowerCase()}/s`; const speedText = `${speedValue} ${speedUnit}/s`;
const progress = formatPercent((event.loaded / event.total) * 100); const progress = formatPercent(((event.loaded + this.uploadOffset) / this.file.size) * 100);
const duration = formatDuration((event.total - event.loaded) / speed) const duration = formatDuration((event.total - event.loaded) / speed)
this.$uploadStatus.innerHTML = `<span>${speedText}</span><span>${progress}</span><span>${duration}</span>`; this.$uploadStatus.innerHTML = `<span style="width: 80px;">${speedText}</span><span>${progress} ${duration}</span>`;
this.uploaded = event.loaded; this.uploaded = event.loaded;
this.lastUptime = now; this.lastUptime = now;
} }
complete() { complete() {
this.$uploadStatus.innerHTML = ``; const $uploadStatusNew = this.$uploadStatus.cloneNode(true);
$uploadStatusNew.innerHTML = ``;
this.$uploadStatus.parentNode.replaceChild($uploadStatusNew, this.$uploadStatus);
this.$uploadStatus = null;
failUploaders.delete(this.idx);
Uploader.runnings--; Uploader.runnings--;
Uploader.runQueue(); Uploader.runQueue();
} }
fail() { fail(reason = "") {
this.$uploadStatus.innerHTML = ``; this.$uploadStatus.innerHTML = `<span style="width: 20px;" title="${reason}">✗</span><span class="retry-btn" id="retry${this.idx}" title="Retry">↻</span>`;
failUploaders.set(this.idx, this);
Uploader.runnings--; Uploader.runnings--;
Uploader.runQueue(); Uploader.runQueue();
} }
@@ -252,7 +308,7 @@ function addBreadcrumb(href, uri_prefix) {
} }
const encodedName = encodedStr(name); const encodedName = encodedStr(name);
if (i === 0) { if (i === 0) {
$breadcrumb.insertAdjacentHTML("beforeend", `<a href="${path}"><svg width="16" height="16" viewBox="0 0 16 16"><path d="M6.5 14.5v-3.505c0-.245.25-.495.5-.495h2c.25 0 .5.25.5.5v3.5a.5.5 0 0 0 .5.5h4a.5.5 0 0 0 .5-.5v-7a.5.5 0 0 0-.146-.354L13 5.793V2.5a.5.5 0 0 0-.5-.5h-1a.5.5 0 0 0-.5.5v1.293L8.354 1.146a.5.5 0 0 0-.708 0l-6 6A.5.5 0 0 0 1.5 7.5v7a.5.5 0 0 0 .5.5h4a.5.5 0 0 0 .5-.5z"/></svg></a>`); $breadcrumb.insertAdjacentHTML("beforeend", `<a href="${path}" title="Root"><svg width="16" height="16" viewBox="0 0 16 16"><path d="M6.5 14.5v-3.505c0-.245.25-.495.5-.495h2c.25 0 .5.25.5.5v3.5a.5.5 0 0 0 .5.5h4a.5.5 0 0 0 .5-.5v-7a.5.5 0 0 0-.146-.354L13 5.793V2.5a.5.5 0 0 0-.5-.5h-1a.5.5 0 0 0-.5.5v1.293L8.354 1.146a.5.5 0 0 0-.708 0l-6 6A.5.5 0 0 0 1.5 7.5v7a.5.5 0 0 0 .5.5h4a.5.5 0 0 0 .5-.5z"/></svg></a>`);
} else if (i === len - 1) { } else if (i === len - 1) {
$breadcrumb.insertAdjacentHTML("beforeend", `<b>${encodedName}</b>`); $breadcrumb.insertAdjacentHTML("beforeend", `<b>${encodedName}</b>`);
} else { } else {
@@ -316,13 +372,13 @@ function renderPathsTableHead() {
<tr> <tr>
${headerItems.map(item => { ${headerItems.map(item => {
let svg = `<svg width="12" height="12" viewBox="0 0 16 16"><path fill-rule="evenodd" d="M11.5 15a.5.5 0 0 0 .5-.5V2.707l3.146 3.147a.5.5 0 0 0 .708-.708l-4-4a.5.5 0 0 0-.708 0l-4 4a.5.5 0 1 0 .708.708L11 2.707V14.5a.5.5 0 0 0 .5.5zm-7-14a.5.5 0 0 1 .5.5v11.793l3.146-3.147a.5.5 0 0 1 .708.708l-4 4a.5.5 0 0 1-.708 0l-4-4a.5.5 0 0 1 .708-.708L4 13.293V1.5a.5.5 0 0 1 .5-.5z"/></svg>`; let svg = `<svg width="12" height="12" viewBox="0 0 16 16"><path fill-rule="evenodd" d="M11.5 15a.5.5 0 0 0 .5-.5V2.707l3.146 3.147a.5.5 0 0 0 .708-.708l-4-4a.5.5 0 0 0-.708 0l-4 4a.5.5 0 1 0 .708.708L11 2.707V14.5a.5.5 0 0 0 .5.5zm-7-14a.5.5 0 0 1 .5.5v11.793l3.146-3.147a.5.5 0 0 1 .708.708l-4 4a.5.5 0 0 1-.708 0l-4-4a.5.5 0 0 1 .708-.708L4 13.293V1.5a.5.5 0 0 1 .5-.5z"/></svg>`;
let order = "asc"; let order = "desc";
if (PARAMS.sort === item.name) { if (PARAMS.sort === item.name) {
if (PARAMS.order === "asc") { if (PARAMS.order === "desc") {
order = "desc"; order = "asc";
svg = `<svg width="12" height="12" viewBox="0 0 16 16"><path fill-rule="evenodd" d="M8 15a.5.5 0 0 0 .5-.5V2.707l3.146 3.147a.5.5 0 0 0 .708-.708l-4-4a.5.5 0 0 0-.708 0l-4 4a.5.5 0 1 0 .708.708L7.5 2.707V14.5a.5.5 0 0 0 .5.5z"/></svg>`
} else {
svg = `<svg width="12" height="12" viewBox="0 0 16 16"><path fill-rule="evenodd" d="M8 1a.5.5 0 0 1 .5.5v11.793l3.146-3.147a.5.5 0 0 1 .708.708l-4 4a.5.5 0 0 1-.708 0l-4-4a.5.5 0 0 1 .708-.708L7.5 13.293V1.5A.5.5 0 0 1 8 1z"/></svg>` svg = `<svg width="12" height="12" viewBox="0 0 16 16"><path fill-rule="evenodd" d="M8 1a.5.5 0 0 1 .5.5v11.793l3.146-3.147a.5.5 0 0 1 .708.708l-4 4a.5.5 0 0 1-.708 0l-4-4a.5.5 0 0 1 .708-.708L7.5 13.293V1.5A.5.5 0 0 1 8 1z"/></svg>`
} else {
svg = `<svg width="12" height="12" viewBox="0 0 16 16"><path fill-rule="evenodd" d="M8 15a.5.5 0 0 0 .5-.5V2.707l3.146 3.147a.5.5 0 0 0 .708-.708l-4-4a.5.5 0 0 0-.708 0l-4 4a.5.5 0 1 0 .708.708L7.5 2.707V14.5a.5.5 0 0 0 .5.5z"/></svg>`
} }
} }
const qs = new URLSearchParams({ ...PARAMS, order, sort: item.name }).toString(); const qs = new URLSearchParams({ ...PARAMS, order, sort: item.name }).toString();
@@ -364,6 +420,7 @@ function addPath(file, index) {
let actionDownload = ""; let actionDownload = "";
let actionMove = ""; let actionMove = "";
let actionEdit = ""; let actionEdit = "";
let actionView = "";
let isDir = file.path_type.endsWith("Dir"); let isDir = file.path_type.endsWith("Dir");
if (isDir) { if (isDir) {
url += "/"; url += "/";
@@ -389,9 +446,13 @@ function addPath(file, index) {
actionDelete = ` actionDelete = `
<div onclick="deletePath(${index})" class="action-btn" id="deleteBtn${index}" title="Delete">${ICONS.delete}</div>`; <div onclick="deletePath(${index})" class="action-btn" id="deleteBtn${index}" title="Delete">${ICONS.delete}</div>`;
} }
if (!actionEdit && !isDir) {
actionView = `<a class="action-btn" title="View file" target="_blank" href="${url}?view">${ICONS.view}</a>`;
}
let actionCell = ` let actionCell = `
<td class="cell-actions"> <td class="cell-actions">
${actionDownload} ${actionDownload}
${actionView}
${actionMove} ${actionMove}
${actionDelete} ${actionDelete}
${actionEdit} ${actionEdit}
@@ -438,7 +499,7 @@ function setupDropzone() {
function setupAuth() { function setupAuth() {
if (DATA.user) { if (DATA.user) {
$userBtn.classList.remove("hidden"); $userBtn.classList.remove("hidden");
$userBtn.title = DATA.user; $userName.textContent = DATA.user;
} else { } else {
const $loginBtn = document.querySelector(".login-btn"); const $loginBtn = document.querySelector(".login-btn");
$loginBtn.classList.remove("hidden"); $loginBtn.classList.remove("hidden");
@@ -499,32 +560,40 @@ function setupNewFile() {
}); });
} }
async function setupEditPage() { async function setupEditorPage() {
const url = baseUrl(); const url = baseUrl();
const $download = document.querySelector(".download"); const $download = document.querySelector(".download");
$download.classList.remove("hidden"); $download.classList.remove("hidden");
$download.href = url; $download.href = url;
const $moveFile = document.querySelector(".move-file"); if (DATA.kind == "Edit") {
$moveFile.classList.remove("hidden"); const $moveFile = document.querySelector(".move-file");
$moveFile.addEventListener("click", async () => { $moveFile.classList.remove("hidden");
const query = location.href.slice(url.length); $moveFile.addEventListener("click", async () => {
const newFileUrl = await doMovePath(url); const query = location.href.slice(url.length);
if (newFileUrl) { const newFileUrl = await doMovePath(url);
location.href = newFileUrl + query; if (newFileUrl) {
} location.href = newFileUrl + query;
}); }
const $deleteFile = document.querySelector(".delete-file");
$deleteFile.classList.remove("hidden");
$deleteFile.addEventListener("click", async () => {
const url = baseUrl();
const name = baseName(url);
await doDeletePath(name, url, () => {
location.href = location.href.split("/").slice(0, -1).join("/");
}); });
})
const $deleteFile = document.querySelector(".delete-file");
$deleteFile.classList.remove("hidden");
$deleteFile.addEventListener("click", async () => {
const url = baseUrl();
const name = baseName(url);
await doDeletePath(name, url, () => {
location.href = location.href.split("/").slice(0, -1).join("/");
});
})
const $saveBtn = document.querySelector(".save-btn");
$saveBtn.classList.remove("hidden");
$saveBtn.addEventListener("click", saveChange);
} else if (DATA.kind == "View") {
$editor.readonly = true;
}
if (!DATA.editable) { if (!DATA.editable) {
const $notEditable = document.querySelector(".not-editable"); const $notEditable = document.querySelector(".not-editable");
@@ -539,10 +608,6 @@ async function setupEditPage() {
return; return;
} }
const $saveBtn = document.querySelector(".save-btn");
$saveBtn.classList.remove("hidden");
$saveBtn.addEventListener("click", saveChange);
$editor.classList.remove("hidden"); $editor.classList.remove("hidden");
try { try {
const res = await fetch(baseUrl()); const res = await fetch(baseUrl());
@@ -669,7 +734,7 @@ async function checkAuth() {
await assertResOK(res); await assertResOK(res);
document.querySelector(".login-btn").classList.add("hidden"); document.querySelector(".login-btn").classList.add("hidden");
$userBtn.classList.remove("hidden"); $userBtn.classList.remove("hidden");
$userBtn.title = ""; $userName.textContent = "";
} }
/** /**
@@ -712,8 +777,16 @@ async function addFileEntries(entries, dirs) {
new Uploader(file, dirs).upload(); new Uploader(file, dirs).upload();
}); });
} else if (entry.isDirectory) { } else if (entry.isDirectory) {
const dirReader = entry.createReader() const dirReader = entry.createReader();
dirReader.readEntries(entries => addFileEntries(entries, [...dirs, entry.name]));
const successCallback = entries => {
if (entries.length > 0) {
addFileEntries(entries, [...dirs, entry.name]);
dirReader.readEntries(successCallback);
}
};
dirReader.readEntries(successCallback);
} }
} }
} }
@@ -773,7 +846,7 @@ function padZero(value, size) {
} }
function formatSize(size) { function formatSize(size) {
if (size == null) return [] if (size == null) return [0, "B"]
const sizes = ['B', 'KB', 'MB', 'GB', 'TB']; const sizes = ['B', 'KB', 'MB', 'GB', 'TB'];
if (size == 0) return [0, "B"]; if (size == 0) return [0, "B"];
const i = parseInt(Math.floor(Math.log(size) / Math.log(1024))); const i = parseInt(Math.floor(Math.log(size) / Math.log(1024)));

View File

@@ -1,18 +1,16 @@
use anyhow::{bail, Context, Result}; use anyhow::{bail, Context, Result};
use clap::builder::PossibleValuesParser; use async_zip::Compression;
use clap::{value_parser, Arg, ArgAction, ArgMatches, Command}; use clap::builder::{PossibleValue, PossibleValuesParser};
use clap::{value_parser, Arg, ArgAction, ArgMatches, Command, ValueEnum};
use clap_complete::{generate, Generator, Shell}; use clap_complete::{generate, Generator, Shell};
#[cfg(feature = "tls")] use serde::{Deserialize, Deserializer};
use rustls::{Certificate, PrivateKey}; use smart_default::SmartDefault;
use std::env; use std::env;
use std::net::IpAddr; use std::net::IpAddr;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use crate::auth::AccessControl; use crate::auth::AccessControl;
use crate::auth::AuthMethod; use crate::http_logger::HttpLogger;
use crate::log_http::{LogHttp, DEFAULT_LOG_FORMAT};
#[cfg(feature = "tls")]
use crate::tls::{load_certs, load_private_key};
use crate::utils::encode_uri; use crate::utils::encode_uri;
pub fn build_cli() -> Command { pub fn build_cli() -> Command {
@@ -25,12 +23,21 @@ pub fn build_cli() -> Command {
env!("CARGO_PKG_REPOSITORY") env!("CARGO_PKG_REPOSITORY")
)) ))
.arg( .arg(
Arg::new("serve_path") Arg::new("serve-path")
.env("DUFS_SERVE_PATH") .env("DUFS_SERVE_PATH")
.hide_env(true) .hide_env(true)
.default_value(".")
.value_parser(value_parser!(PathBuf)) .value_parser(value_parser!(PathBuf))
.help("Specific path to serve"), .help("Specific path to serve [default: .]"),
)
.arg(
Arg::new("config")
.env("DUFS_CONFIG")
.hide_env(true)
.short('c')
.long("config")
.value_parser(value_parser!(PathBuf))
.help("Specify configuration file")
.value_name("file"),
) )
.arg( .arg(
Arg::new("bind") Arg::new("bind")
@@ -49,9 +56,8 @@ pub fn build_cli() -> Command {
.hide_env(true) .hide_env(true)
.short('p') .short('p')
.long("port") .long("port")
.default_value("5000")
.value_parser(value_parser!(u16)) .value_parser(value_parser!(u16))
.help("Specify port to listen on") .help("Specify port to listen on [default: 5000]")
.value_name("port"), .value_name("port"),
) )
.arg( .arg(
@@ -67,7 +73,9 @@ pub fn build_cli() -> Command {
.env("DUFS_HIDDEN") .env("DUFS_HIDDEN")
.hide_env(true) .hide_env(true)
.long("hidden") .long("hidden")
.help("Hide paths from directory listings, separated by `,`") .action(ArgAction::Append)
.value_delimiter(',')
.help("Hide paths from directory listings, e.g. tmp,*.log,*.lock")
.value_name("value"), .value_name("value"),
) )
.arg( .arg(
@@ -76,13 +84,13 @@ pub fn build_cli() -> Command {
.hide_env(true) .hide_env(true)
.short('a') .short('a')
.long("auth") .long("auth")
.help("Add auth role") .help("Add auth roles, e.g. user:pass@/dir1:rw,/dir2")
.action(ArgAction::Append) .action(ArgAction::Append)
.value_delimiter('|')
.value_name("rules"), .value_name("rules"),
) )
.arg( .arg(
Arg::new("auth-method") Arg::new("auth-method")
.hide(true)
.env("DUFS_AUTH_METHOD") .env("DUFS_AUTH_METHOD")
.hide_env(true) .hide_env(true)
.long("auth-method") .long("auth-method")
@@ -177,9 +185,33 @@ pub fn build_cli() -> Command {
.env("DUFS_ASSETS") .env("DUFS_ASSETS")
.hide_env(true) .hide_env(true)
.long("assets") .long("assets")
.help("Use custom assets to override builtin assets") .help("Set the path to the assets directory for overriding the built-in assets")
.value_parser(value_parser!(PathBuf)) .value_parser(value_parser!(PathBuf))
.value_name("path") .value_name("path")
)
.arg(
Arg::new("log-format")
.env("DUFS_LOG_FORMAT")
.hide_env(true)
.long("log-format")
.value_name("format")
.help("Customize http log format"),
)
.arg(
Arg::new("compress")
.env("DUFS_COMPRESS")
.hide_env(true)
.value_parser(clap::builder::EnumValueParser::<Compress>::new())
.long("compress")
.value_name("level")
.help("Set zip compress level [default: low]")
)
.arg(
Arg::new("completions")
.long("completions")
.value_name("shell")
.value_parser(value_parser!(Shell))
.help("Print shell completion script for <shell>"),
); );
#[cfg(feature = "tls")] #[cfg(feature = "tls")]
@@ -203,38 +235,38 @@ pub fn build_cli() -> Command {
.help("Path to the SSL/TLS certificate's private key"), .help("Path to the SSL/TLS certificate's private key"),
); );
app.arg( app
Arg::new("log-format")
.env("DUFS_LOG_FORMAT")
.hide_env(true)
.long("log-format")
.value_name("format")
.help("Customize http log format"),
)
.arg(
Arg::new("completions")
.long("completions")
.value_name("shell")
.value_parser(value_parser!(Shell))
.help("Print shell completion script for <shell>"),
)
} }
pub fn print_completions<G: Generator>(gen: G, cmd: &mut Command) { pub fn print_completions<G: Generator>(gen: G, cmd: &mut Command) {
generate(gen, cmd, cmd.get_name().to_string(), &mut std::io::stdout()); generate(gen, cmd, cmd.get_name().to_string(), &mut std::io::stdout());
} }
#[derive(Debug)] #[derive(Debug, Deserialize, SmartDefault, PartialEq)]
#[serde(default)]
#[serde(rename_all = "kebab-case")]
pub struct Args { pub struct Args {
#[serde(default = "default_serve_path")]
#[default(default_serve_path())]
pub serve_path: PathBuf,
#[serde(deserialize_with = "deserialize_bind_addrs")]
#[serde(rename = "bind")]
#[serde(default = "default_addrs")]
#[default(default_addrs())]
pub addrs: Vec<BindAddr>, pub addrs: Vec<BindAddr>,
#[serde(default = "default_port")]
#[default(default_port())]
pub port: u16, pub port: u16,
pub path: PathBuf, #[serde(skip)]
pub path_is_file: bool, pub path_is_file: bool,
pub path_prefix: String, pub path_prefix: String,
#[serde(skip)]
pub uri_prefix: String, pub uri_prefix: String,
#[serde(deserialize_with = "deserialize_string_or_vec")]
pub hidden: Vec<String>, pub hidden: Vec<String>,
pub auth_method: AuthMethod, #[serde(deserialize_with = "deserialize_access_control")]
pub auth: AccessControl, pub auth: AccessControl,
pub allow_all: bool,
pub allow_upload: bool, pub allow_upload: bool,
pub allow_delete: bool, pub allow_delete: bool,
pub allow_search: bool, pub allow_search: bool,
@@ -244,12 +276,13 @@ pub struct Args {
pub render_spa: bool, pub render_spa: bool,
pub render_try_index: bool, pub render_try_index: bool,
pub enable_cors: bool, pub enable_cors: bool,
pub assets_path: Option<PathBuf>, pub assets: Option<PathBuf>,
pub log_http: LogHttp, #[serde(deserialize_with = "deserialize_log_http")]
#[cfg(feature = "tls")] #[serde(rename = "log-format")]
pub tls: Option<(Vec<Certificate>, PrivateKey)>, pub http_logger: HttpLogger,
#[cfg(not(feature = "tls"))] pub compress: Compress,
pub tls: Option<()>, pub tls_cert: Option<PathBuf>,
pub tls_key: Option<PathBuf>,
} }
impl Args { impl Args {
@@ -258,95 +291,168 @@ impl Args {
/// If a parsing error occurred, exit the process and print out informative /// If a parsing error occurred, exit the process and print out informative
/// error message to user. /// error message to user.
pub fn parse(matches: ArgMatches) -> Result<Args> { pub fn parse(matches: ArgMatches) -> Result<Args> {
let port = *matches.get_one::<u16>("port").unwrap(); let mut args = Self::default();
let addrs = matches
.get_many::<String>("bind") if let Some(config_path) = matches.get_one::<PathBuf>("config") {
.map(|bind| bind.map(|v| v.as_str()).collect()) let contents = std::fs::read_to_string(config_path)
.unwrap_or_else(|| vec!["0.0.0.0", "::"]); .with_context(|| format!("Failed to read config at {}", config_path.display()))?;
let addrs: Vec<BindAddr> = Args::parse_addrs(&addrs)?; args = serde_yaml::from_str(&contents)
let path = Args::parse_path(matches.get_one::<PathBuf>("serve_path").unwrap())?; .with_context(|| format!("Failed to load config at {}", config_path.display()))?;
let path_is_file = path.metadata()?.is_file(); }
let path_prefix = matches
.get_one::<String>("path-prefix") if let Some(path) = matches.get_one::<PathBuf>("serve-path") {
.map(|v| v.trim_matches('/').to_owned()) args.serve_path = path.clone()
.unwrap_or_default(); }
let uri_prefix = if path_prefix.is_empty() {
args.serve_path = Self::sanitize_path(args.serve_path)?;
if let Some(port) = matches.get_one::<u16>("port") {
args.port = *port
}
if let Some(addrs) = matches.get_many::<String>("bind") {
let addrs: Vec<_> = addrs.map(|v| v.as_str()).collect();
args.addrs = BindAddr::parse_addrs(&addrs)?;
}
args.path_is_file = args.serve_path.metadata()?.is_file();
if let Some(path_prefix) = matches.get_one::<String>("path-prefix") {
args.path_prefix = path_prefix.clone();
}
args.path_prefix = args.path_prefix.trim_matches('/').to_string();
args.uri_prefix = if args.path_prefix.is_empty() {
"/".to_owned() "/".to_owned()
} else { } else {
format!("/{}/", &encode_uri(&path_prefix)) format!("/{}/", &encode_uri(&args.path_prefix))
};
let hidden: Vec<String> = matches
.get_one::<String>("hidden")
.map(|v| v.split(',').map(|x| x.to_string()).collect())
.unwrap_or_default();
let enable_cors = matches.get_flag("enable-cors");
let auth: Vec<&str> = matches
.get_many::<String>("auth")
.map(|auth| auth.map(|v| v.as_str()).collect())
.unwrap_or_default();
let auth_method = match matches.get_one::<String>("auth-method").unwrap().as_str() {
"basic" => AuthMethod::Basic,
_ => AuthMethod::Digest,
};
let auth = AccessControl::new(&auth)?;
let allow_upload = matches.get_flag("allow-all") || matches.get_flag("allow-upload");
let allow_delete = matches.get_flag("allow-all") || matches.get_flag("allow-delete");
let allow_search = matches.get_flag("allow-all") || matches.get_flag("allow-search");
let allow_symlink = matches.get_flag("allow-all") || matches.get_flag("allow-symlink");
let allow_archive = matches.get_flag("allow-all") || matches.get_flag("allow-archive");
let render_index = matches.get_flag("render-index");
let render_try_index = matches.get_flag("render-try-index");
let render_spa = matches.get_flag("render-spa");
#[cfg(feature = "tls")]
let tls = match (
matches.get_one::<PathBuf>("tls-cert"),
matches.get_one::<PathBuf>("tls-key"),
) {
(Some(certs_file), Some(key_file)) => {
let certs = load_certs(certs_file)?;
let key = load_private_key(key_file)?;
Some((certs, key))
}
_ => None,
};
#[cfg(not(feature = "tls"))]
let tls = None;
let log_http: LogHttp = matches
.get_one::<String>("log-format")
.map(|v| v.as_str())
.unwrap_or(DEFAULT_LOG_FORMAT)
.parse()?;
let assets_path = match matches.get_one::<PathBuf>("assets") {
Some(v) => Some(Args::parse_assets_path(v)?),
None => None,
}; };
Ok(Args { if let Some(hidden) = matches.get_many::<String>("hidden") {
addrs, args.hidden = hidden.cloned().collect();
port, } else {
path, let mut hidden = vec![];
path_is_file, std::mem::swap(&mut args.hidden, &mut hidden);
path_prefix, args.hidden = hidden
uri_prefix, .into_iter()
hidden, .flat_map(|v| v.split(',').map(|v| v.to_string()).collect::<Vec<String>>())
auth_method, .collect();
auth, }
enable_cors,
allow_delete, if !args.enable_cors {
allow_upload, args.enable_cors = matches.get_flag("enable-cors");
allow_search, }
allow_symlink,
allow_archive, if let Some(rules) = matches.get_many::<String>("auth") {
render_index, let rules: Vec<_> = rules.map(|v| v.as_str()).collect();
render_try_index, args.auth = AccessControl::new(&rules)?;
render_spa, }
tls,
log_http, if !args.allow_all {
assets_path, args.allow_all = matches.get_flag("allow-all");
}) }
let allow_all = args.allow_all;
if !args.allow_upload {
args.allow_upload = allow_all || matches.get_flag("allow-upload");
}
if !args.allow_delete {
args.allow_delete = allow_all || matches.get_flag("allow-delete");
}
if !args.allow_search {
args.allow_search = allow_all || matches.get_flag("allow-search");
}
if !args.allow_symlink {
args.allow_symlink = allow_all || matches.get_flag("allow-symlink");
}
if !args.allow_archive {
args.allow_archive = allow_all || matches.get_flag("allow-archive");
}
if !args.render_index {
args.render_index = matches.get_flag("render-index");
}
if !args.render_try_index {
args.render_try_index = matches.get_flag("render-try-index");
}
if !args.render_spa {
args.render_spa = matches.get_flag("render-spa");
}
if let Some(assets_path) = matches.get_one::<PathBuf>("assets") {
args.assets = Some(assets_path.clone());
}
if let Some(assets_path) = &args.assets {
args.assets = Some(Args::sanitize_assets_path(assets_path)?);
}
if let Some(log_format) = matches.get_one::<String>("log-format") {
args.http_logger = log_format.parse()?;
}
if let Some(compress) = matches.get_one::<Compress>("compress") {
args.compress = *compress;
}
#[cfg(feature = "tls")]
{
if let Some(tls_cert) = matches.get_one::<PathBuf>("tls-cert") {
args.tls_cert = Some(tls_cert.clone())
}
if let Some(tls_key) = matches.get_one::<PathBuf>("tls-key") {
args.tls_key = Some(tls_key.clone())
}
match (&args.tls_cert, &args.tls_key) {
(Some(_), Some(_)) => {}
(Some(_), _) => bail!("No tls-key set"),
(_, Some(_)) => bail!("No tls-cert set"),
(None, None) => {}
}
}
#[cfg(not(feature = "tls"))]
{
args.tls_cert = None;
args.tls_key = None;
}
Ok(args)
} }
fn parse_addrs(addrs: &[&str]) -> Result<Vec<BindAddr>> { fn sanitize_path<P: AsRef<Path>>(path: P) -> Result<PathBuf> {
let path = path.as_ref();
if !path.exists() {
bail!("Path `{}` doesn't exist", path.display());
}
env::current_dir()
.and_then(|mut p| {
p.push(path); // If path is absolute, it replaces the current path.
std::fs::canonicalize(p)
})
.with_context(|| format!("Failed to access path `{}`", path.display()))
}
fn sanitize_assets_path<P: AsRef<Path>>(path: P) -> Result<PathBuf> {
let path = Self::sanitize_path(path)?;
if !path.join("index.html").exists() {
bail!("Path `{}` doesn't contains index.html", path.display());
}
Ok(path)
}
}
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum BindAddr {
Address(IpAddr),
Path(PathBuf),
}
impl BindAddr {
fn parse_addrs(addrs: &[&str]) -> Result<Vec<Self>> {
let mut bind_addrs = vec![]; let mut bind_addrs = vec![];
let mut invalid_addrs = vec![]; let mut invalid_addrs = vec![];
for addr in addrs { for addr in addrs {
@@ -368,32 +474,262 @@ impl Args {
} }
Ok(bind_addrs) Ok(bind_addrs)
} }
}
fn parse_path<P: AsRef<Path>>(path: P) -> Result<PathBuf> { #[derive(Debug, Clone, Copy, PartialEq, Deserialize)]
let path = path.as_ref(); #[serde(rename_all = "lowercase")]
if !path.exists() { pub enum Compress {
bail!("Path `{}` doesn't exist", path.display()); None,
} Low,
Medium,
High,
}
env::current_dir() impl Default for Compress {
.and_then(|mut p| { fn default() -> Self {
p.push(path); // If path is absolute, it replaces the current path. Self::Low
std::fs::canonicalize(p)
})
.with_context(|| format!("Failed to access path `{}`", path.display()))
}
fn parse_assets_path<P: AsRef<Path>>(path: P) -> Result<PathBuf> {
let path = Self::parse_path(path)?;
if !path.join("index.html").exists() {
bail!("Path `{}` doesn't contains index.html", path.display());
}
Ok(path)
} }
} }
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] impl ValueEnum for Compress {
pub enum BindAddr { fn value_variants<'a>() -> &'a [Self] {
Address(IpAddr), &[Self::None, Self::Low, Self::Medium, Self::High]
Path(PathBuf), }
fn to_possible_value(&self) -> Option<clap::builder::PossibleValue> {
Some(match self {
Compress::None => PossibleValue::new("none"),
Compress::Low => PossibleValue::new("low"),
Compress::Medium => PossibleValue::new("medium"),
Compress::High => PossibleValue::new("high"),
})
}
}
impl Compress {
pub fn to_compression(self) -> Compression {
match self {
Compress::None => Compression::Stored,
Compress::Low => Compression::Deflate,
Compress::Medium => Compression::Bz,
Compress::High => Compression::Xz,
}
}
}
fn deserialize_bind_addrs<'de, D>(deserializer: D) -> Result<Vec<BindAddr>, D::Error>
where
D: Deserializer<'de>,
{
struct StringOrVec;
impl<'de> serde::de::Visitor<'de> for StringOrVec {
type Value = Vec<BindAddr>;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
formatter.write_str("string or list of strings")
}
fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
BindAddr::parse_addrs(&[s]).map_err(serde::de::Error::custom)
}
fn visit_seq<S>(self, seq: S) -> Result<Self::Value, S::Error>
where
S: serde::de::SeqAccess<'de>,
{
let addrs: Vec<&'de str> =
Deserialize::deserialize(serde::de::value::SeqAccessDeserializer::new(seq))?;
BindAddr::parse_addrs(&addrs).map_err(serde::de::Error::custom)
}
}
deserializer.deserialize_any(StringOrVec)
}
fn deserialize_string_or_vec<'de, D>(deserializer: D) -> Result<Vec<String>, D::Error>
where
D: Deserializer<'de>,
{
struct StringOrVec;
impl<'de> serde::de::Visitor<'de> for StringOrVec {
type Value = Vec<String>;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
formatter.write_str("string or list of strings")
}
fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(vec![s.to_owned()])
}
fn visit_seq<S>(self, seq: S) -> Result<Self::Value, S::Error>
where
S: serde::de::SeqAccess<'de>,
{
Deserialize::deserialize(serde::de::value::SeqAccessDeserializer::new(seq))
}
}
deserializer.deserialize_any(StringOrVec)
}
fn deserialize_access_control<'de, D>(deserializer: D) -> Result<AccessControl, D::Error>
where
D: Deserializer<'de>,
{
let rules: Vec<&str> = Vec::deserialize(deserializer)?;
AccessControl::new(&rules).map_err(serde::de::Error::custom)
}
fn deserialize_log_http<'de, D>(deserializer: D) -> Result<HttpLogger, D::Error>
where
D: Deserializer<'de>,
{
let value: String = Deserialize::deserialize(deserializer)?;
value.parse().map_err(serde::de::Error::custom)
}
fn default_serve_path() -> PathBuf {
PathBuf::from(".")
}
fn default_addrs() -> Vec<BindAddr> {
BindAddr::parse_addrs(&["0.0.0.0", "::"]).unwrap()
}
fn default_port() -> u16 {
5000
}
#[cfg(test)]
mod tests {
use super::*;
use assert_fs::prelude::*;
#[test]
fn test_default() {
let cli = build_cli();
let matches = cli.try_get_matches_from(vec![""]).unwrap();
let args = Args::parse(matches).unwrap();
let cwd = Args::sanitize_path(std::env::current_dir().unwrap()).unwrap();
assert_eq!(args.serve_path, cwd);
assert_eq!(args.port, default_port());
assert_eq!(args.addrs, default_addrs());
}
#[test]
fn test_args_from_cli1() {
let tmpdir = assert_fs::TempDir::new().unwrap();
let cli = build_cli();
let matches = cli
.try_get_matches_from(vec![
"",
"--hidden",
"tmp,*.log,*.lock",
&tmpdir.to_string_lossy(),
])
.unwrap();
let args = Args::parse(matches).unwrap();
assert_eq!(args.serve_path, Args::sanitize_path(&tmpdir).unwrap());
assert_eq!(args.hidden, ["tmp", "*.log", "*.lock"]);
}
#[test]
fn test_args_from_cli2() {
let cli = build_cli();
let matches = cli
.try_get_matches_from(vec![
"", "--hidden", "tmp", "--hidden", "*.log", "--hidden", "*.lock",
])
.unwrap();
let args = Args::parse(matches).unwrap();
assert_eq!(args.hidden, ["tmp", "*.log", "*.lock"]);
}
#[test]
fn test_args_from_empty_config_file() {
let tmpdir = assert_fs::TempDir::new().unwrap();
let config_file = tmpdir.child("config.yaml");
config_file.write_str("").unwrap();
let cli = build_cli();
let matches = cli
.try_get_matches_from(vec!["", "-c", &config_file.to_string_lossy()])
.unwrap();
let args = Args::parse(matches).unwrap();
let cwd = Args::sanitize_path(std::env::current_dir().unwrap()).unwrap();
assert_eq!(args.serve_path, cwd);
assert_eq!(args.port, default_port());
assert_eq!(args.addrs, default_addrs());
}
#[test]
fn test_args_from_config_file1() {
let tmpdir = assert_fs::TempDir::new().unwrap();
let config_file = tmpdir.child("config.yaml");
let contents = format!(
r#"
serve-path: {}
bind: 0.0.0.0
port: 3000
allow-upload: true
hidden: tmp,*.log,*.lock
"#,
tmpdir.display()
);
config_file.write_str(&contents).unwrap();
let cli = build_cli();
let matches = cli
.try_get_matches_from(vec!["", "-c", &config_file.to_string_lossy()])
.unwrap();
let args = Args::parse(matches).unwrap();
assert_eq!(args.serve_path, Args::sanitize_path(&tmpdir).unwrap());
assert_eq!(
args.addrs,
vec![BindAddr::Address("0.0.0.0".parse().unwrap())]
);
assert_eq!(args.hidden, ["tmp", "*.log", "*.lock"]);
assert_eq!(args.port, 3000);
assert!(args.allow_upload);
}
#[test]
fn test_args_from_config_file2() {
let tmpdir = assert_fs::TempDir::new().unwrap();
let config_file = tmpdir.child("config.yaml");
let contents = r#"
bind:
- 127.0.0.1
- 192.168.8.10
hidden:
- tmp
- '*.log'
- '*.lock'
"#;
config_file.write_str(contents).unwrap();
let cli = build_cli();
let matches = cli
.try_get_matches_from(vec!["", "-c", &config_file.to_string_lossy()])
.unwrap();
let args = Args::parse(matches).unwrap();
assert_eq!(
args.addrs,
vec![
BindAddr::Address("127.0.0.1".parse().unwrap()),
BindAddr::Address("192.168.8.10".parse().unwrap())
]
);
assert_eq!(args.hidden, ["tmp", "*.log", "*.lock"]);
}
} }

View File

@@ -1,7 +1,9 @@
use crate::{args::Args, server::Response, utils::unix_now};
use anyhow::{anyhow, bail, Result}; use anyhow::{anyhow, bail, Result};
use base64::{engine::general_purpose, Engine as _}; use base64::{engine::general_purpose, Engine as _};
use headers::HeaderValue; use headers::HeaderValue;
use hyper::Method; use hyper::{header::WWW_AUTHENTICATE, Method};
use indexmap::IndexMap; use indexmap::IndexMap;
use lazy_static::lazy_static; use lazy_static::lazy_static;
use md5::Context; use md5::Context;
@@ -11,10 +13,8 @@ use std::{
}; };
use uuid::Uuid; use uuid::Uuid;
use crate::utils::unix_now;
const REALM: &str = "DUFS"; const REALM: &str = "DUFS";
const DIGEST_AUTH_TIMEOUT: u32 = 86400; const DIGEST_AUTH_TIMEOUT: u32 = 604800; // 7 days
lazy_static! { lazy_static! {
static ref NONCESTARTHASH: Context = { static ref NONCESTARTHASH: Context = {
@@ -25,59 +25,70 @@ lazy_static! {
}; };
} }
#[derive(Debug, Default)] #[derive(Debug, Clone, PartialEq)]
pub struct AccessControl { pub struct AccessControl {
use_hashed_password: bool,
users: IndexMap<String, (String, AccessPaths)>, users: IndexMap<String, (String, AccessPaths)>,
anony: Option<AccessPaths>, anony: Option<AccessPaths>,
} }
impl Default for AccessControl {
fn default() -> Self {
AccessControl {
use_hashed_password: false,
anony: Some(AccessPaths::new(AccessPerm::ReadWrite)),
users: IndexMap::new(),
}
}
}
impl AccessControl { impl AccessControl {
pub fn new(raw_rules: &[&str]) -> Result<Self> { pub fn new(raw_rules: &[&str]) -> Result<Self> {
if raw_rules.is_empty() { if raw_rules.is_empty() {
return Ok(AccessControl { return Ok(Default::default());
anony: Some(AccessPaths::new(AccessPerm::ReadWrite)), }
users: IndexMap::new(), let new_raw_rules = split_rules(raw_rules);
}); let mut use_hashed_password = false;
let mut annoy_paths = None;
let mut account_paths_pairs = vec![];
for rule in &new_raw_rules {
let (account, paths) =
split_account_paths(rule).ok_or_else(|| anyhow!("Invalid auth `{rule}`"))?;
if account.is_empty() {
if annoy_paths.is_some() {
bail!("Invalid auth, no duplicate anonymous rules");
}
annoy_paths = Some(paths)
} else if let Some((user, pass)) = account.split_once(':') {
if user.is_empty() || pass.is_empty() {
bail!("Invalid auth `{rule}`");
}
account_paths_pairs.push((user, pass, paths));
}
}
let mut anony = None;
if let Some(paths) = annoy_paths {
let mut access_paths = AccessPaths::default();
access_paths.merge(paths);
anony = Some(access_paths);
}
let mut users = IndexMap::new();
for (user, pass, paths) in account_paths_pairs.into_iter() {
let mut access_paths = anony.clone().unwrap_or_default();
access_paths
.merge(paths)
.ok_or_else(|| anyhow!("Invalid auth `{user}:{pass}@{paths}"))?;
if pass.starts_with("$6$") {
use_hashed_password = true;
}
users.insert(user.to_string(), (pass.to_string(), access_paths));
} }
let create_err = |v: &str| anyhow!("Invalid auth `{v}`"); Ok(Self {
let mut anony = None; use_hashed_password,
let mut anony_paths = vec![]; users,
let mut users = IndexMap::new(); anony,
for rule in raw_rules { })
let (user, list) = rule.split_once('@').ok_or_else(|| create_err(rule))?;
if user.is_empty() && anony.is_some() {
bail!("Invalid auth, duplicate anonymous rules");
}
let mut paths = AccessPaths::default();
for value in list.trim_matches(',').split(',') {
let (path, perm) = match value.split_once(':') {
None => (value, AccessPerm::ReadOnly),
Some((path, "rw")) => (path, AccessPerm::ReadWrite),
_ => return Err(create_err(rule)),
};
if user.is_empty() {
anony_paths.push((path, perm));
}
paths.add(path, perm);
}
if user.is_empty() {
anony = Some(paths);
} else if let Some((user, pass)) = user.split_once(':') {
if user.is_empty() || pass.is_empty() {
return Err(create_err(rule));
}
users.insert(user.to_string(), (pass.to_string(), paths));
} else {
return Err(create_err(rule));
}
}
for (path, perm) in anony_paths {
for (_, (_, paths)) in users.iter_mut() {
paths.add(path, perm)
}
}
Ok(Self { users, anony })
} }
pub fn exist(&self) -> bool { pub fn exist(&self) -> bool {
@@ -89,18 +100,14 @@ impl AccessControl {
path: &str, path: &str,
method: &Method, method: &Method,
authorization: Option<&HeaderValue>, authorization: Option<&HeaderValue>,
auth_method: AuthMethod,
) -> (Option<String>, Option<AccessPaths>) { ) -> (Option<String>, Option<AccessPaths>) {
if let Some(authorization) = authorization { if let Some(authorization) = authorization {
if let Some(user) = auth_method.get_user(authorization) { if let Some(user) = get_auth_user(authorization) {
if let Some((pass, paths)) = self.users.get(&user) { if let Some((pass, paths)) = self.users.get(&user) {
if method == Method::OPTIONS { if method == Method::OPTIONS {
return (Some(user), Some(AccessPaths::new(AccessPerm::ReadOnly))); return (Some(user), Some(AccessPaths::new(AccessPerm::ReadOnly)));
} }
if auth_method if check_auth(authorization, method.as_str(), &user, pass).is_some() {
.check(authorization, method.as_str(), &user, pass)
.is_some()
{
return (Some(user), paths.find(path, !is_readonly_method(method))); return (Some(user), paths.find(path, !is_readonly_method(method)));
} else { } else {
return (None, None); return (None, None);
@@ -139,13 +146,27 @@ impl AccessPaths {
self.perm self.perm
} }
fn set_perm(&mut self, perm: AccessPerm) { pub fn set_perm(&mut self, perm: AccessPerm) {
if self.perm < perm { if !perm.inherit() {
self.perm = perm self.perm = perm;
} }
} }
pub fn add(&mut self, path: &str, perm: AccessPerm) { pub fn merge(&mut self, paths: &str) -> Option<()> {
for item in paths.trim_matches(',').split(',') {
let (path, perm) = match item.split_once(':') {
None => (item, AccessPerm::ReadOnly),
Some((path, "ro")) => (path, AccessPerm::ReadOnly),
Some((path, "rw")) => (path, AccessPerm::ReadWrite),
Some((path, "-")) => (path, AccessPerm::Forbidden),
_ => return None,
};
self.add(path, perm);
}
Some(())
}
fn add(&mut self, path: &str, perm: AccessPerm) {
let path = path.trim_matches('/'); let path = path.trim_matches('/');
if path.is_empty() { if path.is_empty() {
self.set_perm(perm); self.set_perm(perm);
@@ -172,6 +193,9 @@ impl AccessPaths {
.filter(|v| !v.is_empty()) .filter(|v| !v.is_empty())
.collect(); .collect();
let target = self.find_impl(&parts, self.perm)?; let target = self.find_impl(&parts, self.perm)?;
if target.perm().forbidden() {
return None;
}
if writable && !target.perm().readwrite() { if writable && !target.perm().readwrite() {
return None; return None;
} }
@@ -179,9 +203,13 @@ impl AccessPaths {
} }
fn find_impl(&self, parts: &[&str], perm: AccessPerm) -> Option<AccessPaths> { fn find_impl(&self, parts: &[&str], perm: AccessPerm) -> Option<AccessPaths> {
let perm = self.perm.max(perm); let perm = if !self.perm.inherit() {
self.perm
} else {
perm
};
if parts.is_empty() { if parts.is_empty() {
if perm.indexonly() { if perm.inherit() {
return Some(self.clone()); return Some(self.clone());
} else { } else {
return Some(AccessPaths::new(perm)); return Some(AccessPaths::new(perm));
@@ -190,7 +218,7 @@ impl AccessPaths {
let child = match self.children.get(parts[0]) { let child = match self.children.get(parts[0]) {
Some(v) => v, Some(v) => v,
None => { None => {
if perm.indexonly() { if perm.inherit() {
return None; return None;
} else { } else {
return Some(AccessPaths::new(perm)); return Some(AccessPaths::new(perm));
@@ -200,24 +228,24 @@ impl AccessPaths {
child.find_impl(&parts[1..], perm) child.find_impl(&parts[1..], perm)
} }
pub fn child_paths(&self) -> Vec<&String> { pub fn child_names(&self) -> Vec<&String> {
self.children.keys().collect() self.children.keys().collect()
} }
pub fn leaf_paths(&self, base: &Path) -> Vec<PathBuf> { pub fn child_paths(&self, base: &Path) -> Vec<PathBuf> {
if !self.perm().indexonly() { if !self.perm().inherit() {
return vec![base.to_path_buf()]; return vec![base.to_path_buf()];
} }
let mut output = vec![]; let mut output = vec![];
self.leaf_paths_impl(&mut output, base); self.child_paths_impl(&mut output, base);
output output
} }
fn leaf_paths_impl(&self, output: &mut Vec<PathBuf>, base: &Path) { fn child_paths_impl(&self, output: &mut Vec<PathBuf>, base: &Path) {
for (name, child) in self.children.iter() { for (name, child) in self.children.iter() {
let base = base.join(name); let base = base.join(name);
if child.perm().indexonly() { if child.perm().inherit() {
child.leaf_paths_impl(output, &base); child.child_paths_impl(output, &base);
} else { } else {
output.push(base) output.push(base)
} }
@@ -228,162 +256,151 @@ impl AccessPaths {
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default)] #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default)]
pub enum AccessPerm { pub enum AccessPerm {
#[default] #[default]
IndexOnly, Inherit,
ReadOnly, ReadOnly,
ReadWrite, ReadWrite,
Forbidden,
} }
impl AccessPerm { impl AccessPerm {
pub fn inherit(&self) -> bool {
self == &AccessPerm::Inherit
}
pub fn readwrite(&self) -> bool { pub fn readwrite(&self) -> bool {
self == &AccessPerm::ReadWrite self == &AccessPerm::ReadWrite
} }
pub fn indexonly(&self) -> bool { pub fn forbidden(&self) -> bool {
self == &AccessPerm::IndexOnly self == &AccessPerm::Forbidden
} }
} }
fn is_readonly_method(method: &Method) -> bool { pub fn www_authenticate(res: &mut Response, args: &Args) -> Result<()> {
method == Method::GET if args.auth.use_hashed_password {
|| method == Method::OPTIONS let basic = HeaderValue::from_str(&format!("Basic realm=\"{}\"", REALM))?;
|| method == Method::HEAD res.headers_mut().insert(WWW_AUTHENTICATE, basic);
|| method.as_str() == "PROPFIND" } else {
let nonce = create_nonce()?;
let digest = HeaderValue::from_str(&format!(
"Digest realm=\"{}\", nonce=\"{}\", qop=\"auth\"",
REALM, nonce
))?;
let basic = HeaderValue::from_str(&format!("Basic realm=\"{}\"", REALM))?;
res.headers_mut().append(WWW_AUTHENTICATE, digest);
res.headers_mut().append(WWW_AUTHENTICATE, basic);
}
Ok(())
} }
#[derive(Debug, Clone)] pub fn get_auth_user(authorization: &HeaderValue) -> Option<String> {
pub enum AuthMethod { if let Some(value) = strip_prefix(authorization.as_bytes(), b"Basic ") {
Basic, let value: Vec<u8> = general_purpose::STANDARD.decode(value).ok()?;
Digest, let parts: Vec<&str> = std::str::from_utf8(&value).ok()?.split(':').collect();
Some(parts[0].to_string())
} else if let Some(value) = strip_prefix(authorization.as_bytes(), b"Digest ") {
let digest_map = to_headermap(value).ok()?;
let username = digest_map.get(b"username".as_ref())?;
std::str::from_utf8(username).map(|v| v.to_string()).ok()
} else {
None
}
} }
impl AuthMethod { pub fn check_auth(
pub fn www_auth(&self) -> Result<String> { authorization: &HeaderValue,
match self { method: &str,
AuthMethod::Basic => Ok(format!("Basic realm=\"{REALM}\"")), auth_user: &str,
AuthMethod::Digest => Ok(format!( auth_pass: &str,
"Digest realm=\"{}\",nonce=\"{}\",qop=\"auth\"", ) -> Option<()> {
REALM, if let Some(value) = strip_prefix(authorization.as_bytes(), b"Basic ") {
create_nonce()?, let value: Vec<u8> = general_purpose::STANDARD.decode(value).ok()?;
)), let parts: Vec<&str> = std::str::from_utf8(&value).ok()?.split(':').collect();
if parts[0] != auth_user {
return None;
} }
}
pub fn get_user(&self, authorization: &HeaderValue) -> Option<String> { if auth_pass.starts_with("$6$") {
match self { if let Ok(()) = sha_crypt::sha512_check(parts[1], auth_pass) {
AuthMethod::Basic => { return Some(());
let value: Vec<u8> = general_purpose::STANDARD
.decode(strip_prefix(authorization.as_bytes(), b"Basic ")?)
.ok()?;
let parts: Vec<&str> = std::str::from_utf8(&value).ok()?.split(':').collect();
Some(parts[0].to_string())
}
AuthMethod::Digest => {
let digest_value = strip_prefix(authorization.as_bytes(), b"Digest ")?;
let digest_map = to_headermap(digest_value).ok()?;
digest_map
.get(b"username".as_ref())
.and_then(|b| std::str::from_utf8(b).ok())
.map(|v| v.to_string())
} }
} else if parts[1] == auth_pass {
return Some(());
} }
}
fn check( None
&self, } else if let Some(value) = strip_prefix(authorization.as_bytes(), b"Digest ") {
authorization: &HeaderValue, let digest_map = to_headermap(value).ok()?;
method: &str, if let (Some(username), Some(nonce), Some(user_response)) = (
auth_user: &str, digest_map
auth_pass: &str, .get(b"username".as_ref())
) -> Option<()> { .and_then(|b| std::str::from_utf8(b).ok()),
match self { digest_map.get(b"nonce".as_ref()),
AuthMethod::Basic => { digest_map.get(b"response".as_ref()),
let basic_value: Vec<u8> = general_purpose::STANDARD ) {
.decode(strip_prefix(authorization.as_bytes(), b"Basic ")?) match validate_nonce(nonce) {
.ok()?; Ok(true) => {}
let parts: Vec<&str> = std::str::from_utf8(&basic_value).ok()?.split(':').collect(); _ => return None,
}
if parts[0] != auth_user { if auth_user != username {
return None; return None;
}
if parts[1] == auth_pass {
return Some(());
}
None
} }
AuthMethod::Digest => {
let digest_value = strip_prefix(authorization.as_bytes(), b"Digest ")?;
let digest_map = to_headermap(digest_value).ok()?;
if let (Some(username), Some(nonce), Some(user_response)) = (
digest_map
.get(b"username".as_ref())
.and_then(|b| std::str::from_utf8(b).ok()),
digest_map.get(b"nonce".as_ref()),
digest_map.get(b"response".as_ref()),
) {
match validate_nonce(nonce) {
Ok(true) => {}
_ => return None,
}
if auth_user != username {
return None;
}
let mut h = Context::new(); let mut h = Context::new();
h.consume(format!("{}:{}:{}", auth_user, REALM, auth_pass).as_bytes()); h.consume(format!("{}:{}:{}", auth_user, REALM, auth_pass).as_bytes());
let auth_pass = format!("{:x}", h.compute()); let auth_pass = format!("{:x}", h.compute());
let mut ha = Context::new(); let mut ha = Context::new();
ha.consume(method); ha.consume(method);
ha.consume(b":"); ha.consume(b":");
if let Some(uri) = digest_map.get(b"uri".as_ref()) { if let Some(uri) = digest_map.get(b"uri".as_ref()) {
ha.consume(uri); ha.consume(uri);
} }
let ha = format!("{:x}", ha.compute()); let ha = format!("{:x}", ha.compute());
let mut correct_response = None; let mut correct_response = None;
if let Some(qop) = digest_map.get(b"qop".as_ref()) { if let Some(qop) = digest_map.get(b"qop".as_ref()) {
if qop == &b"auth".as_ref() || qop == &b"auth-int".as_ref() { if qop == &b"auth".as_ref() || qop == &b"auth-int".as_ref() {
correct_response = Some({ correct_response = Some({
let mut c = Context::new(); let mut c = Context::new();
c.consume(&auth_pass); c.consume(&auth_pass);
c.consume(b":"); c.consume(b":");
c.consume(nonce); c.consume(nonce);
c.consume(b":"); c.consume(b":");
if let Some(nc) = digest_map.get(b"nc".as_ref()) { if let Some(nc) = digest_map.get(b"nc".as_ref()) {
c.consume(nc); c.consume(nc);
}
c.consume(b":");
if let Some(cnonce) = digest_map.get(b"cnonce".as_ref()) {
c.consume(cnonce);
}
c.consume(b":");
c.consume(qop);
c.consume(b":");
c.consume(&*ha);
format!("{:x}", c.compute())
});
} }
} c.consume(b":");
let correct_response = match correct_response { if let Some(cnonce) = digest_map.get(b"cnonce".as_ref()) {
Some(r) => r, c.consume(cnonce);
None => {
let mut c = Context::new();
c.consume(&auth_pass);
c.consume(b":");
c.consume(nonce);
c.consume(b":");
c.consume(&*ha);
format!("{:x}", c.compute())
} }
}; c.consume(b":");
if correct_response.as_bytes() == *user_response { c.consume(qop);
return Some(()); c.consume(b":");
} c.consume(&*ha);
format!("{:x}", c.compute())
});
} }
None }
let correct_response = match correct_response {
Some(r) => r,
None => {
let mut c = Context::new();
c.consume(&auth_pass);
c.consume(b":");
c.consume(nonce);
c.consume(b":");
c.consume(&*ha);
format!("{:x}", c.compute())
}
};
if correct_response.as_bytes() == *user_response {
return Some(());
} }
} }
None
} else {
None
} }
} }
@@ -415,6 +432,13 @@ fn validate_nonce(nonce: &[u8]) -> Result<bool> {
bail!("invalid nonce"); bail!("invalid nonce");
} }
fn is_readonly_method(method: &Method) -> bool {
method == Method::GET
|| method == Method::OPTIONS
|| method == Method::HEAD
|| method.as_str() == "PROPFIND"
}
fn strip_prefix<'a>(search: &'a [u8], prefix: &[u8]) -> Option<&'a [u8]> { fn strip_prefix<'a>(search: &'a [u8], prefix: &[u8]) -> Option<&'a [u8]> {
let l = prefix.len(); let l = prefix.len();
if search.len() < l { if search.len() < l {
@@ -478,24 +502,91 @@ fn create_nonce() -> Result<String> {
Ok(n[..34].to_string()) Ok(n[..34].to_string())
} }
fn split_account_paths(s: &str) -> Option<(&str, &str)> {
let i = s.find("@/")?;
Some((&s[0..i], &s[i + 1..]))
}
fn split_rules(rules: &[&str]) -> Vec<String> {
let mut output = vec![];
for rule in rules {
let parts: Vec<&str> = rule.split('|').collect();
let mut rules_list = vec![];
let mut concated_part = String::new();
for (i, part) in parts.iter().enumerate() {
if part.contains("@/") {
concated_part.push_str(part);
let mut concated_part_tmp = String::new();
std::mem::swap(&mut concated_part_tmp, &mut concated_part);
rules_list.push(concated_part_tmp);
continue;
}
concated_part.push_str(part);
if i < parts.len() - 1 {
concated_part.push('|');
}
}
if !concated_part.is_empty() {
rules_list.push(concated_part)
}
output.extend(rules_list);
}
output
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
#[test]
fn test_split_account_paths() {
assert_eq!(
split_account_paths("user:pass@/:rw"),
Some(("user:pass", "/:rw"))
);
assert_eq!(
split_account_paths("user:pass@@/:rw"),
Some(("user:pass@", "/:rw"))
);
assert_eq!(
split_account_paths("user:pass@1@/:rw"),
Some(("user:pass@1", "/:rw"))
);
}
#[test]
fn test_compact_split_rules() {
assert_eq!(
split_rules(&["user1:pass1@/:rw|user2:pass2@/:rw"]),
["user1:pass1@/:rw", "user2:pass2@/:rw"]
);
assert_eq!(
split_rules(&["user1:pa|ss1@/:rw|user2:pa|ss2@/:rw"]),
["user1:pa|ss1@/:rw", "user2:pa|ss2@/:rw"]
);
assert_eq!(
split_rules(&["user1:pa|ss1@/:rw|@/"]),
["user1:pa|ss1@/:rw", "@/"]
);
}
#[test] #[test]
fn test_access_paths() { fn test_access_paths() {
let mut paths = AccessPaths::default(); let mut paths = AccessPaths::default();
paths.add("/dir1", AccessPerm::ReadWrite); paths.add("/dir1", AccessPerm::ReadWrite);
paths.add("/dir2/dir1", AccessPerm::ReadWrite); paths.add("/dir2/dir21", AccessPerm::ReadWrite);
paths.add("/dir2/dir2", AccessPerm::ReadOnly); paths.add("/dir2/dir21/dir211", AccessPerm::ReadOnly);
paths.add("/dir2/dir3/dir1", AccessPerm::ReadWrite); paths.add("/dir2/dir21/dir212", AccessPerm::Forbidden);
paths.add("/dir2/dir22", AccessPerm::ReadOnly);
paths.add("/dir2/dir22/dir221", AccessPerm::ReadWrite);
paths.add("/dir2/dir23/dir231", AccessPerm::ReadWrite);
assert_eq!( assert_eq!(
paths.leaf_paths(Path::new("/tmp")), paths.child_paths(Path::new("/tmp")),
[ [
"/tmp/dir1", "/tmp/dir1",
"/tmp/dir2/dir1", "/tmp/dir2/dir21",
"/tmp/dir2/dir2", "/tmp/dir2/dir22",
"/tmp/dir2/dir3/dir1" "/tmp/dir2/dir23/dir231",
] ]
.iter() .iter()
.map(PathBuf::from) .map(PathBuf::from)
@@ -504,27 +595,32 @@ mod tests {
assert_eq!( assert_eq!(
paths paths
.find("dir2", false) .find("dir2", false)
.map(|v| v.leaf_paths(Path::new("/tmp/dir2"))), .map(|v| v.child_paths(Path::new("/tmp/dir2"))),
Some( Some(
["/tmp/dir2/dir1", "/tmp/dir2/dir2", "/tmp/dir2/dir3/dir1"] [
.iter() "/tmp/dir2/dir21",
.map(PathBuf::from) "/tmp/dir2/dir22",
.collect::<Vec<_>>() "/tmp/dir2/dir23/dir231"
]
.iter()
.map(PathBuf::from)
.collect::<Vec<_>>()
) )
); );
assert_eq!(paths.find("dir2", true), None); assert_eq!(paths.find("dir2", true), None);
assert!(paths.find("dir1/file", true).is_some()); assert_eq!(
} paths.find("dir1/file", true),
Some(AccessPaths::new(AccessPerm::ReadWrite))
#[test] );
fn test_access_paths_perm() { assert_eq!(
let mut paths = AccessPaths::default(); paths.find("dir2/dir21/file", true),
assert_eq!(paths.perm(), AccessPerm::IndexOnly); Some(AccessPaths::new(AccessPerm::ReadWrite))
paths.set_perm(AccessPerm::ReadOnly); );
assert_eq!(paths.perm(), AccessPerm::ReadOnly); assert_eq!(
paths.set_perm(AccessPerm::ReadWrite); paths.find("dir2/dir21/dir211/file", false),
assert_eq!(paths.perm(), AccessPerm::ReadWrite); Some(AccessPaths::new(AccessPerm::ReadOnly))
paths.set_perm(AccessPerm::ReadOnly); );
assert_eq!(paths.perm(), AccessPerm::ReadWrite); assert_eq!(paths.find("dir2/dir21/dir211/file", true), None);
assert_eq!(paths.find("dir2/dir21/dir212", false), None);
} }
} }

View File

@@ -1,23 +1,29 @@
use std::{collections::HashMap, str::FromStr, sync::Arc}; use std::{collections::HashMap, str::FromStr};
use crate::{args::Args, server::Request}; use crate::{auth::get_auth_user, server::Request};
pub const DEFAULT_LOG_FORMAT: &str = r#"$remote_addr "$request" $status"#; pub const DEFAULT_LOG_FORMAT: &str = r#"$remote_addr "$request" $status"#;
#[derive(Debug)] #[derive(Debug, Clone, PartialEq)]
pub struct LogHttp { pub struct HttpLogger {
elements: Vec<LogElement>, elements: Vec<LogElement>,
} }
#[derive(Debug)] impl Default for HttpLogger {
fn default() -> Self {
DEFAULT_LOG_FORMAT.parse().unwrap()
}
}
#[derive(Debug, Clone, PartialEq)]
enum LogElement { enum LogElement {
Variable(String), Variable(String),
Header(String), Header(String),
Literal(String), Literal(String),
} }
impl LogHttp { impl HttpLogger {
pub fn data(&self, req: &Request, args: &Arc<Args>) -> HashMap<String, String> { pub fn data(&self, req: &Request) -> HashMap<String, String> {
let mut data = HashMap::default(); let mut data = HashMap::default();
for element in self.elements.iter() { for element in self.elements.iter() {
match element { match element {
@@ -26,10 +32,8 @@ impl LogHttp {
data.insert(name.to_string(), format!("{} {}", req.method(), req.uri())); data.insert(name.to_string(), format!("{} {}", req.method(), req.uri()));
} }
"remote_user" => { "remote_user" => {
if let Some(user) = req if let Some(user) =
.headers() req.headers().get("authorization").and_then(get_auth_user)
.get("authorization")
.and_then(|v| args.auth_method.get_user(v))
{ {
data.insert(name.to_string(), user); data.insert(name.to_string(), user);
} }
@@ -66,7 +70,7 @@ impl LogHttp {
} }
} }
impl FromStr for LogHttp { impl FromStr for HttpLogger {
type Err = anyhow::Error; type Err = anyhow::Error;
fn from_str(s: &str) -> Result<Self, Self::Err> { fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut elements = vec![]; let mut elements = vec![];

105
src/http_utils.rs Normal file
View File

@@ -0,0 +1,105 @@
use bytes::{Bytes, BytesMut};
use futures_util::Stream;
use http_body_util::{combinators::BoxBody, BodyExt, Full};
use hyper::body::{Body, Incoming};
use std::{
pin::Pin,
task::{Context, Poll},
};
use tokio::io::AsyncRead;
use tokio_util::io::poll_read_buf;
/// Adapter that exposes a hyper request body (`Incoming`) as a
/// `futures_util::Stream` of raw data chunks, so it can be consumed
/// with stream combinators (e.g. wrapped in a `StreamReader`).
#[derive(Debug)]
pub struct IncomingStream {
    // The underlying hyper body, drained frame by frame in `poll_next`.
    inner: Incoming,
}
impl IncomingStream {
pub fn new(inner: Incoming) -> Self {
Self { inner }
}
}
impl Stream for IncomingStream {
    type Item = Result<Bytes, anyhow::Error>;

    /// Polls the underlying body, skipping non-data frames
    /// until either a data chunk or end-of-stream is reached.
    #[inline]
    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        loop {
            let polled = futures_util::ready!(Pin::new(&mut self.inner).poll_frame(cx)?);
            let Some(frame) = polled else {
                // Body is exhausted.
                return Poll::Ready(None);
            };
            if let Ok(data) = frame.into_data() {
                return Poll::Ready(Some(Ok(data)));
            }
            // Not a data frame (e.g. trailers): discard and poll again.
        }
    }
}
pin_project_lite::pin_project! {
    /// Wraps an `AsyncRead` and yields at most `remaining` bytes as a
    /// `Stream` of `Bytes` chunks (used e.g. to serve HTTP range requests).
    pub struct LengthLimitedStream<R> {
        // Set to `None` once the limit is reached, EOF is hit, or an I/O
        // error occurs, so later polls return `None` without touching I/O.
        #[pin]
        reader: Option<R>,
        // Number of bytes still allowed to be emitted before the stream ends.
        remaining: usize,
        // Scratch buffer the reader fills; split off per emitted chunk.
        buf: BytesMut,
        // Capacity reserved in `buf` before each read.
        capacity: usize,
    }
}
impl<R> LengthLimitedStream<R> {
    /// Creates a stream that yields at most `limit` bytes read from `reader`.
    pub fn new(reader: R, limit: usize) -> Self {
        // Default size reserved in the scratch buffer before each read.
        const DEFAULT_CHUNK_CAPACITY: usize = 4096;
        Self {
            capacity: DEFAULT_CHUNK_CAPACITY,
            buf: BytesMut::new(),
            remaining: limit,
            reader: Some(reader),
        }
    }
}
impl<R: AsyncRead> Stream for LengthLimitedStream<R> {
    type Item = std::io::Result<Bytes>;

    /// Reads from the inner reader and yields chunks until `remaining`
    /// bytes have been emitted, EOF is reached, or an I/O error occurs.
    /// In every terminal case the reader is dropped (set to `None`) so
    /// subsequent polls return `None` immediately.
    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        // Project through `as_mut()` first so `self` can be re-projected
        // below when the reader slot must be cleared.
        let mut this = self.as_mut().project();
        // Limit already exhausted: fuse the stream and finish.
        if *this.remaining == 0 {
            self.project().reader.set(None);
            return Poll::Ready(None);
        }
        // Reader already dropped by an earlier terminal poll.
        let reader = match this.reader.as_pin_mut() {
            Some(r) => r,
            None => return Poll::Ready(None),
        };
        // `split()` below hands the buffer's storage to the emitted chunk,
        // so re-reserve capacity before reading when it has run out.
        if this.buf.capacity() == 0 {
            this.buf.reserve(*this.capacity);
        }
        match poll_read_buf(reader, cx, &mut this.buf) {
            Poll::Pending => Poll::Pending,
            Poll::Ready(Err(err)) => {
                // Surface the error once, then behave as finished.
                self.project().reader.set(None);
                Poll::Ready(Some(Err(err)))
            }
            Poll::Ready(Ok(0)) => {
                // EOF before the limit was reached.
                self.project().reader.set(None);
                Poll::Ready(None)
            }
            Poll::Ready(Ok(_)) => {
                // Take everything read so far; cap the emitted chunk at the
                // remaining budget — any bytes past the limit are discarded.
                let mut chunk = this.buf.split();
                let chunk_size = (*this.remaining).min(chunk.len());
                chunk.truncate(chunk_size);
                *this.remaining -= chunk_size;
                Poll::Ready(Some(Ok(chunk.freeze())))
            }
        }
    }
}
pub fn body_full(content: impl Into<hyper::body::Bytes>) -> BoxBody<Bytes, anyhow::Error> {
Full::new(content.into())
.map_err(anyhow::Error::new)
.boxed()
}

View File

@@ -1,38 +1,37 @@
mod args; mod args;
mod auth; mod auth;
mod log_http; mod http_logger;
mod http_utils;
mod logger; mod logger;
mod server; mod server;
mod streamer;
#[cfg(feature = "tls")]
mod tls;
#[cfg(unix)]
mod unix;
mod utils; mod utils;
#[macro_use] #[macro_use]
extern crate log; extern crate log;
use crate::args::{build_cli, print_completions, Args}; use crate::args::{build_cli, print_completions, Args};
use crate::server::{Request, Server}; use crate::server::Server;
#[cfg(feature = "tls")] #[cfg(feature = "tls")]
use crate::tls::{TlsAcceptor, TlsStream}; use crate::utils::{load_certs, load_private_key};
use anyhow::{anyhow, Context, Result}; use anyhow::{anyhow, Context, Result};
use std::net::{IpAddr, SocketAddr, TcpListener as StdTcpListener};
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use args::BindAddr; use args::BindAddr;
use clap_complete::Shell; use clap_complete::Shell;
use futures::future::join_all; use futures_util::future::join_all;
use tokio::net::TcpListener;
use tokio::task::JoinHandle;
use hyper::server::conn::{AddrIncoming, AddrStream}; use hyper::{body::Incoming, service::service_fn, Request};
use hyper::service::{make_service_fn, service_fn}; use hyper_util::{
rt::{TokioExecutor, TokioIo},
server::conn::auto::Builder,
};
use std::net::{IpAddr, SocketAddr, TcpListener as StdTcpListener};
use std::sync::{
atomic::{AtomicBool, Ordering},
Arc,
};
use tokio::{net::TcpListener, task::JoinHandle};
#[cfg(feature = "tls")] #[cfg(feature = "tls")]
use rustls::ServerConfig; use tokio_rustls::{rustls::ServerConfig, TlsAcceptor};
#[tokio::main] #[tokio::main]
async fn main() -> Result<()> { async fn main() -> Result<()> {
@@ -45,10 +44,10 @@ async fn main() -> Result<()> {
return Ok(()); return Ok(());
} }
let args = Args::parse(matches)?; let args = Args::parse(matches)?;
let args = Arc::new(args);
let running = Arc::new(AtomicBool::new(true)); let running = Arc::new(AtomicBool::new(true));
let handles = serve(args.clone(), running.clone())?; let listening = print_listening(&args)?;
print_listening(args)?; let handles = serve(args, running.clone())?;
println!("{listening}");
tokio::select! { tokio::select! {
ret = join_all(handles) => { ret = join_all(handles) => {
@@ -66,58 +65,66 @@ async fn main() -> Result<()> {
} }
} }
fn serve( fn serve(args: Args, running: Arc<AtomicBool>) -> Result<Vec<JoinHandle<()>>> {
args: Arc<Args>, let addrs = args.addrs.clone();
running: Arc<AtomicBool>,
) -> Result<Vec<JoinHandle<Result<(), hyper::Error>>>> {
let inner = Arc::new(Server::init(args.clone(), running)?);
let mut handles = vec![];
let port = args.port; let port = args.port;
for bind_addr in args.addrs.iter() { let tls_config = (args.tls_cert.clone(), args.tls_key.clone());
let inner = inner.clone(); let server_handle = Arc::new(Server::init(args, running)?);
let serve_func = move |remote_addr: Option<SocketAddr>| { let mut handles = vec![];
let inner = inner.clone(); for bind_addr in addrs.iter() {
async move { let server_handle = server_handle.clone();
Ok::<_, hyper::Error>(service_fn(move |req: Request| {
let inner = inner.clone();
inner.call(req, remote_addr)
}))
}
};
match bind_addr { match bind_addr {
BindAddr::Address(ip) => { BindAddr::Address(ip) => {
let incoming = create_addr_incoming(SocketAddr::new(*ip, port)) let listener = create_listener(SocketAddr::new(*ip, port))
.with_context(|| format!("Failed to bind `{ip}:{port}`"))?; .with_context(|| format!("Failed to bind `{ip}:{port}`"))?;
match args.tls.as_ref() {
match &tls_config {
#[cfg(feature = "tls")] #[cfg(feature = "tls")]
Some((certs, key)) => { (Some(cert_file), Some(key_file)) => {
let config = ServerConfig::builder() let certs = load_certs(cert_file)?;
.with_safe_defaults() let key = load_private_key(key_file)?;
let mut config = ServerConfig::builder()
.with_no_client_auth() .with_no_client_auth()
.with_single_cert(certs.clone(), key.clone())?; .with_single_cert(certs, key)?;
config.alpn_protocols = vec![b"h2".to_vec(), b"http/1.1".to_vec()];
let config = Arc::new(config); let config = Arc::new(config);
let accepter = TlsAcceptor::new(config.clone(), incoming); let tls_accepter = TlsAcceptor::from(config);
let new_service = make_service_fn(move |socket: &TlsStream| {
let remote_addr = socket.remote_addr(); let handle = tokio::spawn(async move {
serve_func(Some(remote_addr)) loop {
let (cnx, addr) = listener.accept().await.unwrap();
let Ok(stream) = tls_accepter.accept(cnx).await else {
warn!("During cls handshake connection from {}", addr);
continue;
};
let stream = TokioIo::new(stream);
tokio::spawn(handle_stream(
server_handle.clone(),
stream,
Some(addr),
));
}
}); });
let server =
tokio::spawn(hyper::Server::builder(accepter).serve(new_service)); handles.push(handle);
handles.push(server);
} }
#[cfg(not(feature = "tls"))] (None, None) => {
Some(_) => { let handle = tokio::spawn(async move {
loop {
let (cnx, addr) = listener.accept().await.unwrap();
let stream = TokioIo::new(cnx);
tokio::spawn(handle_stream(
server_handle.clone(),
stream,
Some(addr),
));
}
});
handles.push(handle);
}
_ => {
unreachable!() unreachable!()
} }
None => {
let new_service = make_service_fn(move |socket: &AddrStream| {
let remote_addr = socket.remote_addr();
serve_func(Some(remote_addr))
});
let server =
tokio::spawn(hyper::Server::builder(incoming).serve(new_service));
handles.push(server);
}
}; };
} }
BindAddr::Path(path) => { BindAddr::Path(path) => {
@@ -128,10 +135,15 @@ fn serve(
{ {
let listener = tokio::net::UnixListener::bind(path) let listener = tokio::net::UnixListener::bind(path)
.with_context(|| format!("Failed to bind `{}`", path.display()))?; .with_context(|| format!("Failed to bind `{}`", path.display()))?;
let acceptor = unix::UnixAcceptor::from_listener(listener); let handle = tokio::spawn(async move {
let new_service = make_service_fn(move |_| serve_func(None)); loop {
let server = tokio::spawn(hyper::Server::builder(acceptor).serve(new_service)); let (cnx, _) = listener.accept().await.unwrap();
handles.push(server); let stream = TokioIo::new(cnx);
tokio::spawn(handle_stream(server_handle.clone(), stream, None));
}
});
handles.push(handle);
} }
} }
} }
@@ -139,7 +151,30 @@ fn serve(
Ok(handles) Ok(handles)
} }
fn create_addr_incoming(addr: SocketAddr) -> Result<AddrIncoming> { async fn handle_stream<T>(handle: Arc<Server>, stream: TokioIo<T>, addr: Option<SocketAddr>)
where
T: tokio::io::AsyncRead + tokio::io::AsyncWrite + Unpin + Send + 'static,
{
let hyper_service =
service_fn(move |request: Request<Incoming>| handle.clone().call(request, addr));
let ret = Builder::new(TokioExecutor::new())
.serve_connection_with_upgrades(stream, hyper_service)
.await;
if let Err(err) = ret {
let scope = match addr {
Some(addr) => format!(" from {}", addr),
None => String::new(),
};
match err.downcast_ref::<std::io::Error>() {
Some(err) if err.kind() == std::io::ErrorKind::UnexpectedEof => {}
_ => warn!("Serving connection{}: {}", scope, err),
}
}
}
fn create_listener(addr: SocketAddr) -> Result<TcpListener> {
use socket2::{Domain, Protocol, Socket, Type}; use socket2::{Domain, Protocol, Socket, Type};
let socket = Socket::new(Domain::for_address(addr), Type::STREAM, Some(Protocol::TCP))?; let socket = Socket::new(Domain::for_address(addr), Type::STREAM, Some(Protocol::TCP))?;
if addr.is_ipv6() { if addr.is_ipv6() {
@@ -150,11 +185,12 @@ fn create_addr_incoming(addr: SocketAddr) -> Result<AddrIncoming> {
socket.listen(1024 /* Default backlog */)?; socket.listen(1024 /* Default backlog */)?;
let std_listener = StdTcpListener::from(socket); let std_listener = StdTcpListener::from(socket);
std_listener.set_nonblocking(true)?; std_listener.set_nonblocking(true)?;
let incoming = AddrIncoming::from_listener(TcpListener::from_std(std_listener)?)?; let listener = TcpListener::from_std(std_listener)?;
Ok(incoming) Ok(listener)
} }
fn print_listening(args: Arc<Args>) -> Result<()> { fn print_listening(args: &Args) -> Result<String> {
let mut output = String::new();
let mut bind_addrs = vec![]; let mut bind_addrs = vec![];
let (mut ipv4, mut ipv6) = (false, false); let (mut ipv4, mut ipv6) = (false, false);
for bind_addr in args.addrs.iter() { for bind_addr in args.addrs.iter() {
@@ -195,7 +231,11 @@ fn print_listening(args: Arc<Args>) -> Result<()> {
IpAddr::V4(_) => format!("{}:{}", addr, args.port), IpAddr::V4(_) => format!("{}:{}", addr, args.port),
IpAddr::V6(_) => format!("[{}]:{}", addr, args.port), IpAddr::V6(_) => format!("[{}]:{}", addr, args.port),
}; };
let protocol = if args.tls.is_some() { "https" } else { "http" }; let protocol = if args.tls_cert.is_some() {
"https"
} else {
"http"
};
format!("{}://{}{}", protocol, addr, args.uri_prefix) format!("{}://{}{}", protocol, addr, args.uri_prefix)
} }
BindAddr::Path(path) => path.display().to_string(), BindAddr::Path(path) => path.display().to_string(),
@@ -203,17 +243,17 @@ fn print_listening(args: Arc<Args>) -> Result<()> {
.collect::<Vec<_>>(); .collect::<Vec<_>>();
if urls.len() == 1 { if urls.len() == 1 {
println!("Listening on {}", urls[0]); output.push_str(&format!("Listening on {}", urls[0]))
} else { } else {
let info = urls let info = urls
.iter() .iter()
.map(|v| format!(" {v}")) .map(|v| format!(" {v}"))
.collect::<Vec<String>>() .collect::<Vec<String>>()
.join("\n"); .join("\n");
println!("Listening on:\n{info}\n"); output.push_str(&format!("Listening on:\n{info}\n"))
} }
Ok(()) Ok(output)
} }
async fn shutdown_signal() { async fn shutdown_signal() {

View File

@@ -1,48 +1,56 @@
#![allow(clippy::too_many_arguments)] #![allow(clippy::too_many_arguments)]
use crate::auth::{AccessPaths, AccessPerm}; use crate::auth::{www_authenticate, AccessPaths, AccessPerm};
use crate::streamer::Streamer; use crate::http_utils::{body_full, IncomingStream, LengthLimitedStream};
use crate::utils::{ use crate::utils::{
decode_uri, encode_uri, get_file_mtime_and_mode, get_file_name, glob, try_get_file_name, decode_uri, encode_uri, get_file_mtime_and_mode, get_file_name, glob, parse_range,
try_get_file_name,
}; };
use crate::Args; use crate::Args;
use anyhow::{anyhow, Result};
use walkdir::WalkDir;
use xml::escape::escape_str_pcdata;
use async_zip::tokio::write::ZipFileWriter; use anyhow::{anyhow, Result};
use async_zip::{Compression, ZipDateTime, ZipEntryBuilder}; use async_zip::{tokio::write::ZipFileWriter, Compression, ZipDateTime, ZipEntryBuilder};
use bytes::Bytes;
use chrono::{LocalResult, TimeZone, Utc}; use chrono::{LocalResult, TimeZone, Utc};
use futures::TryStreamExt; use futures_util::{pin_mut, TryStreamExt};
use headers::{ use headers::{
AcceptRanges, AccessControlAllowCredentials, AccessControlAllowOrigin, ContentLength, AcceptRanges, AccessControlAllowCredentials, AccessControlAllowOrigin, CacheControl,
ContentType, ETag, HeaderMap, HeaderMapExt, IfModifiedSince, IfNoneMatch, IfRange, ContentLength, ContentType, ETag, HeaderMap, HeaderMapExt, IfModifiedSince, IfNoneMatch,
LastModified, Range, IfRange, LastModified, Range,
}; };
use hyper::header::{ use http_body_util::{combinators::BoxBody, BodyExt, StreamBody};
HeaderValue, AUTHORIZATION, CONTENT_DISPOSITION, CONTENT_LENGTH, CONTENT_RANGE, CONTENT_TYPE, use hyper::body::Frame;
RANGE, WWW_AUTHENTICATE, use hyper::{
body::Incoming,
header::{
HeaderValue, AUTHORIZATION, CONTENT_DISPOSITION, CONTENT_LENGTH, CONTENT_RANGE,
CONTENT_TYPE, RANGE,
},
Method, StatusCode, Uri,
}; };
use hyper::{Body, Method, StatusCode, Uri};
use serde::Serialize; use serde::Serialize;
use std::borrow::Cow; use std::borrow::Cow;
use std::cmp::Ordering;
use std::collections::HashMap; use std::collections::HashMap;
use std::fs::Metadata; use std::fs::Metadata;
use std::io::SeekFrom; use std::io::SeekFrom;
use std::net::SocketAddr; use std::net::SocketAddr;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::atomic::{self, AtomicBool};
use std::sync::Arc; use std::sync::Arc;
use std::time::SystemTime; use std::time::SystemTime;
use tokio::fs::File; use tokio::fs::File;
use tokio::io::{AsyncReadExt, AsyncSeekExt, AsyncWrite}; use tokio::io::{AsyncReadExt, AsyncSeekExt, AsyncWrite};
use tokio::{fs, io}; use tokio::{fs, io};
use tokio_util::compat::FuturesAsyncWriteCompatExt;
use tokio_util::io::StreamReader;
use uuid::Uuid;
pub type Request = hyper::Request<Body>; use tokio_util::compat::FuturesAsyncWriteCompatExt;
pub type Response = hyper::Response<Body>; use tokio_util::io::{ReaderStream, StreamReader};
use uuid::Uuid;
use walkdir::WalkDir;
use xml::escape::escape_str_pcdata;
pub type Request = hyper::Request<Incoming>;
pub type Response = hyper::Response<BoxBody<Bytes, anyhow::Error>>;
const INDEX_HTML: &str = include_str!("../assets/index.html"); const INDEX_HTML: &str = include_str!("../assets/index.html");
const INDEX_CSS: &str = include_str!("../assets/index.css"); const INDEX_CSS: &str = include_str!("../assets/index.css");
@@ -50,10 +58,11 @@ const INDEX_JS: &str = include_str!("../assets/index.js");
const FAVICON_ICO: &[u8] = include_bytes!("../assets/favicon.ico"); const FAVICON_ICO: &[u8] = include_bytes!("../assets/favicon.ico");
const INDEX_NAME: &str = "index.html"; const INDEX_NAME: &str = "index.html";
const BUF_SIZE: usize = 65536; const BUF_SIZE: usize = 65536;
const TEXT_MAX_SIZE: u64 = 4194304; // 4M const EDITABLE_TEXT_MAX_SIZE: u64 = 4194304; // 4M
const RESUMABLE_UPLOAD_MIN_SIZE: u64 = 20971520; // 20M
pub struct Server { pub struct Server {
args: Arc<Args>, args: Args,
assets_prefix: String, assets_prefix: String,
html: Cow<'static, str>, html: Cow<'static, str>,
single_file_req_paths: Vec<String>, single_file_req_paths: Vec<String>,
@@ -61,7 +70,7 @@ pub struct Server {
} }
impl Server { impl Server {
pub fn init(args: Arc<Args>, running: Arc<AtomicBool>) -> Result<Self> { pub fn init(args: Args, running: Arc<AtomicBool>) -> Result<Self> {
let assets_prefix = format!("{}__dufs_v{}_", args.uri_prefix, env!("CARGO_PKG_VERSION")); let assets_prefix = format!("{}__dufs_v{}_", args.uri_prefix, env!("CARGO_PKG_VERSION"));
let single_file_req_paths = if args.path_is_file { let single_file_req_paths = if args.path_is_file {
vec![ vec![
@@ -70,13 +79,13 @@ impl Server {
encode_uri(&format!( encode_uri(&format!(
"{}{}", "{}{}",
&args.uri_prefix, &args.uri_prefix,
get_file_name(&args.path) get_file_name(&args.serve_path)
)), )),
] ]
} else { } else {
vec![] vec![]
}; };
let html = match args.assets_path.as_ref() { let html = match args.assets.as_ref() {
Some(path) => Cow::Owned(std::fs::read_to_string(path.join("index.html"))?), Some(path) => Cow::Owned(std::fs::read_to_string(path.join("index.html"))?),
None => Cow::Borrowed(INDEX_HTML), None => Cow::Borrowed(INDEX_HTML),
}; };
@@ -95,9 +104,9 @@ impl Server {
addr: Option<SocketAddr>, addr: Option<SocketAddr>,
) -> Result<Response, hyper::Error> { ) -> Result<Response, hyper::Error> {
let uri = req.uri().clone(); let uri = req.uri().clone();
let assets_prefix = self.assets_prefix.clone(); let assets_prefix = &self.assets_prefix;
let enable_cors = self.args.enable_cors; let enable_cors = self.args.enable_cors;
let mut http_log_data = self.args.log_http.data(&req, &self.args); let mut http_log_data = self.args.http_logger.data(&req);
if let Some(addr) = addr { if let Some(addr) = addr {
http_log_data.insert("remote_addr".to_string(), addr.ip().to_string()); http_log_data.insert("remote_addr".to_string(), addr.ip().to_string());
} }
@@ -105,8 +114,8 @@ impl Server {
let mut res = match self.clone().handle(req).await { let mut res = match self.clone().handle(req).await {
Ok(res) => { Ok(res) => {
http_log_data.insert("status".to_string(), res.status().as_u16().to_string()); http_log_data.insert("status".to_string(), res.status().as_u16().to_string());
if !uri.path().starts_with(&assets_prefix) { if !uri.path().starts_with(assets_prefix) {
self.args.log_http.log(&http_log_data, None); self.args.http_logger.log(&http_log_data, None);
} }
res res
} }
@@ -116,7 +125,7 @@ impl Server {
*res.status_mut() = status; *res.status_mut() = status;
http_log_data.insert("status".to_string(), status.as_u16().to_string()); http_log_data.insert("status".to_string(), status.as_u16().to_string());
self.args self.args
.log_http .http_logger
.log(&http_log_data, Some(err.to_string())); .log(&http_log_data, Some(err.to_string()));
res res
} }
@@ -148,12 +157,7 @@ impl Server {
} }
}; };
let guard = self.args.auth.guard( let guard = self.args.auth.guard(&relative_path, &method, authorization);
&relative_path,
&method,
authorization,
self.args.auth_method.clone(),
);
let (user, access_paths) = match guard { let (user, access_paths) = match guard {
(None, None) => { (None, None) => {
@@ -184,7 +188,7 @@ impl Server {
.iter() .iter()
.any(|v| v.as_str() == req_path) .any(|v| v.as_str() == req_path)
{ {
self.handle_send_file(&self.args.path, headers, head_only, &mut res) self.handle_send_file(&self.args.serve_path, headers, head_only, &mut res)
.await?; .await?;
} else { } else {
status_not_found(&mut res); status_not_found(&mut res);
@@ -294,7 +298,10 @@ impl Server {
} }
} else if is_file { } else if is_file {
if query_params.contains_key("edit") { if query_params.contains_key("edit") {
self.handle_edit_file(path, head_only, user, &mut res) self.handle_deal_file(path, DataKind::Edit, head_only, user, &mut res)
.await?;
} else if query_params.contains_key("view") {
self.handle_deal_file(path, DataKind::View, head_only, user, &mut res)
.await?; .await?;
} else { } else {
self.handle_send_file(path, headers, head_only, &mut res) self.handle_send_file(path, headers, head_only, &mut res)
@@ -322,10 +329,37 @@ impl Server {
set_webdav_headers(&mut res); set_webdav_headers(&mut res);
} }
Method::PUT => { Method::PUT => {
if !allow_upload || (!allow_delete && is_file && size > 0) { if is_dir || !allow_upload || (!allow_delete && size > 0) {
status_forbid(&mut res); status_forbid(&mut res);
} else { } else {
self.handle_upload(path, req, &mut res).await?; self.handle_upload(path, None, size, req, &mut res).await?;
}
}
Method::PATCH => {
if is_miss {
status_not_found(&mut res);
} else if !allow_upload {
status_forbid(&mut res);
} else {
let offset = match parse_upload_offset(headers, size) {
Ok(v) => v,
Err(err) => {
status_bad_request(&mut res, &err.to_string());
return Ok(res);
}
};
match offset {
Some(offset) => {
if offset < size && !allow_delete {
status_forbid(&mut res);
}
self.handle_upload(path, Some(offset), size, req, &mut res)
.await?;
}
None => {
*res.status_mut() = StatusCode::METHOD_NOT_ALLOWED;
}
}
} }
} }
Method::DELETE => { Method::DELETE => {
@@ -340,12 +374,13 @@ impl Server {
method => match method.as_str() { method => match method.as_str() {
"PROPFIND" => { "PROPFIND" => {
if is_dir { if is_dir {
let access_paths = if access_paths.perm().indexonly() { let access_paths =
// see https://github.com/sigoden/dufs/issues/229 if access_paths.perm().inherit() && authorization.is_none() {
AccessPaths::new(AccessPerm::ReadOnly) // see https://github.com/sigoden/dufs/issues/229
} else { AccessPaths::new(AccessPerm::ReadOnly)
access_paths } else {
}; access_paths
};
self.handle_propfind_dir(path, headers, access_paths, &mut res) self.handle_propfind_dir(path, headers, access_paths, &mut res)
.await?; .await?;
} else if is_file { } else if is_file {
@@ -366,7 +401,7 @@ impl Server {
status_forbid(&mut res); status_forbid(&mut res);
} else if !is_miss { } else if !is_miss {
*res.status_mut() = StatusCode::METHOD_NOT_ALLOWED; *res.status_mut() = StatusCode::METHOD_NOT_ALLOWED;
*res.body_mut() = Body::from("Already exists"); *res.body_mut() = body_full("Already exists");
} else { } else {
self.handle_mkcol(path, &mut res).await?; self.handle_mkcol(path, &mut res).await?;
} }
@@ -412,28 +447,48 @@ impl Server {
Ok(res) Ok(res)
} }
async fn handle_upload(&self, path: &Path, mut req: Request, res: &mut Response) -> Result<()> { async fn handle_upload(
&self,
path: &Path,
upload_offset: Option<u64>,
size: u64,
req: Request,
res: &mut Response,
) -> Result<()> {
ensure_path_parent(path).await?; ensure_path_parent(path).await?;
let (mut file, status) = match upload_offset {
let mut file = match fs::File::create(&path).await { None => (fs::File::create(path).await?, StatusCode::CREATED),
Ok(v) => v, Some(offset) if offset == size => (
Err(_) => { fs::OpenOptions::new().append(true).open(path).await?,
status_forbid(res); StatusCode::NO_CONTENT,
return Ok(()); ),
Some(offset) => {
let mut file = fs::OpenOptions::new().write(true).open(path).await?;
file.seek(SeekFrom::Start(offset)).await?;
(file, StatusCode::NO_CONTENT)
} }
}; };
let stream = IncomingStream::new(req.into_body());
let body_with_io_error = req let body_with_io_error = stream.map_err(|err| io::Error::new(io::ErrorKind::Other, err));
.body_mut()
.map_err(|err| io::Error::new(io::ErrorKind::Other, err));
let body_reader = StreamReader::new(body_with_io_error); let body_reader = StreamReader::new(body_with_io_error);
futures::pin_mut!(body_reader); pin_mut!(body_reader);
io::copy(&mut body_reader, &mut file).await?; let ret = io::copy(&mut body_reader, &mut file).await;
let size = fs::metadata(path)
.await
.map(|v| v.len())
.unwrap_or_default();
if ret.is_err() {
if upload_offset.is_none() && size < RESUMABLE_UPLOAD_MIN_SIZE {
let _ = tokio::fs::remove_file(&path).await;
}
ret?;
}
*res.status_mut() = status;
*res.status_mut() = StatusCode::CREATED;
Ok(()) Ok(())
} }
@@ -459,7 +514,7 @@ impl Server {
) -> Result<()> { ) -> Result<()> {
let mut paths = vec![]; let mut paths = vec![];
if exist { if exist {
paths = match self.list_dir(path, path, access_paths).await { paths = match self.list_dir(path, path, access_paths.clone()).await {
Ok(paths) => paths, Ok(paths) => paths,
Err(_) => { Err(_) => {
status_forbid(res); status_forbid(res);
@@ -467,7 +522,16 @@ impl Server {
} }
} }
}; };
self.send_index(path, paths, exist, query_params, head_only, user, res) self.send_index(
path,
paths,
exist,
query_params,
head_only,
user,
access_paths,
res,
)
} }
async fn handle_search_dir( async fn handle_search_dir(
@@ -484,17 +548,23 @@ impl Server {
.get("q") .get("q")
.ok_or_else(|| anyhow!("invalid q"))? .ok_or_else(|| anyhow!("invalid q"))?
.to_lowercase(); .to_lowercase();
if !search.is_empty() { if search.is_empty() {
return self
.handle_ls_dir(path, true, query_params, head_only, user, access_paths, res)
.await;
} else {
let path_buf = path.to_path_buf(); let path_buf = path.to_path_buf();
let hidden = Arc::new(self.args.hidden.to_vec()); let hidden = Arc::new(self.args.hidden.to_vec());
let hidden = hidden.clone(); let hidden = hidden.clone();
let running = self.running.clone(); let running = self.running.clone();
let access_paths = access_paths.clone();
let search_paths = tokio::task::spawn_blocking(move || { let search_paths = tokio::task::spawn_blocking(move || {
let mut paths: Vec<PathBuf> = vec![]; let mut paths: Vec<PathBuf> = vec![];
for dir in access_paths.leaf_paths(&path_buf) { for dir in access_paths.child_paths(&path_buf) {
let mut it = WalkDir::new(&dir).into_iter(); let mut it = WalkDir::new(&dir).into_iter();
it.next();
while let Some(Ok(entry)) = it.next() { while let Some(Ok(entry)) = it.next() {
if !running.load(Ordering::SeqCst) { if !running.load(atomic::Ordering::SeqCst) {
break; break;
} }
let entry_path = entry.path(); let entry_path = entry.path();
@@ -532,7 +602,16 @@ impl Server {
} }
} }
} }
self.send_index(path, paths, true, query_params, head_only, user, res) self.send_index(
path,
paths,
true,
query_params,
head_only,
user,
access_paths,
res,
)
} }
async fn handle_zip_dir( async fn handle_zip_dir(
@@ -553,13 +632,29 @@ impl Server {
let path = path.to_owned(); let path = path.to_owned();
let hidden = self.args.hidden.clone(); let hidden = self.args.hidden.clone();
let running = self.running.clone(); let running = self.running.clone();
let compression = self.args.compress.to_compression();
tokio::spawn(async move { tokio::spawn(async move {
if let Err(e) = zip_dir(&mut writer, &path, access_paths, &hidden, running).await { if let Err(e) = zip_dir(
&mut writer,
&path,
access_paths,
&hidden,
compression,
running,
)
.await
{
error!("Failed to zip {}, {}", path.display(), e); error!("Failed to zip {}, {}", path.display(), e);
} }
}); });
let reader = Streamer::new(reader, BUF_SIZE); let reader_stream = ReaderStream::new(reader);
*res.body_mut() = Body::wrap_stream(reader.into_stream()); let stream_body = StreamBody::new(
reader_stream
.map_ok(Frame::data)
.map_err(|err| anyhow!("{err}")),
);
let boxed_body = stream_body.boxed();
*res.body_mut() = boxed_body;
Ok(()) Ok(())
} }
@@ -599,7 +694,7 @@ impl Server {
res: &mut Response, res: &mut Response,
) -> Result<()> { ) -> Result<()> {
if path.extension().is_none() { if path.extension().is_none() {
let path = self.args.path.join(INDEX_NAME); let path = self.args.serve_path.join(INDEX_NAME);
self.handle_send_file(&path, headers, head_only, res) self.handle_send_file(&path, headers, head_only, res)
.await?; .await?;
} else { } else {
@@ -615,26 +710,28 @@ impl Server {
res: &mut Response, res: &mut Response,
) -> Result<bool> { ) -> Result<bool> {
if let Some(name) = req_path.strip_prefix(&self.assets_prefix) { if let Some(name) = req_path.strip_prefix(&self.assets_prefix) {
match self.args.assets_path.as_ref() { match self.args.assets.as_ref() {
Some(assets_path) => { Some(assets_path) => {
let path = assets_path.join(name); let path = assets_path.join(name);
self.handle_send_file(&path, headers, false, res).await?; self.handle_send_file(&path, headers, false, res).await?;
} }
None => match name { None => match name {
"index.js" => { "index.js" => {
*res.body_mut() = Body::from(INDEX_JS); *res.body_mut() = body_full(INDEX_JS);
res.headers_mut().insert( res.headers_mut().insert(
"content-type", "content-type",
HeaderValue::from_static("application/javascript"), HeaderValue::from_static("application/javascript; charset=UTF-8"),
); );
} }
"index.css" => { "index.css" => {
*res.body_mut() = Body::from(INDEX_CSS); *res.body_mut() = body_full(INDEX_CSS);
res.headers_mut() res.headers_mut().insert(
.insert("content-type", HeaderValue::from_static("text/css")); "content-type",
HeaderValue::from_static("text/css; charset=UTF-8"),
);
} }
"favicon.ico" => { "favicon.ico" => {
*res.body_mut() = Body::from(FAVICON_ICO); *res.body_mut() = body_full(FAVICON_ICO);
res.headers_mut() res.headers_mut()
.insert("content-type", HeaderValue::from_static("image/x-icon")); .insert("content-type", HeaderValue::from_static("image/x-icon"));
} }
@@ -645,7 +742,11 @@ impl Server {
} }
res.headers_mut().insert( res.headers_mut().insert(
"cache-control", "cache-control",
HeaderValue::from_static("max-age=2592000, public"), HeaderValue::from_static("public, max-age=31536000, immutable"),
);
res.headers_mut().insert(
"x-content-type-options",
HeaderValue::from_static("nosniff"),
); );
Ok(true) Ok(true)
} else { } else {
@@ -662,6 +763,7 @@ impl Server {
) -> Result<()> { ) -> Result<()> {
let (file, meta) = tokio::join!(fs::File::open(path), fs::metadata(path),); let (file, meta) = tokio::join!(fs::File::open(path), fs::metadata(path),);
let (mut file, meta) = (file?, meta?); let (mut file, meta) = (file?, meta?);
let size = meta.len();
let mut use_range = true; let mut use_range = true;
if let Some((etag, last_modified)) = extract_cache_headers(&meta) { if let Some((etag, last_modified)) = extract_cache_headers(&meta) {
let cached = { let cached = {
@@ -693,7 +795,12 @@ impl Server {
} }
let range = if use_range { let range = if use_range {
parse_range(headers) headers.get(RANGE).map(|range| {
range
.to_str()
.ok()
.and_then(|range| parse_range(range, size))
})
} else { } else {
None None
}; };
@@ -708,27 +815,27 @@ impl Server {
res.headers_mut().typed_insert(AcceptRanges::bytes()); res.headers_mut().typed_insert(AcceptRanges::bytes());
let size = meta.len();
if let Some(range) = range { if let Some(range) = range {
if range if let Some((start, end)) = range {
.end file.seek(SeekFrom::Start(start)).await?;
.map_or_else(|| range.start < size, |v| v >= range.start) let range_size = end - start + 1;
&& file.seek(SeekFrom::Start(range.start)).await.is_ok()
{
let end = range.end.unwrap_or(size - 1).min(size - 1);
let part_size = end - range.start + 1;
let reader = Streamer::new(file, BUF_SIZE);
*res.status_mut() = StatusCode::PARTIAL_CONTENT; *res.status_mut() = StatusCode::PARTIAL_CONTENT;
let content_range = format!("bytes {}-{}/{}", range.start, end, size); let content_range = format!("bytes {}-{}/{}", start, end, size);
res.headers_mut() res.headers_mut()
.insert(CONTENT_RANGE, content_range.parse()?); .insert(CONTENT_RANGE, content_range.parse()?);
res.headers_mut() res.headers_mut()
.insert(CONTENT_LENGTH, format!("{part_size}").parse()?); .insert(CONTENT_LENGTH, format!("{range_size}").parse()?);
if head_only { if head_only {
return Ok(()); return Ok(());
} }
*res.body_mut() = Body::wrap_stream(reader.into_stream_sized(part_size));
let stream_body = StreamBody::new(
LengthLimitedStream::new(file, range_size as usize)
.map_ok(Frame::data)
.map_err(|err| anyhow!("{err}")),
);
let boxed_body = stream_body.boxed();
*res.body_mut() = boxed_body;
} else { } else {
*res.status_mut() = StatusCode::RANGE_NOT_SATISFIABLE; *res.status_mut() = StatusCode::RANGE_NOT_SATISFIABLE;
res.headers_mut() res.headers_mut()
@@ -740,28 +847,40 @@ impl Server {
if head_only { if head_only {
return Ok(()); return Ok(());
} }
let reader = Streamer::new(file, BUF_SIZE);
*res.body_mut() = Body::wrap_stream(reader.into_stream()); let reader_stream = ReaderStream::new(file);
let stream_body = StreamBody::new(
reader_stream
.map_ok(Frame::data)
.map_err(|err| anyhow!("{err}")),
);
let boxed_body = stream_body.boxed();
*res.body_mut() = boxed_body;
} }
Ok(()) Ok(())
} }
async fn handle_edit_file( async fn handle_deal_file(
&self, &self,
path: &Path, path: &Path,
kind: DataKind,
head_only: bool, head_only: bool,
user: Option<String>, user: Option<String>,
res: &mut Response, res: &mut Response,
) -> Result<()> { ) -> Result<()> {
let (file, meta) = tokio::join!(fs::File::open(path), fs::metadata(path),); let (file, meta) = tokio::join!(fs::File::open(path), fs::metadata(path),);
let (file, meta) = (file?, meta?); let (file, meta) = (file?, meta?);
let href = format!("/{}", normalize_path(path.strip_prefix(&self.args.path)?)); let href = format!(
"/{}",
normalize_path(path.strip_prefix(&self.args.serve_path)?)
);
let mut buffer: Vec<u8> = vec![]; let mut buffer: Vec<u8> = vec![];
file.take(1024).read_to_end(&mut buffer).await?; file.take(1024).read_to_end(&mut buffer).await?;
let editable = meta.len() <= TEXT_MAX_SIZE && content_inspector::inspect(&buffer).is_text(); let editable =
meta.len() <= EDITABLE_TEXT_MAX_SIZE && content_inspector::inspect(&buffer).is_text();
let data = EditData { let data = EditData {
href, href,
kind: DataKind::Edit, kind,
uri_prefix: self.args.uri_prefix.clone(), uri_prefix: self.args.uri_prefix.clone(),
allow_upload: self.args.allow_upload, allow_upload: self.args.allow_upload,
allow_delete: self.args.allow_delete, allow_delete: self.args.allow_delete,
@@ -773,14 +892,14 @@ impl Server {
.typed_insert(ContentType::from(mime_guess::mime::TEXT_HTML_UTF_8)); .typed_insert(ContentType::from(mime_guess::mime::TEXT_HTML_UTF_8));
let output = self let output = self
.html .html
.replace("__ASSERTS_PREFIX__", &self.assets_prefix) .replace("__ASSETS_PREFIX__", &self.assets_prefix)
.replace("__INDEX_DATA__", &serde_json::to_string(&data)?); .replace("__INDEX_DATA__", &serde_json::to_string(&data)?);
res.headers_mut() res.headers_mut()
.typed_insert(ContentLength(output.as_bytes().len() as u64)); .typed_insert(ContentLength(output.as_bytes().len() as u64));
if head_only { if head_only {
return Ok(()); return Ok(());
} }
*res.body_mut() = output.into(); *res.body_mut() = body_full(output);
Ok(()) Ok(())
} }
@@ -795,18 +914,21 @@ impl Server {
Some(v) => match v.to_str().ok().and_then(|v| v.parse().ok()) { Some(v) => match v.to_str().ok().and_then(|v| v.parse().ok()) {
Some(v) => v, Some(v) => v,
None => { None => {
*res.status_mut() = StatusCode::BAD_REQUEST; status_bad_request(res, "");
return Ok(()); return Ok(());
} }
}, },
None => 1, None => 1,
}; };
let mut paths = match self.to_pathitem(path, &self.args.path).await? { let mut paths = match self.to_pathitem(path, &self.args.serve_path).await? {
Some(v) => vec![v], Some(v) => vec![v],
None => vec![], None => vec![],
}; };
if depth != 0 { if depth != 0 {
match self.list_dir(path, &self.args.path, access_paths).await { match self
.list_dir(path, &self.args.serve_path, access_paths)
.await
{
Ok(child) => paths.extend(child), Ok(child) => paths.extend(child),
Err(_) => { Err(_) => {
status_forbid(res); status_forbid(res);
@@ -826,7 +948,7 @@ impl Server {
} }
async fn handle_propfind_file(&self, path: &Path, res: &mut Response) -> Result<()> { async fn handle_propfind_file(&self, path: &Path, res: &mut Response) -> Result<()> {
if let Some(pathitem) = self.to_pathitem(path, &self.args.path).await? { if let Some(pathitem) = self.to_pathitem(path, &self.args.serve_path).await? {
res_multistatus(res, &pathitem.to_dav_xml(self.args.uri_prefix.as_str())); res_multistatus(res, &pathitem.to_dav_xml(self.args.uri_prefix.as_str()));
} else { } else {
status_not_found(res); status_not_found(res);
@@ -892,7 +1014,7 @@ impl Server {
res.headers_mut() res.headers_mut()
.insert("lock-token", format!("<{token}>").parse()?); .insert("lock-token", format!("<{token}>").parse()?);
*res.body_mut() = Body::from(format!( *res.body_mut() = body_full(format!(
r#"<?xml version="1.0" encoding="utf-8"?> r#"<?xml version="1.0" encoding="utf-8"?>
<D:prop xmlns:D="DAV:"><D:lockdiscovery><D:activelock> <D:prop xmlns:D="DAV:"><D:lockdiscovery><D:activelock>
<D:locktoken><D:href>{token}</D:href></D:locktoken> <D:locktoken><D:href>{token}</D:href></D:locktoken>
@@ -926,17 +1048,16 @@ impl Server {
query_params: &HashMap<String, String>, query_params: &HashMap<String, String>,
head_only: bool, head_only: bool,
user: Option<String>, user: Option<String>,
access_paths: AccessPaths,
res: &mut Response, res: &mut Response,
) -> Result<()> { ) -> Result<()> {
if let Some(sort) = query_params.get("sort") { if let Some(sort) = query_params.get("sort") {
if sort == "name" { if sort == "name" {
paths.sort_by(|v1, v2| { paths.sort_by(|v1, v2| v1.sort_by_name(v2))
alphanumeric_sort::compare_str(v1.name.to_lowercase(), v2.name.to_lowercase())
})
} else if sort == "mtime" { } else if sort == "mtime" {
paths.sort_by(|v1, v2| v1.mtime.cmp(&v2.mtime)) paths.sort_by(|v1, v2| v1.sort_by_mtime(v2))
} else if sort == "size" { } else if sort == "size" {
paths.sort_by(|v1, v2| v1.size.unwrap_or(0).cmp(&v2.size.unwrap_or(0))) paths.sort_by(|v1, v2| v1.sort_by_size(v2))
} }
if query_params if query_params
.get("order") .get("order")
@@ -946,7 +1067,7 @@ impl Server {
paths.reverse() paths.reverse()
} }
} else { } else {
paths.sort_unstable(); paths.sort_by(|v1, v2| v1.sort_by_name(v2))
} }
if query_params.contains_key("simple") { if query_params.contains_key("simple") {
let output = paths let output = paths
@@ -964,19 +1085,23 @@ impl Server {
.typed_insert(ContentType::from(mime_guess::mime::TEXT_HTML_UTF_8)); .typed_insert(ContentType::from(mime_guess::mime::TEXT_HTML_UTF_8));
res.headers_mut() res.headers_mut()
.typed_insert(ContentLength(output.as_bytes().len() as u64)); .typed_insert(ContentLength(output.as_bytes().len() as u64));
*res.body_mut() = output.into(); *res.body_mut() = body_full(output);
if head_only { if head_only {
return Ok(()); return Ok(());
} }
return Ok(()); return Ok(());
} }
let href = format!("/{}", normalize_path(path.strip_prefix(&self.args.path)?)); let href = format!(
"/{}",
normalize_path(path.strip_prefix(&self.args.serve_path)?)
);
let readwrite = access_paths.perm().readwrite();
let data = IndexData { let data = IndexData {
kind: DataKind::Index, kind: DataKind::Index,
href, href,
uri_prefix: self.args.uri_prefix.clone(), uri_prefix: self.args.uri_prefix.clone(),
allow_upload: self.args.allow_upload, allow_upload: self.args.allow_upload && readwrite,
allow_delete: self.args.allow_delete, allow_delete: self.args.allow_delete && readwrite,
allow_search: self.args.allow_search, allow_search: self.args.allow_search,
allow_archive: self.args.allow_archive, allow_archive: self.args.allow_archive,
dir_exists: exist, dir_exists: exist,
@@ -992,23 +1117,28 @@ impl Server {
res.headers_mut() res.headers_mut()
.typed_insert(ContentType::from(mime_guess::mime::TEXT_HTML_UTF_8)); .typed_insert(ContentType::from(mime_guess::mime::TEXT_HTML_UTF_8));
self.html self.html
.replace("__ASSERTS_PREFIX__", &self.assets_prefix) .replace("__ASSETS_PREFIX__", &self.assets_prefix)
.replace("__INDEX_DATA__", &serde_json::to_string(&data)?) .replace("__INDEX_DATA__", &serde_json::to_string(&data)?)
}; };
res.headers_mut() res.headers_mut()
.typed_insert(ContentLength(output.as_bytes().len() as u64)); .typed_insert(ContentLength(output.as_bytes().len() as u64));
res.headers_mut()
.typed_insert(CacheControl::new().with_no_cache());
res.headers_mut().insert(
"x-content-type-options",
HeaderValue::from_static("nosniff"),
);
if head_only { if head_only {
return Ok(()); return Ok(());
} }
*res.body_mut() = output.into(); *res.body_mut() = body_full(output);
Ok(()) Ok(())
} }
fn auth_reject(&self, res: &mut Response) -> Result<()> { fn auth_reject(&self, res: &mut Response) -> Result<()> {
let value = self.args.auth_method.www_auth()?;
set_webdav_headers(res); set_webdav_headers(res);
res.headers_mut().insert(WWW_AUTHENTICATE, value.parse()?);
// set 401 to make the browser pop up the login box www_authenticate(res, &self.args)?;
*res.status_mut() = StatusCode::UNAUTHORIZED; *res.status_mut() = StatusCode::UNAUTHORIZED;
Ok(()) Ok(())
} }
@@ -1017,7 +1147,7 @@ impl Server {
fs::canonicalize(path) fs::canonicalize(path)
.await .await
.ok() .ok()
.map(|v| v.starts_with(&self.args.path)) .map(|v| v.starts_with(&self.args.serve_path))
.unwrap_or_default() .unwrap_or_default()
} }
@@ -1040,12 +1170,10 @@ impl Server {
}; };
let authorization = headers.get(AUTHORIZATION); let authorization = headers.get(AUTHORIZATION);
let guard = self.args.auth.guard( let guard = self
&relative_path, .args
req.method(), .auth
authorization, .guard(&relative_path, req.method(), authorization);
self.args.auth_method.clone(),
);
match guard { match guard {
(_, Some(_)) => {} (_, Some(_)) => {}
@@ -1085,14 +1213,14 @@ impl Server {
fn join_path(&self, path: &str) -> Option<PathBuf> { fn join_path(&self, path: &str) -> Option<PathBuf> {
if path.is_empty() { if path.is_empty() {
return Some(self.args.path.clone()); return Some(self.args.serve_path.clone());
} }
let path = if cfg!(windows) { let path = if cfg!(windows) {
path.replace('/', "\\") path.replace('/', "\\")
} else { } else {
path.to_string() path.to_string()
}; };
Some(self.args.path.join(path)) Some(self.args.serve_path.join(path))
} }
async fn list_dir( async fn list_dir(
@@ -1102,8 +1230,8 @@ impl Server {
access_paths: AccessPaths, access_paths: AccessPaths,
) -> Result<Vec<PathItem>> { ) -> Result<Vec<PathItem>> {
let mut paths: Vec<PathItem> = vec![]; let mut paths: Vec<PathItem> = vec![];
if access_paths.perm().indexonly() { if access_paths.perm().inherit() {
for name in access_paths.child_paths() { for name in access_paths.child_names() {
let entry_path = entry_path.join(name); let entry_path = entry_path.join(name);
self.add_pathitem(&mut paths, base_path, &entry_path).await; self.add_pathitem(&mut paths, base_path, &entry_path).await;
} }
@@ -1158,10 +1286,11 @@ impl Server {
} }
} }
#[derive(Debug, Serialize)] #[derive(Debug, Serialize, PartialEq)]
enum DataKind { enum DataKind {
Index, Index,
Edit, Edit,
View,
} }
#[derive(Debug, Serialize)] #[derive(Debug, Serialize)]
@@ -1248,12 +1377,45 @@ impl PathItem {
), ),
} }
} }
pub fn base_name(&self) -> &str { pub fn base_name(&self) -> &str {
self.name.split('/').last().unwrap_or_default() self.name.split('/').last().unwrap_or_default()
} }
/// Orders directory-like entries before files; entries in the same
/// category are compared case-insensitively with natural (alphanumeric)
/// ordering of their names.
pub fn sort_by_name(&self, other: &Self) -> Ordering {
    self.path_type.cmp(&other.path_type).then_with(|| {
        alphanumeric_sort::compare_str(self.name.to_lowercase(), other.name.to_lowercase())
    })
}
/// Orders directory-like entries before files; entries in the same
/// category are compared by modification time.
pub fn sort_by_mtime(&self, other: &Self) -> Ordering {
    self.path_type
        .cmp(&other.path_type)
        .then_with(|| self.mtime.cmp(&other.mtime))
}
/// Orders directory-like entries before files. Within the same category,
/// directories fall back to case-insensitive natural name order (they have
/// no meaningful size), while files compare by size with a missing size
/// treated as 0.
pub fn sort_by_size(&self, other: &Self) -> Ordering {
    self.path_type.cmp(&other.path_type).then_with(|| {
        if self.is_dir() {
            alphanumeric_sort::compare_str(self.name.to_lowercase(), other.name.to_lowercase())
        } else {
            let (a, b) = (self.size.unwrap_or(0), other.size.unwrap_or(0));
            a.cmp(&b)
        }
    })
}
} }
#[derive(Debug, Serialize, Eq, PartialEq, Ord, PartialOrd)] #[derive(Debug, Serialize, Eq, PartialEq)]
enum PathType { enum PathType {
Dir, Dir,
SymlinkDir, SymlinkDir,
@@ -1261,6 +1423,24 @@ enum PathType {
SymlinkFile, SymlinkFile,
} }
impl Ord for PathType {
    /// Total order placing directory-like variants (`Dir`, `SymlinkDir`)
    /// before file-like variants; variants within the same category compare
    /// equal.
    fn cmp(&self, other: &Self) -> Ordering {
        // Rank 0 for directories, 1 for everything else.
        let rank = |t: &Self| u8::from(!matches!(t, Self::Dir | Self::SymlinkDir));
        rank(self).cmp(&rank(other))
    }
}
impl PartialOrd for PathType {
    /// Delegates to the total order defined by `Ord`, so `partial_cmp`
    /// never returns `None`.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
fn to_timestamp(time: &SystemTime) -> u64 { fn to_timestamp(time: &SystemTime) -> u64 {
time.duration_since(SystemTime::UNIX_EPOCH) time.duration_since(SystemTime::UNIX_EPOCH)
.unwrap_or_default() .unwrap_or_default()
@@ -1310,7 +1490,7 @@ fn res_multistatus(res: &mut Response, content: &str) {
"content-type", "content-type",
HeaderValue::from_static("application/xml; charset=utf-8"), HeaderValue::from_static("application/xml; charset=utf-8"),
); );
*res.body_mut() = Body::from(format!( *res.body_mut() = body_full(format!(
r#"<?xml version="1.0" encoding="utf-8" ?> r#"<?xml version="1.0" encoding="utf-8" ?>
<D:multistatus xmlns:D="DAV:"> <D:multistatus xmlns:D="DAV:">
{content} {content}
@@ -1323,6 +1503,7 @@ async fn zip_dir<W: AsyncWrite + Unpin>(
dir: &Path, dir: &Path,
access_paths: AccessPaths, access_paths: AccessPaths,
hidden: &[String], hidden: &[String],
compression: Compression,
running: Arc<AtomicBool>, running: Arc<AtomicBool>,
) -> Result<()> { ) -> Result<()> {
let mut writer = ZipFileWriter::with_tokio(writer); let mut writer = ZipFileWriter::with_tokio(writer);
@@ -1331,10 +1512,11 @@ async fn zip_dir<W: AsyncWrite + Unpin>(
let dir_clone = dir.to_path_buf(); let dir_clone = dir.to_path_buf();
let zip_paths = tokio::task::spawn_blocking(move || { let zip_paths = tokio::task::spawn_blocking(move || {
let mut paths: Vec<PathBuf> = vec![]; let mut paths: Vec<PathBuf> = vec![];
for dir in access_paths.leaf_paths(&dir_clone) { for dir in access_paths.child_paths(&dir_clone) {
let mut it = WalkDir::new(&dir).into_iter(); let mut it = WalkDir::new(&dir).into_iter();
it.next();
while let Some(Ok(entry)) = it.next() { while let Some(Ok(entry)) = it.next() {
if !running.load(Ordering::SeqCst) { if !running.load(atomic::Ordering::SeqCst) {
break; break;
} }
let entry_path = entry.path(); let entry_path = entry.path();
@@ -1375,7 +1557,7 @@ async fn zip_dir<W: AsyncWrite + Unpin>(
None => continue, None => continue,
}; };
let (datetime, mode) = get_file_mtime_and_mode(&zip_path).await?; let (datetime, mode) = get_file_mtime_and_mode(&zip_path).await?;
let builder = ZipEntryBuilder::new(filename.into(), Compression::Deflate) let builder = ZipEntryBuilder::new(filename.into(), compression)
.unix_permissions(mode) .unix_permissions(mode)
.last_modification_date(ZipDateTime::from_chrono(&datetime)); .last_modification_date(ZipDateTime::from_chrono(&datetime));
let mut file = File::open(&zip_path).await?; let mut file = File::open(&zip_path).await?;
@@ -1396,59 +1578,46 @@ fn extract_cache_headers(meta: &Metadata) -> Option<(ETag, LastModified)> {
Some((etag, last_modified)) Some((etag, last_modified))
} }
#[derive(Debug)]
struct RangeValue {
start: u64,
end: Option<u64>,
}
fn parse_range(headers: &HeaderMap<HeaderValue>) -> Option<RangeValue> {
let range_hdr = headers.get(RANGE)?;
let hdr = range_hdr.to_str().ok()?;
let mut sp = hdr.splitn(2, '=');
let units = sp.next()?;
if units == "bytes" {
let range = sp.next()?;
let mut sp_range = range.splitn(2, '-');
let start: u64 = sp_range.next()?.parse().ok()?;
let end: Option<u64> = if let Some(end) = sp_range.next() {
if end.is_empty() {
None
} else {
Some(end.parse().ok()?)
}
} else {
None
};
Some(RangeValue { start, end })
} else {
None
}
}
fn status_forbid(res: &mut Response) { fn status_forbid(res: &mut Response) {
*res.status_mut() = StatusCode::FORBIDDEN; *res.status_mut() = StatusCode::FORBIDDEN;
*res.body_mut() = Body::from("Forbidden"); *res.body_mut() = body_full("Forbidden");
} }
fn status_not_found(res: &mut Response) { fn status_not_found(res: &mut Response) {
*res.status_mut() = StatusCode::NOT_FOUND; *res.status_mut() = StatusCode::NOT_FOUND;
*res.body_mut() = Body::from("Not Found"); *res.body_mut() = body_full("Not Found");
} }
fn status_no_content(res: &mut Response) { fn status_no_content(res: &mut Response) {
*res.status_mut() = StatusCode::NO_CONTENT; *res.status_mut() = StatusCode::NO_CONTENT;
} }
/// Sets a 400 Bad Request status on `res`, attaching `body` as the response
/// payload only when it is non-empty.
fn status_bad_request(res: &mut Response, body: &str) {
    *res.status_mut() = StatusCode::BAD_REQUEST;
    if body.is_empty() {
        return;
    }
    *res.body_mut() = body_full(body.to_string());
}
fn set_content_diposition(res: &mut Response, inline: bool, filename: &str) -> Result<()> { fn set_content_diposition(res: &mut Response, inline: bool, filename: &str) -> Result<()> {
let kind = if inline { "inline" } else { "attachment" }; let kind = if inline { "inline" } else { "attachment" };
let filename: String = filename
.chars()
.map(|ch| {
if ch.is_ascii_control() && ch != '\t' {
' '
} else {
ch
}
})
.collect();
let value = if filename.is_ascii() { let value = if filename.is_ascii() {
HeaderValue::from_str(&format!("{kind}; filename=\"{}\"", filename,))? HeaderValue::from_str(&format!("{kind}; filename=\"{}\"", filename,))?
} else { } else {
HeaderValue::from_str(&format!( HeaderValue::from_str(&format!(
"{kind}; filename=\"{}\"; filename*=UTF-8''{}", "{kind}; filename=\"{}\"; filename*=UTF-8''{}",
filename, filename,
encode_uri(filename), encode_uri(&filename),
))? ))?
}; };
res.headers_mut().insert(CONTENT_DISPOSITION, value); res.headers_mut().insert(CONTENT_DISPOSITION, value);
@@ -1469,10 +1638,12 @@ fn is_hidden(hidden: &[String], file_name: &str, is_dir_type: bool) -> bool {
fn set_webdav_headers(res: &mut Response) { fn set_webdav_headers(res: &mut Response) {
res.headers_mut().insert( res.headers_mut().insert(
"Allow", "Allow",
HeaderValue::from_static("GET,HEAD,PUT,OPTIONS,DELETE,PROPFIND,COPY,MOVE"), HeaderValue::from_static("GET,HEAD,PUT,OPTIONS,DELETE,PATCH,PROPFIND,COPY,MOVE"),
);
res.headers_mut().insert(
"DAV",
HeaderValue::from_static("1, 2, 3, sabredav-partialupdate"),
); );
res.headers_mut()
.insert("DAV", HeaderValue::from_static("1,2"));
} }
async fn get_content_type(path: &Path) -> Result<String> { async fn get_content_type(path: &Path) -> Result<String> {
@@ -1505,3 +1676,17 @@ async fn get_content_type(path: &Path) -> Result<String> {
}; };
Ok(content_type) Ok(content_type)
} }
/// Determines where a resumable upload should continue writing, based on the
/// `X-Update-Range` request header.
///
/// Returns `Ok(None)` when the header is absent. The literal value `append`
/// maps to the current file size (write at EOF); any other value must be a
/// range (`bytes=...`) satisfiable against `size`, whose start offset is used.
///
/// # Errors
/// Fails when the header value is not valid visible ASCII or cannot be parsed
/// as a satisfiable range.
fn parse_upload_offset(headers: &HeaderMap<HeaderValue>, size: u64) -> Result<Option<u64>> {
    let value = match headers.get("x-update-range") {
        Some(v) => v,
        None => return Ok(None),
    };
    // Fixed typo in the user-facing error message ("Updage" -> "Update").
    let err = || anyhow!("Invalid X-Update-Range header");
    let value = value.to_str().map_err(|_| err())?;
    if value == "append" {
        return Ok(Some(size));
    }
    let (start, _) = parse_range(value, size).ok_or_else(err)?;
    Ok(Some(start))
}

View File

@@ -1,68 +0,0 @@
use async_stream::stream;
use futures::{Stream, StreamExt};
use std::io::Error;
use std::pin::Pin;
use tokio::io::{AsyncRead, AsyncReadExt};
/// Adapter that turns any [`AsyncRead`] source into a stream of byte chunks,
/// so large files can be served as response bodies without buffering them
/// entirely in memory.
pub struct Streamer<R>
where
    R: AsyncRead + Unpin + Send + 'static,
{
    // Underlying reader the chunks are pulled from.
    reader: R,
    // Maximum number of bytes read (and therefore yielded) per chunk.
    buf_size: usize,
}
impl<R> Streamer<R>
where
    R: AsyncRead + Unpin + Send + 'static,
{
    /// Creates a streamer that reads `reader` in chunks of at most
    /// `buf_size` bytes.
    #[inline]
    pub fn new(reader: R, buf_size: usize) -> Self {
        Self { reader, buf_size }
    }

    /// Consumes the streamer and yields the reader's entire contents as a
    /// stream of `Vec<u8>` chunks, finishing when the reader reports EOF
    /// (a zero-byte read).
    pub fn into_stream(
        mut self,
    ) -> Pin<Box<impl ?Sized + Stream<Item = Result<Vec<u8>, Error>> + 'static>> {
        let stream = stream! {
            loop {
                let mut buf = vec![0; self.buf_size];
                // A zero-byte read signals EOF; read errors bubble out via `?`.
                let r = self.reader.read(&mut buf).await?;
                if r == 0 {
                    break
                }
                // Yield only the bytes actually read, not the whole buffer.
                buf.truncate(r);
                yield Ok(buf);
            }
        };
        stream.boxed()
    }

    /// Like [`Self::into_stream`] but yields at most `max_length` bytes in
    /// total — used to serve HTTP range (partial content) responses.
    // The `remaining as usize` truncation is safe: it only happens when
    // `remaining` is already smaller than `buf_size`, which is a usize.
    pub fn into_stream_sized(
        mut self,
        max_length: u64,
    ) -> Pin<Box<impl ?Sized + Stream<Item = Result<Vec<u8>, Error>> + 'static>> {
        let stream = stream! {
            let mut remaining = max_length;
            loop {
                if remaining == 0 {
                    break;
                }
                // Size the next read to the smaller of buf_size and what's left.
                let bs = if remaining >= self.buf_size as u64 {
                    self.buf_size
                } else {
                    remaining as usize
                };
                let mut buf = vec![0; bs];
                let r = self.reader.read(&mut buf).await?;
                if r == 0 {
                    // EOF before max_length was reached; stop early.
                    break;
                } else {
                    buf.truncate(r);
                    yield Ok(buf);
                }
                remaining -= r as u64;
            }
        };
        stream.boxed()
    }
}

View File

@@ -1,161 +0,0 @@
use anyhow::{anyhow, bail, Context as AnyhowContext, Result};
use core::task::{Context, Poll};
use futures::ready;
use hyper::server::accept::Accept;
use hyper::server::conn::{AddrIncoming, AddrStream};
use rustls::{Certificate, PrivateKey};
use std::future::Future;
use std::net::SocketAddr;
use std::path::Path;
use std::pin::Pin;
use std::sync::Arc;
use std::{fs, io};
use tokio::io::{AsyncRead, AsyncWrite, ReadBuf};
use tokio_rustls::rustls::ServerConfig;
/// Phase of an accepted TLS connection: still performing the handshake, or
/// fully established and carrying application data.
enum State {
    // In-flight handshake future produced by the acceptor.
    Handshaking(tokio_rustls::Accept<AddrStream>),
    // Established TLS session ready for reads/writes.
    Streaming(tokio_rustls::server::TlsStream<AddrStream>),
}
// tokio_rustls::server::TlsStream doesn't expose constructor methods,
// so we have to go through TlsAcceptor::accept and complete the handshake
// to obtain one. TlsStream implements AsyncRead/AsyncWrite by first driving
// the pending tokio_rustls::Accept handshake to completion.
/// A server-side TLS connection that completes its handshake lazily, on the
/// first I/O poll.
pub struct TlsStream {
    // Current phase; flips from Handshaking to Streaming once the handshake
    // future resolves.
    state: State,
    // Peer address, captured before the plain socket is consumed by the
    // TLS acceptor (it is no longer reachable afterwards).
    remote_addr: SocketAddr,
}
impl TlsStream {
    /// Wraps a freshly accepted TCP connection, starting (but not awaiting)
    /// the TLS handshake under the given server configuration.
    fn new(stream: AddrStream, config: Arc<ServerConfig>) -> TlsStream {
        // Capture the peer address before the socket is moved into the acceptor.
        let remote_addr = stream.remote_addr();
        TlsStream {
            state: State::Handshaking(tokio_rustls::TlsAcceptor::from(config).accept(stream)),
            remote_addr,
        }
    }

    /// Address of the connected peer.
    pub fn remote_addr(&self) -> SocketAddr {
        self.remote_addr
    }
}
impl AsyncRead for TlsStream {
    /// Reads decrypted application data.
    ///
    /// While the handshake is still in flight this drives the pending
    /// `Accept` future first; when it completes, the read is issued
    /// immediately and the established stream is cached for later polls.
    fn poll_read(
        self: Pin<&mut Self>,
        cx: &mut Context,
        buf: &mut ReadBuf,
    ) -> Poll<io::Result<()>> {
        let pin = self.get_mut();
        match pin.state {
            State::Handshaking(ref mut accept) => match ready!(Pin::new(accept).poll(cx)) {
                Ok(mut stream) => {
                    // Handshake finished: perform the read now, then store the
                    // stream so subsequent polls skip the handshake arm.
                    let result = Pin::new(&mut stream).poll_read(cx, buf);
                    pin.state = State::Streaming(stream);
                    result
                }
                Err(err) => Poll::Ready(Err(err)),
            },
            State::Streaming(ref mut stream) => Pin::new(stream).poll_read(cx, buf),
        }
    }
}
impl AsyncWrite for TlsStream {
    /// Writes application data, encrypting it for the peer.
    ///
    /// Mirrors `poll_read`: while handshaking, the `Accept` future is driven
    /// first and the established stream is cached on completion.
    fn poll_write(
        self: Pin<&mut Self>,
        cx: &mut Context<'_>,
        buf: &[u8],
    ) -> Poll<io::Result<usize>> {
        let pin = self.get_mut();
        match pin.state {
            State::Handshaking(ref mut accept) => match ready!(Pin::new(accept).poll(cx)) {
                Ok(mut stream) => {
                    // Handshake finished: issue the write and transition state.
                    let result = Pin::new(&mut stream).poll_write(cx, buf);
                    pin.state = State::Streaming(stream);
                    result
                }
                Err(err) => Poll::Ready(Err(err)),
            },
            State::Streaming(ref mut stream) => Pin::new(stream).poll_write(cx, buf),
        }
    }

    /// Flushes buffered TLS data. During the handshake nothing has been
    /// written yet, so flushing trivially succeeds.
    fn poll_flush(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<()>> {
        match self.state {
            State::Handshaking(_) => Poll::Ready(Ok(())),
            State::Streaming(ref mut stream) => Pin::new(stream).poll_flush(cx),
        }
    }

    /// Shuts down the write side. During the handshake there is no
    /// established session to close, so this trivially succeeds.
    fn poll_shutdown(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<()>> {
        match self.state {
            State::Handshaking(_) => Poll::Ready(Ok(())),
            State::Streaming(ref mut stream) => Pin::new(stream).poll_shutdown(cx),
        }
    }
}
/// Hyper `Accept` implementation that upgrades incoming TCP connections to TLS.
pub struct TlsAcceptor {
    // Shared rustls server configuration applied to every accepted connection.
    config: Arc<ServerConfig>,
    // Underlying TCP listener producing plain connections.
    incoming: AddrIncoming,
}
impl TlsAcceptor {
pub fn new(config: Arc<ServerConfig>, incoming: AddrIncoming) -> TlsAcceptor {
TlsAcceptor { config, incoming }
}
}
impl Accept for TlsAcceptor {
    type Conn = TlsStream;
    type Error = io::Error;

    /// Accepts the next TCP connection and wraps it in a [`TlsStream`],
    /// which performs the TLS handshake lazily on first use.
    fn poll_accept(
        self: Pin<&mut Self>,
        cx: &mut Context<'_>,
    ) -> Poll<Option<Result<Self::Conn, Self::Error>>> {
        let this = self.get_mut();
        let next = ready!(Pin::new(&mut this.incoming).poll_accept(cx));
        // None means the listener is exhausted; errors pass through unchanged.
        let conn = next.map(|result| result.map(|sock| TlsStream::new(sock, this.config.clone())));
        Poll::Ready(conn)
    }
}
/// Loads the server's public certificate chain from a PEM file.
///
/// # Errors
/// Fails when the file cannot be opened, the PEM data cannot be parsed, or
/// the file contains no certificate.
pub fn load_certs<T: AsRef<Path>>(filename: T) -> Result<Vec<Certificate>> {
    let path = filename.as_ref();
    let cert_file = fs::File::open(path)
        .with_context(|| format!("Failed to access `{}`", path.display()))?;
    let mut reader = io::BufReader::new(cert_file);

    let raw_certs =
        rustls_pemfile::certs(&mut reader).with_context(|| "Failed to load certificate")?;
    if raw_certs.is_empty() {
        bail!("No supported certificate in file");
    }
    Ok(raw_certs.into_iter().map(Certificate).collect())
}
/// Loads the server's private key from a PEM file, accepting RSA, PKCS#8,
/// or EC (SEC1) encodings; the first supported key found is used.
///
/// # Errors
/// Fails when the file cannot be opened or read, or when it contains no
/// supported private key.
pub fn load_private_key<T: AsRef<Path>>(filename: T) -> Result<PrivateKey> {
    let path = filename.as_ref();
    let key_file = fs::File::open(path)
        .with_context(|| format!("Failed to access `{}`", path.display()))?;
    let mut reader = io::BufReader::new(key_file);

    let items = rustls_pemfile::read_all(&mut reader)
        .with_context(|| "There was a problem with reading private key")?;
    for item in items {
        match item {
            rustls_pemfile::Item::RSAKey(key)
            | rustls_pemfile::Item::PKCS8Key(key)
            | rustls_pemfile::Item::ECKey(key) => return Ok(PrivateKey(key)),
            // Skip non-key PEM sections (e.g. certificates).
            _ => {}
        }
    }
    Err(anyhow!("No supported private key in file"))
}

View File

@@ -1,31 +0,0 @@
use hyper::server::accept::Accept;
use tokio::net::UnixListener;
use std::pin::Pin;
use std::task::{Context, Poll};
/// Hyper `Accept` implementation backed by a Unix domain socket listener.
pub struct UnixAcceptor {
    // The bound Unix socket listener connections are accepted from.
    inner: UnixListener,
}
impl UnixAcceptor {
pub fn from_listener(listener: UnixListener) -> Self {
Self { inner: listener }
}
}
impl Accept for UnixAcceptor {
    type Conn = tokio::net::UnixStream;
    type Error = std::io::Error;

    /// Accepts the next Unix socket connection, discarding the peer address
    /// (it carries no useful information for a local socket).
    fn poll_accept(
        self: Pin<&mut Self>,
        cx: &mut Context<'_>,
    ) -> Poll<Option<Result<Self::Conn, Self::Error>>> {
        self.inner
            .poll_accept(cx)
            .map(|result| Some(result.map(|(stream, _addr)| stream)))
    }
}

View File

@@ -1,5 +1,7 @@
use anyhow::{anyhow, Context, Result}; use anyhow::{anyhow, Context, Result};
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
#[cfg(feature = "tls")]
use rustls_pki_types::{CertificateDer, PrivateKeyDer};
use std::{ use std::{
borrow::Cow, borrow::Cow,
path::Path, path::Path,
@@ -58,26 +60,115 @@ pub fn glob(pattern: &str, target: &str) -> bool {
pat.matches(target) pat.matches(target)
} }
#[test] // Load public certificate from file.
fn test_glob_key() { #[cfg(feature = "tls")]
assert!(glob("", "")); pub fn load_certs<T: AsRef<Path>>(filename: T) -> Result<Vec<CertificateDer<'static>>> {
assert!(glob(".*", ".git")); // Open certificate file.
assert!(glob("abc", "abc")); let cert_file = std::fs::File::open(filename.as_ref())
assert!(glob("a*c", "abc")); .with_context(|| format!("Failed to access `{}`", filename.as_ref().display()))?;
assert!(glob("a?c", "abc")); let mut reader = std::io::BufReader::new(cert_file);
assert!(glob("a*c", "abbc"));
assert!(glob("*c", "abc")); // Load and return certificate.
assert!(glob("a*", "abc")); let mut certs = vec![];
assert!(glob("?c", "bc")); for cert in rustls_pemfile::certs(&mut reader) {
assert!(glob("a?", "ab")); let cert = cert.with_context(|| "Failed to load certificate")?;
assert!(!glob("abc", "adc")); certs.push(cert)
assert!(!glob("abc", "abcd")); }
assert!(!glob("a?c", "abbc")); if certs.is_empty() {
assert!(!glob("*.log", "log")); anyhow::bail!("No supported certificate in file");
assert!(glob("*.abc-cba", "xyz.abc-cba")); }
assert!(glob("*.abc-cba", "123.xyz.abc-cba")); Ok(certs)
assert!(glob("*.log", ".log")); }
assert!(glob("*.log", "a.log"));
assert!(glob("*/", "abc/")); // Load private key from file.
assert!(!glob("*/", "abc")); #[cfg(feature = "tls")]
pub fn load_private_key<T: AsRef<Path>>(filename: T) -> Result<PrivateKeyDer<'static>> {
let key_file = std::fs::File::open(filename.as_ref())
.with_context(|| format!("Failed to access `{}`", filename.as_ref().display()))?;
let mut reader = std::io::BufReader::new(key_file);
// Load and return a single private key.
for key in rustls_pemfile::read_all(&mut reader) {
let key = key.with_context(|| "There was a problem with reading private key")?;
match key {
rustls_pemfile::Item::Pkcs1Key(key) => return Ok(PrivateKeyDer::Pkcs1(key)),
rustls_pemfile::Item::Pkcs8Key(key) => return Ok(PrivateKeyDer::Pkcs8(key)),
rustls_pemfile::Item::Sec1Key(key) => return Ok(PrivateKeyDer::Sec1(key)),
_ => {}
}
}
anyhow::bail!("No supported private key in file");
}
/// Parses an HTTP `Range` header value (e.g. `bytes=0-499`) against a
/// resource of `size` bytes.
///
/// Returns the inclusive `(start, end)` byte positions of the satisfiable
/// range, or `None` when the value is malformed, uses a unit other than
/// `bytes`, contains multiple ranges, or is unsatisfiable for `size`.
pub fn parse_range(range: &str, size: u64) -> Option<(u64, u64)> {
    let (unit, range) = range.split_once('=')?;
    // Only a single `bytes` range is supported; multipart ranges are rejected.
    if unit != "bytes" || range.contains(',') {
        return None;
    }
    let (start, end) = range.split_once('-')?;
    if start.is_empty() {
        // Suffix form `bytes=-N`: the final N bytes of the resource.
        // A zero-length suffix is unsatisfiable (and previously produced the
        // invalid range (size, size-1), underflowing `size - 1` when size == 0).
        let offset = end.parse::<u64>().ok()?;
        if offset > 0 && offset <= size {
            Some((size - offset, size - 1))
        } else {
            None
        }
    } else {
        let start = start.parse::<u64>().ok()?;
        // The start position must fall inside the resource.
        if start >= size {
            return None;
        }
        if end.is_empty() {
            // Open-ended form `bytes=N-`: everything from N to the last byte.
            Some((start, size - 1))
        } else {
            let end = end.parse::<u64>().ok()?;
            // The inclusive end must be inside the resource and not precede
            // the start (a reversed range would underflow `end - start + 1`
            // in the range-serving code).
            if start <= end && end < size {
                Some((start, end))
            } else {
                None
            }
        }
    }
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_glob_key() {
assert!(glob("", ""));
assert!(glob(".*", ".git"));
assert!(glob("abc", "abc"));
assert!(glob("a*c", "abc"));
assert!(glob("a?c", "abc"));
assert!(glob("a*c", "abbc"));
assert!(glob("*c", "abc"));
assert!(glob("a*", "abc"));
assert!(glob("?c", "bc"));
assert!(glob("a?", "ab"));
assert!(!glob("abc", "adc"));
assert!(!glob("abc", "abcd"));
assert!(!glob("a?c", "abbc"));
assert!(!glob("*.log", "log"));
assert!(glob("*.abc-cba", "xyz.abc-cba"));
assert!(glob("*.abc-cba", "123.xyz.abc-cba"));
assert!(glob("*.log", ".log"));
assert!(glob("*.log", "a.log"));
assert!(glob("*/", "abc/"));
assert!(!glob("*/", "abc"));
}
#[test]
fn test_parse_range() {
assert_eq!(parse_range("bytes=0-499", 500), Some((0, 499)));
assert_eq!(parse_range("bytes=0-", 500), Some((0, 499)));
assert_eq!(parse_range("bytes=299-", 500), Some((299, 499)));
assert_eq!(parse_range("bytes=-500", 500), Some((0, 499)));
assert_eq!(parse_range("bytes=-300", 500), Some((200, 499)));
assert_eq!(parse_range("bytes=500-", 500), None);
assert_eq!(parse_range("bytes=-501", 500), None);
assert_eq!(parse_range("bytes=0-500", 500), None);
}
} }

View File

@@ -32,7 +32,7 @@ fn asset_js(server: TestServer) -> Result<(), Error> {
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
assert_eq!( assert_eq!(
resp.headers().get("content-type").unwrap(), resp.headers().get("content-type").unwrap(),
"application/javascript" "application/javascript; charset=UTF-8"
); );
Ok(()) Ok(())
} }
@@ -46,7 +46,10 @@ fn asset_css(server: TestServer) -> Result<(), Error> {
); );
let resp = reqwest::blocking::get(url)?; let resp = reqwest::blocking::get(url)?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
assert_eq!(resp.headers().get("content-type").unwrap(), "text/css"); assert_eq!(
resp.headers().get("content-type").unwrap(),
"text/css; charset=UTF-8"
);
Ok(()) Ok(())
} }
@@ -90,7 +93,7 @@ fn asset_js_with_prefix(
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
assert_eq!( assert_eq!(
resp.headers().get("content-type").unwrap(), resp.headers().get("content-type").unwrap(),
"application/javascript" "application/javascript; charset=UTF-8"
); );
Ok(()) Ok(())
} }

View File

@@ -10,7 +10,15 @@ use rstest::rstest;
fn no_auth(#[with(&["--auth", "user:pass@/:rw", "-A"])] server: TestServer) -> Result<(), Error> { fn no_auth(#[with(&["--auth", "user:pass@/:rw", "-A"])] server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(server.url())?; let resp = reqwest::blocking::get(server.url())?;
assert_eq!(resp.status(), 401); assert_eq!(resp.status(), 401);
assert!(resp.headers().contains_key("www-authenticate")); let values: Vec<&str> = resp
.headers()
.get_all("www-authenticate")
.iter()
.map(|v| v.to_str().unwrap())
.collect();
assert!(values[0].starts_with("Digest"));
assert!(values[1].starts_with("Basic"));
let url = format!("{}file1", server.url()); let url = format!("{}file1", server.url());
let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?; let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
assert_eq!(resp.status(), 401); assert_eq!(resp.status(), 401);
@@ -18,20 +26,48 @@ fn no_auth(#[with(&["--auth", "user:pass@/:rw", "-A"])] server: TestServer) -> R
} }
#[rstest] #[rstest]
fn auth(#[with(&["--auth", "user:pass@/:rw", "-A"])] server: TestServer) -> Result<(), Error> { #[case(server(&["--auth", "user:pass@/:rw", "-A"]), "user", "pass")]
#[case(server(&["--auth", "user:pa:ss@1@/:rw", "-A"]), "user", "pa:ss@1")]
fn auth(#[case] server: TestServer, #[case] user: &str, #[case] pass: &str) -> Result<(), Error> {
let url = format!("{}file1", server.url()); let url = format!("{}file1", server.url());
let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?; let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
assert_eq!(resp.status(), 401); assert_eq!(resp.status(), 401);
let resp = fetch!(b"PUT", &url) let resp = fetch!(b"PUT", &url)
.body(b"abc".to_vec()) .body(b"abc".to_vec())
.send_with_digest_auth("user", "pass")?; .send_with_digest_auth(user, pass)?;
assert_eq!(resp.status(), 201);
Ok(())
}
const HASHED_PASSWORD_AUTH: &str = "user:$6$gQxZwKyWn/ZmWEA2$4uV7KKMnSUnET2BtWTj/9T5.Jq3h/MdkOlnIl5hdlTxDZ4MZKmJ.kl6C.NL9xnNPqC4lVHC1vuI0E5cLpTJX81@/:rw"; // user:pass
#[rstest]
fn auth_hashed_password(
#[with(&["--auth", HASHED_PASSWORD_AUTH, "-A"])] server: TestServer,
) -> Result<(), Error> {
let url = format!("{}file1", server.url());
let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
assert_eq!(resp.status(), 401);
if let Err(err) = fetch!(b"PUT", &url)
.body(b"abc".to_vec())
.send_with_digest_auth("user", "pass")
{
assert_eq!(
format!("{err:?}"),
r#"DigestAuth(MissingRequired("realm", "Basic realm=\"DUFS\""))"#
);
}
let resp = fetch!(b"PUT", &url)
.body(b"abc".to_vec())
.basic_auth("user", Some("pass"))
.send()?;
assert_eq!(resp.status(), 201); assert_eq!(resp.status(), 201);
Ok(()) Ok(())
} }
#[rstest] #[rstest]
fn auth_and_public( fn auth_and_public(
#[with(&["--auth", "user:pass@/:rw|@/", "-A"])] server: TestServer, #[with(&["-a", "user:pass@/:rw", "-a", "@/", "-A"])] server: TestServer,
) -> Result<(), Error> { ) -> Result<(), Error> {
let url = format!("{}file1", server.url()); let url = format!("{}file1", server.url());
let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?; let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
@@ -65,6 +101,20 @@ fn auth_skip_on_options_method(
#[rstest] #[rstest]
fn auth_check( fn auth_check(
#[with(&["--auth", "user:pass@/:rw", "--auth", "user2:pass2@/", "-A"])] server: TestServer,
) -> Result<(), Error> {
let url = format!("{}index.html", server.url());
let resp = fetch!(b"WRITEABLE", &url).send()?;
assert_eq!(resp.status(), 401);
let resp = fetch!(b"WRITEABLE", &url).send_with_digest_auth("user2", "pass2")?;
assert_eq!(resp.status(), 403);
let resp = fetch!(b"WRITEABLE", &url).send_with_digest_auth("user", "pass")?;
assert_eq!(resp.status(), 200);
Ok(())
}
#[rstest]
fn auth_compact_rules(
#[with(&["--auth", "user:pass@/:rw|user2:pass2@/", "-A"])] server: TestServer, #[with(&["--auth", "user:pass@/:rw|user2:pass2@/", "-A"])] server: TestServer,
) -> Result<(), Error> { ) -> Result<(), Error> {
let url = format!("{}index.html", server.url()); let url = format!("{}index.html", server.url());
@@ -79,7 +129,7 @@ fn auth_check(
#[rstest] #[rstest]
fn auth_readonly( fn auth_readonly(
#[with(&["--auth", "user:pass@/:rw|user2:pass2@/", "-A"])] server: TestServer, #[with(&["--auth", "user:pass@/:rw", "--auth", "user2:pass2@/", "-A"])] server: TestServer,
) -> Result<(), Error> { ) -> Result<(), Error> {
let url = format!("{}index.html", server.url()); let url = format!("{}index.html", server.url());
let resp = fetch!(b"GET", &url).send()?; let resp = fetch!(b"GET", &url).send()?;
@@ -94,9 +144,26 @@ fn auth_readonly(
Ok(()) Ok(())
} }
#[rstest]
fn auth_forbidden(
#[with(&["--auth", "user:pass@/:rw,/dir1:-", "-A"])] server: TestServer,
) -> Result<(), Error> {
let url = format!("{}file1", server.url());
let resp = fetch!(b"PUT", &url)
.body(b"abc".to_vec())
.send_with_digest_auth("user", "pass")?;
assert_eq!(resp.status(), 201);
let url = format!("{}dir1/file1", server.url());
let resp = fetch!(b"PUT", &url)
.body(b"abc".to_vec())
.send_with_digest_auth("user", "pass")?;
assert_eq!(resp.status(), 403);
Ok(())
}
#[rstest] #[rstest]
fn auth_nest( fn auth_nest(
#[with(&["--auth", "user:pass@/:rw|user2:pass2@/", "--auth", "user3:pass3@/dir1:rw", "-A"])] #[with(&["--auth", "user:pass@/:rw", "--auth", "user2:pass2@/", "--auth", "user3:pass3@/dir1:rw", "-A"])]
server: TestServer, server: TestServer,
) -> Result<(), Error> { ) -> Result<(), Error> {
let url = format!("{}dir1/file1", server.url()); let url = format!("{}dir1/file1", server.url());
@@ -125,8 +192,8 @@ fn auth_nest_share(
} }
#[rstest] #[rstest]
#[case(server(&["--auth", "user:pass@/:rw", "--auth-method", "basic", "-A"]), "user", "pass")] #[case(server(&["--auth", "user:pass@/:rw", "-A"]), "user", "pass")]
#[case(server(&["--auth", "u1:p1@/:rw", "--auth-method", "basic", "-A"]), "u1", "p1")] #[case(server(&["--auth", "u1:p1@/:rw", "-A"]), "u1", "p1")]
fn auth_basic( fn auth_basic(
#[case] server: TestServer, #[case] server: TestServer,
#[case] user: &str, #[case] user: &str,
@@ -204,7 +271,7 @@ fn auth_partial_index(
#[rstest] #[rstest]
fn no_auth_propfind_dir( fn no_auth_propfind_dir(
#[with(&["--auth", "user:pass@/:rw", "--auth", "@/dir-assets", "-A"])] server: TestServer, #[with(&["--auth", "admin:admin@/:rw", "--auth", "@/dir-assets", "-A"])] server: TestServer,
) -> Result<(), Error> { ) -> Result<(), Error> {
let resp = fetch!(b"PROPFIND", server.url()).send()?; let resp = fetch!(b"PROPFIND", server.url()).send()?;
assert_eq!(resp.status(), 207); assert_eq!(resp.status(), 207);
@@ -213,3 +280,54 @@ fn no_auth_propfind_dir(
assert!(body.contains("<D:href>/dir1/</D:href>")); assert!(body.contains("<D:href>/dir1/</D:href>"));
Ok(()) Ok(())
} }
#[rstest]
fn auth_propfind_dir(
#[with(&["--auth", "admin:admin@/:rw", "--auth", "user:pass@/dir-assets", "-A"])]
server: TestServer,
) -> Result<(), Error> {
let resp = fetch!(b"PROPFIND", server.url()).send_with_digest_auth("user", "pass")?;
assert_eq!(resp.status(), 207);
let body = resp.text()?;
assert!(body.contains("<D:href>/dir-assets/</D:href>"));
assert!(!body.contains("<D:href>/dir1/</D:href>"));
Ok(())
}
#[rstest]
fn auth_data(
#[with(&["-a", "user:pass@/:rw", "-a", "@/", "-A"])] server: TestServer,
) -> Result<(), Error> {
let resp = reqwest::blocking::get(server.url())?;
let content = resp.text()?;
let json = utils::retrive_json(&content).unwrap();
assert_eq!(json["allow_delete"], serde_json::Value::Bool(false));
assert_eq!(json["allow_upload"], serde_json::Value::Bool(false));
let resp = fetch!(b"GET", server.url())
.basic_auth("user", Some("pass"))
.send()?;
let content = resp.text()?;
let json = utils::retrive_json(&content).unwrap();
assert_eq!(json["allow_delete"], serde_json::Value::Bool(true));
assert_eq!(json["allow_upload"], serde_json::Value::Bool(true));
Ok(())
}
#[rstest]
fn auth_precedence(
#[with(&["--auth", "user:pass@/dir1:rw,/dir1/test.txt", "-A"])] server: TestServer,
) -> Result<(), Error> {
let url = format!("{}dir1/test.txt", server.url());
let resp = fetch!(b"PUT", &url)
.body(b"abc".to_vec())
.send_with_digest_auth("user", "pass")?;
assert_eq!(resp.status(), 403);
let url = format!("{}dir1/file1", server.url());
let resp = fetch!(b"PUT", &url)
.body(b"abc".to_vec())
.send_with_digest_auth("user", "pass")?;
assert_eq!(resp.status(), 201);
Ok(())
}

View File

@@ -76,9 +76,7 @@ fn validate_printed_urls(tmpdir: TempDir, port: u16, #[case] args: &[&str]) -> R
.collect::<Vec<_>>(); .collect::<Vec<_>>();
assert!(!urls.is_empty()); assert!(!urls.is_empty());
for url in urls { reqwest::blocking::get(urls[0])?.error_for_status()?;
reqwest::blocking::get(url)?.error_for_status()?;
}
child.kill()?; child.kill()?;

56
tests/config.rs Normal file
View File

@@ -0,0 +1,56 @@
mod fixtures;
mod utils;
use assert_cmd::prelude::*;
use assert_fs::TempDir;
use diqwest::blocking::WithDigestAuth;
use fixtures::{port, tmpdir, wait_for_port, Error};
use rstest::rstest;
use std::path::PathBuf;
use std::process::{Command, Stdio};
#[rstest]
fn use_config_file(tmpdir: TempDir, port: u16) -> Result<(), Error> {
let config_path = get_config_path().display().to_string();
let mut child = Command::cargo_bin("dufs")?
.arg(tmpdir.path())
.arg("-p")
.arg(port.to_string())
.args(["--config", &config_path])
.stdout(Stdio::piped())
.spawn()?;
wait_for_port(port);
let url = format!("http://localhost:{port}/dufs/index.html");
let resp = fetch!(b"GET", &url).send()?;
assert_eq!(resp.status(), 401);
let url = format!("http://localhost:{port}/dufs/index.html");
let resp = fetch!(b"GET", &url).send_with_digest_auth("user", "pass")?;
assert_eq!(resp.text()?, "This is index.html");
let url = format!("http://localhost:{port}/dufs?simple");
let resp = fetch!(b"GET", &url).send_with_digest_auth("user", "pass")?;
let text: String = resp.text().unwrap();
assert!(text.split('\n').any(|c| c == "dir1/"));
assert!(!text.split('\n').any(|c| c == "dir3/"));
assert!(!text.split('\n').any(|c| c == "test.txt"));
let url = format!("http://localhost:{port}/dufs/dir1/upload.txt");
let resp = fetch!(b"PUT", &url)
.body("Hello")
.send_with_digest_auth("user", "pass")?;
assert_eq!(resp.status(), 201);
child.kill()?;
Ok(())
}
fn get_config_path() -> PathBuf {
let mut path = std::env::current_dir().expect("Failed to get current directory");
path.push("tests");
path.push("data");
path.push("config.yaml");
path
}

9
tests/data/config.yaml Normal file
View File

@@ -0,0 +1,9 @@
bind:
- 0.0.0.0
path-prefix: dufs
hidden:
- dir3
- test.txt
auth:
- user:pass@/:rw
allow-upload: true

View File

@@ -16,7 +16,14 @@ pub const BIN_FILE: &str = "😀.bin";
/// File names for testing purpose /// File names for testing purpose
#[allow(dead_code)] #[allow(dead_code)]
pub static FILES: &[&str] = &["test.txt", "test.html", "index.html", BIN_FILE]; pub static FILES: &[&str] = &[
"test.txt",
"test.html",
"index.html",
#[cfg(not(target_os = "windows"))]
"file\n1.txt",
BIN_FILE,
];
/// Directory names for testing directory don't exist /// Directory names for testing directory don't exist
#[allow(dead_code)] #[allow(dead_code)]
@@ -46,7 +53,7 @@ pub fn tmpdir() -> TempDir {
let tmpdir = assert_fs::TempDir::new().expect("Couldn't create a temp dir for tests"); let tmpdir = assert_fs::TempDir::new().expect("Couldn't create a temp dir for tests");
for file in FILES { for file in FILES {
if *file == BIN_FILE { if *file == BIN_FILE {
tmpdir.child(file).write_binary(b"bin\0\0123").unwrap(); tmpdir.child(file).write_binary(b"bin\0\x00123").unwrap();
} else { } else {
tmpdir tmpdir
.child(file) .child(file)
@@ -58,7 +65,7 @@ pub fn tmpdir() -> TempDir {
if *directory == DIR_ASSETS { if *directory == DIR_ASSETS {
tmpdir tmpdir
.child(format!("{}{}", directory, "index.html")) .child(format!("{}{}", directory, "index.html"))
.write_str("__ASSERTS_PREFIX__index.js;DATA = __INDEX_DATA__") .write_str("__ASSETS_PREFIX__index.js;DATA = __INDEX_DATA__")
.unwrap(); .unwrap();
} else { } else {
for file in FILES { for file in FILES {
@@ -68,7 +75,7 @@ pub fn tmpdir() -> TempDir {
if *file == BIN_FILE { if *file == BIN_FILE {
tmpdir tmpdir
.child(format!("{directory}{file}")) .child(format!("{directory}{file}"))
.write_binary(b"bin\0\0123") .write_binary(b"bin\0\x00123")
.unwrap(); .unwrap();
} else { } else {
tmpdir tmpdir

View File

@@ -40,7 +40,12 @@ fn head_dir_404(server: TestServer) -> Result<(), Error> {
} }
#[rstest] #[rstest]
fn get_dir_zip(#[with(&["-A"])] server: TestServer) -> Result<(), Error> { #[case(server(&["--allow-archive"] as &[&str]))]
#[case(server(&["--allow-archive", "--compress", "none"]))]
#[case(server(&["--allow-archive", "--compress", "low"]))]
#[case(server(&["--allow-archive", "--compress", "medium"]))]
#[case(server(&["--allow-archive", "--compress", "high"]))]
fn get_dir_zip(#[case] server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(format!("{}?zip", server.url()))?; let resp = reqwest::blocking::get(format!("{}?zip", server.url()))?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
assert_eq!( assert_eq!(
@@ -123,6 +128,15 @@ fn get_dir_search3(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
Ok(()) Ok(())
} }
#[rstest]
fn get_dir_search4(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(format!("{}dir1?q=dir1&simple", server.url()))?;
assert_eq!(resp.status(), 200);
let text = resp.text().unwrap();
assert!(text.is_empty());
Ok(())
}
#[rstest] #[rstest]
fn head_dir_search(#[with(&["-A"])] server: TestServer) -> Result<(), Error> { fn head_dir_search(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
let resp = fetch!(b"HEAD", format!("{}?q={}", server.url(), "test.html")).send()?; let resp = fetch!(b"HEAD", format!("{}?q={}", server.url(), "test.html")).send()?;
@@ -138,9 +152,7 @@ fn head_dir_search(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
#[rstest] #[rstest]
fn empty_search(#[with(&["-A"])] server: TestServer) -> Result<(), Error> { fn empty_search(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(format!("{}?q=", server.url()))?; let resp = reqwest::blocking::get(format!("{}?q=", server.url()))?;
assert_eq!(resp.status(), 200); assert_resp_paths!(resp);
let paths = utils::retrieve_index_paths(&resp.text()?);
assert!(paths.is_empty());
Ok(()) Ok(())
} }
@@ -195,6 +207,18 @@ fn get_file_emoji_path(server: TestServer) -> Result<(), Error> {
Ok(()) Ok(())
} }
#[cfg(not(target_os = "windows"))]
#[rstest]
fn get_file_newline_path(server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(format!("{}file%0A1.txt", server.url()))?;
assert_eq!(resp.status(), 200);
assert_eq!(
resp.headers().get("content-disposition").unwrap(),
"inline; filename=\"file 1.txt\""
);
Ok(())
}
#[rstest] #[rstest]
fn get_file_edit(server: TestServer) -> Result<(), Error> { fn get_file_edit(server: TestServer) -> Result<(), Error> {
let resp = fetch!(b"GET", format!("{}index.html?edit", server.url())).send()?; let resp = fetch!(b"GET", format!("{}index.html?edit", server.url())).send()?;
@@ -226,9 +250,12 @@ fn options_dir(server: TestServer) -> Result<(), Error> {
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
assert_eq!( assert_eq!(
resp.headers().get("allow").unwrap(), resp.headers().get("allow").unwrap(),
"GET,HEAD,PUT,OPTIONS,DELETE,PROPFIND,COPY,MOVE" "GET,HEAD,PUT,OPTIONS,DELETE,PATCH,PROPFIND,COPY,MOVE"
);
assert_eq!(
resp.headers().get("dav").unwrap(),
"1, 2, 3, sabredav-partialupdate"
); );
assert_eq!(resp.headers().get("dav").unwrap(), "1,2");
Ok(()) Ok(())
} }
@@ -306,3 +333,19 @@ fn get_file_content_type(server: TestServer) -> Result<(), Error> {
); );
Ok(()) Ok(())
} }
#[rstest]
fn resumable_upload(#[with(&["--allow-upload"])] server: TestServer) -> Result<(), Error> {
let url = format!("{}file1", server.url());
let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
assert_eq!(resp.status(), 201);
let resp = fetch!(b"PATCH", &url)
.header("X-Update-Range", "append")
.body(b"123".to_vec())
.send()?;
assert_eq!(resp.status(), 204);
let resp = reqwest::blocking::get(url)?;
assert_eq!(resp.status(), 200);
assert_eq!(resp.text().unwrap(), "abc123");
Ok(())
}

View File

@@ -12,7 +12,7 @@ use std::process::{Command, Stdio};
#[rstest] #[rstest]
#[case(&["-a", "user:pass@/:rw", "--log-format", "$remote_user"], false)] #[case(&["-a", "user:pass@/:rw", "--log-format", "$remote_user"], false)]
#[case(&["-a", "user:pass@/:rw", "--log-format", "$remote_user", "--auth-method", "basic"], true)] #[case(&["-a", "user:pass@/:rw", "--log-format", "$remote_user"], true)]
fn log_remote_user( fn log_remote_user(
tmpdir: TempDir, tmpdir: TempDir,
port: u16, port: u16,
@@ -41,7 +41,7 @@ fn log_remote_user(
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
let mut buf = [0; 1000]; let mut buf = [0; 2048];
let buf_len = stdout.read(&mut buf)?; let buf_len = stdout.read(&mut buf)?;
let output = std::str::from_utf8(&buf[0..buf_len])?; let output = std::str::from_utf8(&buf[0..buf_len])?;
@@ -69,10 +69,12 @@ fn no_log(tmpdir: TempDir, port: u16, #[case] args: &[&str]) -> Result<(), Error
let resp = fetch!(b"GET", &format!("http://localhost:{port}")).send()?; let resp = fetch!(b"GET", &format!("http://localhost:{port}")).send()?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
let mut buf = [0; 1000]; let mut buf = [0; 2048];
let buf_len = stdout.read(&mut buf)?; let buf_len = stdout.read(&mut buf)?;
let output = std::str::from_utf8(&buf[0..buf_len])?; let output = std::str::from_utf8(&buf[0..buf_len])?;
assert_eq!(output.lines().last().unwrap(), ""); assert_eq!(output.lines().last().unwrap(), "");
child.kill()?;
Ok(()) Ok(())
} }

View File

@@ -2,7 +2,7 @@ mod fixtures;
mod utils; mod utils;
use fixtures::{server, Error, TestServer}; use fixtures::{server, Error, TestServer};
use headers::HeaderValue; use reqwest::header::HeaderValue;
use rstest::rstest; use rstest::rstest;
#[rstest] #[rstest]
@@ -23,14 +23,10 @@ fn get_file_range_beyond(server: TestServer) -> Result<(), Error> {
let resp = fetch!(b"GET", format!("{}index.html", server.url())) let resp = fetch!(b"GET", format!("{}index.html", server.url()))
.header("range", HeaderValue::from_static("bytes=12-20")) .header("range", HeaderValue::from_static("bytes=12-20"))
.send()?; .send()?;
assert_eq!(resp.status(), 206); assert_eq!(resp.status(), 416);
assert_eq!( assert_eq!(resp.headers().get("content-range").unwrap(), "bytes */18");
resp.headers().get("content-range").unwrap(),
"bytes 12-17/18"
);
assert_eq!(resp.headers().get("accept-ranges").unwrap(), "bytes"); assert_eq!(resp.headers().get("accept-ranges").unwrap(), "bytes");
assert_eq!(resp.headers().get("content-length").unwrap(), "6"); assert_eq!(resp.headers().get("content-length").unwrap(), "0");
assert_eq!(resp.text()?, "x.html");
Ok(()) Ok(())
} }

View File

@@ -7,6 +7,8 @@ use predicates::str::contains;
use reqwest::blocking::ClientBuilder; use reqwest::blocking::ClientBuilder;
use rstest::rstest; use rstest::rstest;
use crate::fixtures::port;
/// Can start the server with TLS and receive encrypted responses. /// Can start the server with TLS and receive encrypted responses.
#[rstest] #[rstest]
#[case(server(&[ #[case(server(&[
@@ -33,8 +35,16 @@ fn tls_works(#[case] server: TestServer) -> Result<(), Error> {
/// Wrong path for cert throws error. /// Wrong path for cert throws error.
#[rstest] #[rstest]
fn wrong_path_cert() -> Result<(), Error> { fn wrong_path_cert() -> Result<(), Error> {
let port = port().to_string();
Command::cargo_bin("dufs")? Command::cargo_bin("dufs")?
.args(["--tls-cert", "wrong", "--tls-key", "tests/data/key.pem"]) .args([
"--tls-cert",
"wrong",
"--tls-key",
"tests/data/key.pem",
"--port",
&port,
])
.assert() .assert()
.failure() .failure()
.stderr(contains("Failed to access `wrong`")); .stderr(contains("Failed to access `wrong`"));
@@ -45,8 +55,16 @@ fn wrong_path_cert() -> Result<(), Error> {
/// Wrong paths for key throws errors. /// Wrong paths for key throws errors.
#[rstest] #[rstest]
fn wrong_path_key() -> Result<(), Error> { fn wrong_path_key() -> Result<(), Error> {
let port = port().to_string();
Command::cargo_bin("dufs")? Command::cargo_bin("dufs")?
.args(["--tls-cert", "tests/data/cert.pem", "--tls-key", "wrong"]) .args([
"--tls-cert",
"tests/data/cert.pem",
"--tls-key",
"wrong",
"--port",
&port,
])
.assert() .assert()
.failure() .failure()
.stderr(contains("Failed to access `wrong`")); .stderr(contains("Failed to access `wrong`"));

View File

@@ -20,7 +20,7 @@ macro_rules! assert_resp_paths {
#[macro_export] #[macro_export]
macro_rules! fetch { macro_rules! fetch {
($method:literal, $url:expr) => { ($method:literal, $url:expr) => {
reqwest::blocking::Client::new().request(hyper::Method::from_bytes($method)?, $url) reqwest::blocking::Client::new().request(reqwest::Method::from_bytes($method)?, $url)
}; };
} }
@@ -59,7 +59,8 @@ pub fn encode_uri(v: &str) -> String {
parts.join("/") parts.join("/")
} }
fn retrive_json(content: &str) -> Option<Value> { #[allow(dead_code)]
pub fn retrive_json(content: &str) -> Option<Value> {
let lines: Vec<&str> = content.lines().collect(); let lines: Vec<&str> = content.lines().collect();
let line = lines.iter().find(|v| v.contains("DATA ="))?; let line = lines.iter().find(|v| v.contains("DATA ="))?;
let line_col = line.find("DATA =").unwrap() + 6; let line_col = line.find("DATA =").unwrap() + 6;