Compare commits

...

36 Commits

Author SHA1 Message Date
sigoden
e9383d71ed chore(release): version v0.15.0 2022-06-10 08:41:51 +08:00
sigoden
8258dabe4a fix: query dir param 2022-06-10 08:00:27 +08:00
Joe Koop
0e236b61f6 feat: add empty state placeholder to page(#30)
* added "Empty folder" text to the page

* added text for nonexistent directory and no search results
2022-06-10 07:41:09 +08:00
sigoden
09788ed031 chore: update favicon 2022-06-09 22:49:01 +08:00
Joe Koop
46ebe978ae feat: add basic dark theme (#29) 2022-06-09 22:16:43 +08:00
sigoden
e01f2030e1 chore: optimize code 2022-06-09 21:35:52 +08:00
Joe Koop
8d03ec151a fix: encode webdav href as uri (#28)
* Revert "fix: filename xml escaping"

This reverts commit ce154d9ebc.

* webdav filenames are fixed
2022-06-09 21:28:35 +08:00
sigoden
870e92e306 chore(release): version v0.14.0 2022-06-07 09:02:43 +08:00
sigoden
261c8b6ee5 feat: add favicon (#27)
Return favicon only if requested, avoid 404 errors

close #16
2022-06-07 08:59:44 +08:00
sigoden
5ce7bde05c fix: send index page with content-type (#26) 2022-06-06 11:20:42 +08:00
sigoden
63a7b530bb feat: support ipv6 (#25) 2022-06-06 10:52:12 +08:00
sigoden
7481db5071 chore(release): version v0.13.2 2022-06-06 08:03:00 +08:00
sigoden
b0cc901416 fix: escape path-prefix/url-prefix different 2022-06-06 08:00:26 +08:00
Joe Koop
ce154d9ebc fix: filename xml escaping 2022-06-06 07:54:12 +08:00
sigoden
7c4c264206 chore(release): version v0.13.1 2022-06-06 07:15:48 +08:00
sigoden
c1e0c6bb2f refactor: use logger (#22) 2022-06-06 07:13:22 +08:00
sigoden
f138915f20 fix: escape filename (#21)
close #19
2022-06-06 06:51:35 +08:00
sigoden
a0b413ef30 chore(release): version v0.13.0 2022-06-05 09:33:10 +08:00
sigoden
fc13d41c17 chore(docker): use scratch as docker base image 2022-06-05 09:30:26 +08:00
sigoden
882a9ae716 fix: ctrl+c not exit sometimes 2022-06-05 09:22:24 +08:00
sigoden
5578ee9190 feat: add webdav proppatch handler (#18) 2022-06-05 07:35:05 +08:00
Ryan Russell
916602ae2d chore: fix typos (#17)
* chore(server.rs): fix `retrieve_listening_addrs`

Signed-off-by: Ryan Russell <git@ryanrussell.org>

* docs(index.js): Fix `breadcrumb`

Signed-off-by: Ryan Russell <git@ryanrussell.org>
2022-06-05 06:12:37 +08:00
sigoden
2f40313a54 feat: use digest auth (#14)
* feat: switch to digest auth

* implement digest auth

* cargo fmt

* no lock
2022-06-05 00:09:21 +08:00
sigoden
05155aa532 feat: implement more webdav methods (#13)
Now you can mount the server as webdav driver on windows.
2022-06-04 19:08:18 +08:00
sigoden
4605701366 chore(release): version v0.12.1 2022-06-04 13:39:03 +08:00
sigoden
b7c550e09b chore(release): version v0.12.0 2022-06-04 13:21:46 +08:00
sigoden
fff8fc3ac5 chore: incorrect icon of uploaded file 2022-06-04 13:20:39 +08:00
sigoden
0616602659 feat: remove unzip uploaded feature (#11)
Use drag&drop/webdav to upload folders
2022-06-04 13:01:17 +08:00
sigoden
0a64762df4 feat: support webdav (#10) 2022-06-04 12:51:56 +08:00
sigoden
f103e15e15 chore(release): version v0.11.0 2022-06-03 11:19:57 +08:00
sigoden
9dda55b7c8 feat: listen 0.0.0.0 by default 2022-06-03 11:19:16 +08:00
sigoden
c3dd0f0ec5 feat: support gracefully shutdown server 2022-06-03 11:00:12 +08:00
sigoden
4167e5c07e chore(ci): publish to docker
* ci: publish to docker

* update release.yaml

* update Dockerfile
2022-06-03 10:36:06 +08:00
sigoden
f66e129985 chore(release): version v0.10.1 2022-06-03 07:21:15 +08:00
sigoden
7c3970480e chore: add type comments to assets/js 2022-06-03 07:18:12 +08:00
sigoden
34bc8d411a fix: panic when bind already used port 2022-06-03 07:15:41 +08:00
14 changed files with 1109 additions and 318 deletions

13
.dockerignore Normal file
View File

@@ -0,0 +1,13 @@
# Directories
/.git/
/.github/
/target/
/examples/
/docs/
/benches/
/tmp/
# Files
.gitignore
*.md
LICENSE*

View File

@@ -6,8 +6,10 @@ on:
- v[0-9]+.[0-9]+.[0-9]+*
jobs:
all:
name: All
release:
name: Publish to Github Releases
outputs:
rc: ${{ steps.check-tag.outputs.rc }}
strategy:
matrix:
@@ -124,3 +126,40 @@ jobs:
prerelease: ${{ steps.check-tag.outputs.rc == 'true' }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
docker:
name: Publish to Docker Hub
if: startsWith(github.ref, 'refs/tags/')
runs-on: ubuntu-latest
needs: release
steps:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to DockerHub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push
id: docker_build
uses: docker/build-push-action@v2
with:
push: ${{ needs.release.outputs.rc == 'false' }}
tags: ${{ github.repository }}:latest, ${{ github.repository }}:${{ github.ref_name }}
publish-crate:
name: Publish to crates.io
if: ${{ needs.release.outputs.rc == 'false' }}
runs-on: ubuntu-latest
needs: release
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
- name: Publish
env:
CARGO_REGISTRY_TOKEN: ${{ secrets.CRATES_IO_API_TOKEN }}
run: cargo publish

View File

@@ -2,6 +2,78 @@
All notable changes to this project will be documented in this file.
## [0.15.0] - 2022-06-10
### Bug Fixes
- Encode webdav href as uri ([#28](https://github.com/sigoden/duf/issues/28))
- Query dir param
### Features
- Add basic dark theme ([#29](https://github.com/sigoden/duf/issues/29))
- Add empty state placeholder to page ([#30](https://github.com/sigoden/duf/issues/30))
## [0.14.0] - 2022-06-07
### Bug Fixes
- Send index page with content-type ([#26](https://github.com/sigoden/duf/issues/26))
### Features
- Support ipv6 ([#25](https://github.com/sigoden/duf/issues/25))
- Add favicon ([#27](https://github.com/sigoden/duf/issues/27))
## [0.13.2] - 2022-06-06
### Bug Fixes
- Filename xml escaping
- Escape path-prefix/url-prefix different
## [0.13.1] - 2022-06-05
### Bug Fixes
- Escape filename ([#21](https://github.com/sigoden/duf/issues/21))
### Refactor
- Use logger ([#22](https://github.com/sigoden/duf/issues/22))
## [0.13.0] - 2022-06-05
### Bug Fixes
- Ctrl+c not exit sometimes
### Features
- Implement more webdav methods ([#13](https://github.com/sigoden/duf/issues/13))
- Use digest auth ([#14](https://github.com/sigoden/duf/issues/14))
- Add webdav proppatch handler ([#18](https://github.com/sigoden/duf/issues/18))
## [0.12.1] - 2022-06-04
### Features
- Support webdav ([#10](https://github.com/sigoden/duf/issues/10))
- Remove unzip uploaded feature ([#11](https://github.com/sigoden/duf/issues/11))
## [0.11.0] - 2022-06-03
### Features
- Support gracefully shutdown server
- Listen 0.0.0.0 by default
## [0.10.1] - 2022-06-02
### Bug Fixes
- Panic when bind already used port
## [0.10.0] - 2022-06-02
### Bug Fixes
@@ -19,10 +91,6 @@ All notable changes to this project will be documented in this file.
- Change auth logic/options
- Improve ui
### Miscellaneous Tasks
- Insert cli output
### Refactor
- Small improvement
@@ -52,12 +120,6 @@ All notable changes to this project will be documented in this file.
- Add some headers to res
- Support render-index/render-spa
### Miscellaneous Tasks
- Move src/assets out of src
- Update description
- Upgrade version
## [0.7.0] - 2022-05-31
### Bug Fixes
@@ -71,10 +133,6 @@ All notable changes to this project will be documented in this file.
- Drag and drop uploads, upload folder
### Miscellaneous Tasks
- Upgrade version
## [0.6.0] - 2022-05-31
### Features
@@ -83,10 +141,6 @@ All notable changes to this project will be documented in this file.
- Distinct upload and delete operation
- Support range requests
### Miscellaneous Tasks
- Upgrade version
### Refactor
- Improve code quality
@@ -99,12 +153,6 @@ All notable changes to this project will be documented in this file.
- Add no-auth-read options
- Unzip zip file when uploaded
### Miscellaneous Tasks
- Reorganize web static files
- Rename src/static to src/assets
- Upgrade version
## [0.4.0] - 2022-05-29
### Features
@@ -112,10 +160,6 @@ All notable changes to this project will be documented in this file.
- Replace --static option to --no-edit
- Add cors
### Miscellaneous Tasks
- Upgrade version
## [0.3.0] - 2022-05-29
### Documentation
@@ -162,10 +206,6 @@ All notable changes to this project will be documented in this file.
- Add logger
- Download folder as zip file
### Miscellaneous Tasks
- Update cargo metadata
## [0.1.0] - 2022-05-26
### Bug Fixes
@@ -183,11 +223,6 @@ All notable changes to this project will be documented in this file.
- Support delete operation
- Remove parent path
### Miscellaneous Tasks
- Add readme and license
- Update cargo metadata
### Styling
- Cargo fmt

166
Cargo.lock generated
View File

@@ -286,17 +286,21 @@ dependencies = [
[[package]]
name = "duf"
version = "0.10.0"
version = "0.15.0"
dependencies = [
"async-walkdir",
"async_zip",
"base64",
"chrono",
"clap",
"env_logger",
"futures",
"get_if_addrs",
"headers",
"hyper",
"lazy_static",
"log",
"md5",
"mime_guess",
"percent-encoding",
"rustls",
@@ -307,6 +311,19 @@ dependencies = [
"tokio-rustls",
"tokio-stream",
"tokio-util",
"urlencoding",
"uuid",
"xml-rs",
]
[[package]]
name = "env_logger"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b2cf0344971ee6c64c31be0d530793fba457d322dfec2810c453d0ef228f9c3"
dependencies = [
"humantime",
"log",
]
[[package]]
@@ -326,13 +343,11 @@ dependencies = [
[[package]]
name = "flate2"
version = "1.0.23"
version = "1.0.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b39522e96686d38f4bc984b9198e3a0613264abaebaff2c5c918bfa6b6da09af"
checksum = "f82b0f4c27ad9f8bfd1f3208d882da2b09c301bc1c828fd3a00d0216d2fbbff6"
dependencies = [
"cfg-if",
"crc32fast",
"libc",
"miniz_oxide",
]
@@ -484,6 +499,17 @@ dependencies = [
"libc",
]
[[package]]
name = "getrandom"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9be70c98951c83b8d2f8f60d7065fa6d5146873094452a1008da8c2f1e4205ad"
dependencies = [
"cfg-if",
"libc",
"wasi 0.10.0+wasi-snapshot-preview1",
]
[[package]]
name = "hashbrown"
version = "0.11.2"
@@ -526,9 +552,9 @@ dependencies = [
[[package]]
name = "http"
version = "0.2.7"
version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ff8670570af52249509a86f5e3e18a08c60b177071826898fde8997cf5f6bfbb"
checksum = "75f43d41e26995c17e71ee126451dd3941010b0514a81a9d11f3b341debc2399"
dependencies = [
"bytes",
"fnv",
@@ -558,6 +584,12 @@ version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421"
[[package]]
name = "humantime"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
[[package]]
name = "hyper"
version = "0.14.19"
@@ -647,15 +679,21 @@ dependencies = [
[[package]]
name = "lzma-sys"
version = "0.1.17"
version = "0.1.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bdb4b7c3eddad11d3af9e86c487607d2d2442d185d848575365c4856ba96d619"
checksum = "e06754c4acf47d49c727d5665ca9fb828851cda315ed3bd51edd148ef78a8772"
dependencies = [
"cc",
"libc",
"pkg-config",
]
[[package]]
name = "md5"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771"
[[package]]
name = "memchr"
version = "2.5.0"
@@ -680,9 +718,9 @@ dependencies = [
[[package]]
name = "miniz_oxide"
version = "0.5.1"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2b29bd4bc3f33391105ebee3589c19197c4271e3e5a9ec9bfe8127eeff8f082"
checksum = "6f5c75688da582b8ffc1f1799e9db273f32133c49e048f614d22ec3256773ccc"
dependencies = [
"adler",
]
@@ -770,6 +808,12 @@ version = "0.3.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1df8c4ec4b0627e53bdf214615ad287367e482558cf84b109250b37464dc03ae"
[[package]]
name = "ppv-lite86"
version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eb9f9e6e233e5c4a35559a617bf40a4ec447db2e84c20b55a6f83167b7e57872"
[[package]]
name = "proc-macro2"
version = "1.0.39"
@@ -788,6 +832,36 @@ dependencies = [
"proc-macro2",
]
[[package]]
name = "rand"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
dependencies = [
"libc",
"rand_chacha",
"rand_core",
]
[[package]]
name = "rand_chacha"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
dependencies = [
"ppv-lite86",
"rand_core",
]
[[package]]
name = "rand_core"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7"
dependencies = [
"getrandom",
]
[[package]]
name = "ring"
version = "0.16.20"
@@ -882,6 +956,15 @@ dependencies = [
"digest",
]
[[package]]
name = "signal-hook-registry"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e51e73328dc4ac0c7ccbda3a494dfa03df1de2f46018127f60c693f2648455b0"
dependencies = [
"libc",
]
[[package]]
name = "slab"
version = "0.4.6"
@@ -906,9 +989,9 @@ checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d"
[[package]]
name = "syn"
version = "1.0.95"
version = "1.0.96"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fbaf6116ab8924f39d52792136fb74fd60a80194cf1b1c6ffa6453eef1c3f942"
checksum = "0748dd251e24453cb8717f0354206b91557e4ec8703673a4b30208f2abaf1ebf"
dependencies = [
"proc-macro2",
"quote",
@@ -954,9 +1037,9 @@ dependencies = [
[[package]]
name = "tokio"
version = "1.18.2"
version = "1.19.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4903bf0427cf68dddd5aa6a93220756f8be0c34fcfa9f5e6191e103e15a31395"
checksum = "c51a52ed6686dd62c320f9b89299e9dfb46f730c7a48e635c19f21d116cb1439"
dependencies = [
"bytes",
"libc",
@@ -965,6 +1048,7 @@ dependencies = [
"num_cpus",
"once_cell",
"pin-project-lite",
"signal-hook-registry",
"socket2",
"tokio-macros",
"winapi 0.3.9",
@@ -972,9 +1056,9 @@ dependencies = [
[[package]]
name = "tokio-macros"
version = "1.7.0"
version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b557f72f448c511a979e2564e55d74e6c4432fc96ff4f6241bc6bded342643b7"
checksum = "9724f9a975fb987ef7a3cd9be0350edcbe130698af5b8f7a631e23d42d052484"
dependencies = [
"proc-macro2",
"quote",
@@ -994,9 +1078,9 @@ dependencies = [
[[package]]
name = "tokio-stream"
version = "0.1.8"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "50145484efff8818b5ccd256697f36863f587da82cf8b409c53adf1e840798e3"
checksum = "df54d54117d6fdc4e4fea40fe1e4e566b3505700e148a6827e59b34b0d2600d9"
dependencies = [
"futures-core",
"pin-project-lite",
@@ -1005,9 +1089,9 @@ dependencies = [
[[package]]
name = "tokio-util"
version = "0.7.2"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f988a1a1adc2fb21f9c12aa96441da33a1728193ae0b95d2be22dbd17fcb4e5c"
checksum = "cc463cd8deddc3770d20f9852143d50bf6094e640b485cb2e189a2099085ff45"
dependencies = [
"bytes",
"futures-core",
@@ -1031,21 +1115,9 @@ checksum = "5d0ecdcb44a79f0fe9844f0c4f33a342cbcbb5117de8001e6ba0dc2351327d09"
dependencies = [
"cfg-if",
"pin-project-lite",
"tracing-attributes",
"tracing-core",
]
[[package]]
name = "tracing-attributes"
version = "0.1.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cc6b8ad3567499f98a1db7a752b07a7c8c7c7c34c332ec00effb2b0027974b7c"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "tracing-core"
version = "0.1.26"
@@ -1088,6 +1160,22 @@ version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a"
[[package]]
name = "urlencoding"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68b90931029ab9b034b300b797048cf23723400aa757e8a2bfb9d748102f9821"
[[package]]
name = "uuid"
version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c6d5d669b51467dcf7b2f1a796ce0f955f05f01cafda6c19d6e95f730df29238"
dependencies = [
"getrandom",
"rand",
]
[[package]]
name = "version_check"
version = "0.9.4"
@@ -1268,10 +1356,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680"
[[package]]
name = "xz2"
version = "0.1.6"
name = "xml-rs"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c179869f34fc7c01830d3ce7ea2086bc3a07e0d35289b667d0a8bf910258926c"
checksum = "d2d7d3948613f75c98fd9328cfdcc45acc4d360655289d0a7d4ec931392200a3"
[[package]]
name = "xz2"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "388c44dc09d76f1536602ead6d325eb532f5c122f17782bd57fb47baeeb767e2"
dependencies = [
"lzma-sys",
]

View File

@@ -1,20 +1,20 @@
[package]
name = "duf"
version = "0.10.0"
version = "0.15.0"
edition = "2021"
authors = ["sigoden <sigoden@gmail.com>"]
description = "Duf is a fully functional file server."
description = "Duf is a simple file server."
license = "MIT OR Apache-2.0"
homepage = "https://github.com/sigoden/duf"
repository = "https://github.com/sigoden/duf"
autotests = false
categories = ["command-line-utilities", "web-programming::http-server"]
keywords = ["static", "file", "server", "http", "cli"]
keywords = ["static", "file", "server", "webdav", "cli"]
[dependencies]
clap = { version = "3", default-features = false, features = ["std", "cargo"] }
chrono = "0.4"
tokio = { version = "1", features = ["rt-multi-thread", "macros", "fs", "io-util"]}
tokio = { version = "1", features = ["rt-multi-thread", "macros", "fs", "io-util", "signal"]}
tokio-rustls = "0.23"
tokio-stream = { version = "0.1", features = ["net"] }
tokio-util = { version = "0.7", features = ["codec", "io-util"] }
@@ -31,6 +31,13 @@ mime_guess = "2.0.4"
get_if_addrs = "0.5.3"
rustls = { version = "0.20", default-features = false, features = ["tls12"] }
rustls-pemfile = "1"
md5 = "0.7.0"
lazy_static = "1.4.0"
uuid = { version = "1.1.1", features = ["v4", "fast-rng"] }
urlencoding = "2.1.0"
xml-rs = "0.8"
env_logger = { version = "0.9.0", default-features = false, features = ["humantime"] }
log = "0.4.17"
[profile.release]
lto = true

10
Dockerfile Normal file
View File

@@ -0,0 +1,10 @@
FROM rust:1.61 as builder
RUN rustup target add x86_64-unknown-linux-musl
RUN apt-get update && apt-get install --no-install-recommends -y musl-tools
WORKDIR /app
COPY . .
RUN cargo build --target x86_64-unknown-linux-musl --release
FROM scratch
COPY --from=builder /app/target/x86_64-unknown-linux-musl/release/duf /bin/
ENTRYPOINT ["/bin/duf"]

View File

@@ -3,7 +3,7 @@
[![CI](https://github.com/sigoden/duf/actions/workflows/ci.yaml/badge.svg)](https://github.com/sigoden/duf/actions/workflows/ci.yaml)
[![Crates](https://img.shields.io/crates/v/duf.svg)](https://crates.io/crates/duf)
Duf is a fully functional file server.
Duf is a simple file server. Support static serve, search, upload, webdav...
![demo](https://user-images.githubusercontent.com/4012553/171526189-09afc2de-793f-4216-b3d5-31ea408d3610.png)
@@ -11,13 +11,12 @@ Duf is a fully functional file server.
- Serve static files
- Download folder as zip file
- Search files
- Upload files and folders (Drag & Drop)
- Delete files
- Basic authentication
- Upload zip file then unzip
- Search files
- Partial responses (Parallel/Resume download)
- Support https/tls
- Authentication
- Support https
- Support webdav
- Easy to use with curl
## Install
@@ -28,6 +27,12 @@ Duf is a fully functional file server.
cargo install duf
```
### With docker
```
docker run -v /tmp:/tmp -p 5000:5000 --rm -it docker.io/sigoden/duf /tmp
```
### Binaries on macOS, Linux, Windows
Download from [Github Releases](https://github.com/sigoden/duf/releases), unzip and add duf to your $PATH.
@@ -35,7 +40,7 @@ Download from [Github Releases](https://github.com/sigoden/duf/releases), unzip
## CLI
```
Duf is a fully functional file server.
Duf is a simple file server.
USAGE:
duf [OPTIONS] [path]
@@ -50,7 +55,7 @@ OPTIONS:
--allow-delete Allow delete files/folders
--allow-symlink Allow symlink to files/folders outside root directory
--allow-upload Allow upload files/folders
-b, --bind <address> Specify bind address [default: 127.0.0.1]
-b, --bind <address> Specify bind address [default: 0.0.0.0]
--cors Enable CORS, sets `Access-Control-Allow-Origin: *`
-h, --help Print help information
-p, --port <port> Specify port to listen on [default: 5000]
@@ -76,18 +81,10 @@ duf
duf folder_name
```
Listen on all Interfaces and port 3000
```
duf -b 0.0.0.0 -p 3000
```
Allow all operations such as upload, delete
```sh
duf --allow-all
# or
duf -A
```
Only allow upload operation
@@ -102,7 +99,7 @@ Serve a single page application (SPA)
duf --render-spa
```
Serve https
Use https
```
duf --tls-cert my.crt --tls-key my.key
@@ -113,8 +110,6 @@ duf --tls-cert my.crt --tls-key my.key
Download a file
```
curl http://127.0.0.1:5000/some-file
curl -o some-file2 http://127.0.0.1:5000/some-file
```
Download a folder as zip file
@@ -129,12 +124,6 @@ Upload a file
curl --upload-file some-file http://127.0.0.1:5000/some-file
```
Unzip zip file when uploading
```
curl --upload-file some-folder.zip http://127.0.0.1:5000/some-folder.zip?unzip
```
Delete a file/folder
```

BIN
assets/favicon.ico Executable file

Binary file not shown.

After

Width:  |  Height:  |  Size: 9.1 KiB

View File

@@ -97,6 +97,11 @@ body {
padding: 0 1em;
}
.empty-folder {
display: block;
padding-top: 1rem;
}
.uploaders-table th,
.paths-table th {
text-align: left;
@@ -173,3 +178,39 @@ body {
.uploader {
padding-right: 1em;
}
/* dark theme */
@media (prefers-color-scheme: dark) {
body {
background-color: #000;
}
html,
.breadcrumb>b,
.searchbar #search {
color: #fff;
}
.uploaders-table th,
.paths-table th {
color: #ddd;
}
svg,
.path svg {
fill: #d0e6ff;
}
.searchbar {
background-color: #111;
border-color: #fff6;
}
.searchbar svg {
fill: #fff6;
}
.path a {
color: #3191ff;
}
}

View File

@@ -1,10 +1,44 @@
/**
* @typedef {object} PathItem
* @property {"Dir"|"SymlinkDir"|"File"|"SymlinkFile"} path_type
* @property {boolean} is_symlink
* @property {string} name
* @property {number} mtime
* @property {number} size
*/
// https://stackoverflow.com/a/901144/3642588
const params = new Proxy(new URLSearchParams(window.location.search), {
get: (searchParams, prop) => searchParams.get(prop),
});
const dirEmptyNote = params.q ? 'No results' : DATA.dir_exists ? 'Empty folder' : 'Folder will be created when a file is uploaded';
/**
* @type Element
*/
let $pathsTable, $pathsTableBody, $uploadersTable;
/**
* @type string
*/
let baseDir;
class Uploader {
/**
* @type number
*/
idx;
/**
* @type File
*/
file;
/**
* @type string
*/
name;
/**
* @type Element
*/
$uploadStatus;
static globalIdx = 0;
constructor(file, dirs) {
@@ -16,9 +50,6 @@ class Uploader {
upload() {
const { file, idx, name } = this;
let url = getUrl(name);
if (file.name == baseDir + ".zip") {
url += "?unzip";
}
$uploadersTable.insertAdjacentHTML("beforeend", `
<tr id="upload${idx}" class="uploader">
<td class="path cell-name">
@@ -29,12 +60,13 @@ class Uploader {
</tr>`);
$uploadersTable.classList.remove("hidden");
this.$uploadStatus = document.getElementById(`uploadStatus${idx}`);
document.querySelector('.main i.empty-folder').remove();
const ajax = new XMLHttpRequest();
ajax.upload.addEventListener("progress", e => this.progress(e), false);
ajax.addEventListener("readystatechange", () => {
if(ajax.readyState === 4) {
if (ajax.status == 200) {
if (ajax.status >= 200 && ajax.status < 300) {
this.complete();
} else {
this.fail();
@@ -61,6 +93,10 @@ class Uploader {
}
}
/**
* Add breadcrumb
* @param {string} value
*/
function addBreadcrumb(value) {
const $breadcrumb = document.querySelector(".breadcrumb");
const parts = value.split("/").filter(v => !!v);
@@ -83,6 +119,11 @@ function addBreadcrumb(value) {
}
}
/**
* Add pathitem
* @param {PathItem} file
* @param {number} index
*/
function addPath(file, index) {
const url = getUrl(file.name)
let actionDelete = "";
@@ -126,6 +167,11 @@ function addPath(file, index) {
</tr>`)
}
/**
* Delete pathitem
* @param {number} index
* @returns
*/
async function deletePath(index) {
const file = DATA.paths[index];
if (!file) return;
@@ -136,11 +182,13 @@ async function deletePath(index) {
const res = await fetch(getUrl(file.name), {
method: "DELETE",
});
if (res.status === 200) {
if (res.status >= 200 && res.status < 300) {
document.getElementById(`addPath${index}`).remove();
DATA.paths[index] = null;
if (!DATA.paths.find(v => !!v)) {
$pathsTable.classList.add("hidden");
document.querySelector('.main').insertAdjacentHTML("afterbegin", '<i class="empty-folder"></i>');
document.querySelector('.main .empty-folder').textContent = dirEmptyNote;
}
} else {
throw new Error(await res.text())
@@ -236,6 +284,10 @@ function ready() {
$pathsTableBody = document.querySelector(".paths-table tbody");
$uploadersTable = document.querySelector(".uploaders-table");
if (params.q) {
document.getElementById('search').value = params.q;
}
addBreadcrumb(DATA.breadcrumb);
if (Array.isArray(DATA.paths)) {
const len = DATA.paths.length;
@@ -245,6 +297,10 @@ function ready() {
for (let i = 0; i < len; i++) {
addPath(DATA.paths[i], i);
}
if (len == 0) {
document.querySelector('.main').insertAdjacentHTML("afterbegin", '<i class="empty-folder"></i>');
document.querySelector('.main .empty-folder').textContent = dirEmptyNote;
}
}
if (DATA.allow_upload) {
dropzone();

View File

@@ -1,10 +1,11 @@
use clap::crate_description;
use clap::{Arg, ArgMatches};
use rustls::{Certificate, PrivateKey};
use std::net::SocketAddr;
use std::net::{IpAddr, SocketAddr};
use std::path::{Path, PathBuf};
use std::{env, fs, io};
use crate::auth::parse_auth;
use crate::BoxResult;
const ABOUT: &str = concat!("\n", crate_description!()); // Add extra newline.
@@ -16,7 +17,7 @@ fn app() -> clap::Command<'static> {
Arg::new("address")
.short('b')
.long("bind")
.default_value("127.0.0.1")
.default_value("0.0.0.0")
.help("Specify bind address")
.value_name("address"),
)
@@ -110,11 +111,11 @@ pub fn matches() -> ArgMatches {
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Args {
pub address: String,
pub port: u16,
pub addr: SocketAddr,
pub path: PathBuf,
pub path_prefix: Option<String>,
pub auth: Option<String>,
pub path_prefix: String,
pub uri_prefix: String,
pub auth: Option<(String, String)>,
pub no_auth_access: bool,
pub allow_upload: bool,
pub allow_delete: bool,
@@ -131,12 +132,24 @@ impl Args {
/// If a parsing error occurred, exit the process and print out informative
/// error message to user.
pub fn parse(matches: ArgMatches) -> BoxResult<Args> {
let address = matches.value_of("address").unwrap_or_default().to_owned();
let ip = matches.value_of("address").unwrap_or_default();
let port = matches.value_of_t::<u16>("port")?;
let addr = to_addr(ip, port)?;
let path = Args::parse_path(matches.value_of_os("path").unwrap_or_default())?;
let path_prefix = matches.value_of("path-prefix").map(|v| v.to_owned());
let path_prefix = matches
.value_of("path-prefix")
.map(|v| v.trim_matches('/').to_owned())
.unwrap_or_default();
let uri_prefix = if path_prefix.is_empty() {
"/".to_owned()
} else {
format!("/{}/", &path_prefix)
};
let cors = matches.is_present("cors");
let auth = matches.value_of("auth").map(|v| v.to_owned());
let auth = match matches.value_of("auth") {
Some(auth) => Some(parse_auth(auth)?),
None => None,
};
let no_auth_access = matches.is_present("no-auth-access");
let allow_upload = matches.is_present("allow-all") || matches.is_present("allow-upload");
let allow_delete = matches.is_present("allow-all") || matches.is_present("allow-delete");
@@ -153,10 +166,10 @@ impl Args {
};
Ok(Args {
address,
port,
addr,
path,
path_prefix,
uri_prefix,
auth,
no_auth_access,
cors,
@@ -173,7 +186,7 @@ impl Args {
fn parse_path<P: AsRef<Path>>(path: P) -> BoxResult<PathBuf> {
let path = path.as_ref();
if !path.exists() {
bail!("error: path \"{}\" doesn't exist", path.display());
return Err(format!("Path `{}` doesn't exist", path.display()).into());
}
env::current_dir()
@@ -181,32 +194,17 @@ impl Args {
p.push(path); // If path is absolute, it replaces the current path.
std::fs::canonicalize(p)
})
.or_else(|err| {
bail!(
"error: failed to access path \"{}\": {}",
path.display(),
err,
)
})
}
/// Construct socket address from arguments.
pub fn address(&self) -> BoxResult<SocketAddr> {
format!("{}:{}", self.address, self.port)
.parse()
.or_else(|err| {
bail!(
"error: invalid address {}:{} : {}",
self.address,
self.port,
err,
)
})
.map_err(|err| format!("Failed to access path `{}`: {}", path.display(), err,).into())
}
}
/// Parse `ip` into an `IpAddr` (IPv4 or IPv6) and pair it with `port`.
/// Propagates the underlying `AddrParseError` when `ip` is not a valid IP.
fn to_addr(ip: &str, port: u16) -> BoxResult<SocketAddr> {
    let ip: IpAddr = ip.parse()?;
    Ok(SocketAddr::new(ip, port))
}
// Load public certificate from file.
pub fn load_certs(filename: &str) -> BoxResult<Vec<Certificate>> {
fn load_certs(filename: &str) -> BoxResult<Vec<Certificate>> {
// Open certificate file.
let certfile =
fs::File::open(&filename).map_err(|e| format!("Failed to open {}: {}", &filename, e))?;
@@ -221,7 +219,7 @@ pub fn load_certs(filename: &str) -> BoxResult<Vec<Certificate>> {
}
// Load private key from file.
pub fn load_private_key(filename: &str) -> BoxResult<PrivateKey> {
fn load_private_key(filename: &str) -> BoxResult<PrivateKey> {
// Open keyfile.
let keyfile =
fs::File::open(&filename).map_err(|e| format!("Failed to open {}: {}", &filename, e))?;

209
src/auth.rs Normal file
View File

@@ -0,0 +1,209 @@
use headers::HeaderValue;
use lazy_static::lazy_static;
use md5::Context;
use std::{
collections::HashMap,
time::{SystemTime, UNIX_EPOCH},
};
use uuid::Uuid;
use crate::BoxResult;
const REALM: &str = "DUF";
lazy_static! {
    // Per-process seed for nonce hashes: an MD5 context pre-fed with a random
    // UUID and the process id. `validate_nonce` clones this and mixes in the
    // nonce timestamp, so nonces minted by another process (or a previous run
    // of this one) fail validation.
    static ref NONCESTARTHASH: Context = {
        let mut h = Context::new();
        h.consume(Uuid::new_v4().as_bytes());
        h.consume(std::process::id().to_be_bytes());
        h
    };
}
/// Build the value of a `WWW-Authenticate` response header advertising
/// MD5 digest authentication with a freshly minted nonce.
///
/// When `stale` is true the header carries `stale=true`, telling the client
/// that its nonce expired and it may retry without re-prompting the user.
pub fn generate_www_auth(stale: bool) -> String {
    let stale_flag = match stale {
        true => "stale=true,",
        false => "",
    };
    let nonce = create_nonce();
    format!(
        "Digest realm=\"{}\",nonce=\"{}\",{}qop=\"auth\",algorithm=\"MD5\"",
        REALM, nonce, stale_flag
    )
}
/// Parse a `user:pass` credential string.
///
/// Splits on the FIRST ':' only, so the password itself may contain ':'
/// (the previous exact two-way split rejected such passwords). A value with
/// no ':' at all is still an error.
///
/// Returns the username together with the pre-hashed HA1 value
/// `md5(user:REALM:pass)` that `valid_digest` later compares against.
pub fn parse_auth(auth: &str) -> BoxResult<(String, String)> {
    // `split_once` keeps everything after the first separator as the
    // password; stable since Rust 1.52 (the Dockerfile builds with 1.61).
    let (user, pass) = auth.trim().split_once(':').ok_or("Invalid auth value")?;
    let mut h = Context::new();
    h.consume(format!("{}:{}:{}", user, REALM, pass).as_bytes());
    Ok((user.to_owned(), format!("{:x}", h.compute())))
}
/// Validate an `Authorization: Digest ...` request header.
///
/// * `header_value` - the raw Authorization header sent by the client.
/// * `method` - the HTTP method of the request (hashed into HA2).
/// * `auth_user` - the expected username.
/// * `auth_pass` - the pre-computed HA1 value `md5(user:realm:pass)` as
///   produced by `parse_auth`, NOT the plaintext password.
///
/// Returns `Some(())` when the client's response digest matches, `None` on
/// any failure (bad scheme, unparsable fields, stale/forged nonce, wrong
/// user, wrong response).
pub fn valid_digest(
    header_value: &HeaderValue,
    method: &str,
    auth_user: &str,
    auth_pass: &str,
) -> Option<()> {
    // Only the "Digest " scheme is handled here.
    let digest_value = strip_prefix(header_value.as_bytes(), b"Digest ")?;
    let user_vals = to_headermap(digest_value).ok()?;
    if let (Some(username), Some(nonce), Some(user_response)) = (
        user_vals
            .get(b"username".as_ref())
            .and_then(|b| std::str::from_utf8(*b).ok()),
        user_vals.get(b"nonce".as_ref()),
        user_vals.get(b"response".as_ref()),
    ) {
        // Reject nonces we did not issue or that are older than the window.
        match validate_nonce(nonce) {
            Ok(true) => {}
            _ => return None,
        }
        if auth_user != username {
            return None;
        }
        // HA2 = md5(method ":" uri)
        let mut ha = Context::new();
        ha.consume(method);
        ha.consume(b":");
        if let Some(uri) = user_vals.get(b"uri".as_ref()) {
            ha.consume(uri);
        }
        let ha = format!("{:x}", ha.compute());
        let mut correct_response = None;
        if let Some(qop) = user_vals.get(b"qop".as_ref()) {
            if qop == &b"auth".as_ref() || qop == &b"auth-int".as_ref() {
                // qop present: response = md5(HA1:nonce:nc:cnonce:qop:HA2)
                correct_response = Some({
                    let mut c = Context::new();
                    c.consume(&auth_pass);
                    c.consume(b":");
                    c.consume(nonce);
                    c.consume(b":");
                    if let Some(nc) = user_vals.get(b"nc".as_ref()) {
                        c.consume(nc);
                    }
                    c.consume(b":");
                    if let Some(cnonce) = user_vals.get(b"cnonce".as_ref()) {
                        c.consume(cnonce);
                    }
                    c.consume(b":");
                    c.consume(qop);
                    c.consume(b":");
                    c.consume(&*ha);
                    format!("{:x}", c.compute())
                });
            }
        }
        let correct_response = match correct_response {
            Some(r) => r,
            None => {
                // Legacy form (no qop): response = md5(HA1:nonce:HA2)
                let mut c = Context::new();
                c.consume(&auth_pass);
                c.consume(b":");
                c.consume(nonce);
                c.consume(b":");
                c.consume(&*ha);
                format!("{:x}", c.compute())
            }
        };
        if correct_response.as_bytes() == *user_response {
            // grant access
            return Some(());
        }
    }
    None
}
/// Check if a nonce is still valid.
/// Return an error if it was never valid.
///
/// A nonce is 34 hex chars: 8 chars of big-endian unix seconds followed
/// by the first 26 chars of the keyed hash of that timestamp (see
/// `create_nonce`). A valid nonce is accepted for 5 minutes.
fn validate_nonce(nonce: &[u8]) -> Result<bool, ()> {
    if nonce.len() != 34 {
        return Err(());
    }
    // Reject non-ASCII input up front: the byte-index slicing below
    // (`&n[..8]`, `&n[8..34]`) would panic on a multi-byte UTF-8 char
    // boundary, and this value comes from an attacker-controlled header.
    let n = match std::str::from_utf8(nonce) {
        Ok(n) if n.is_ascii() => n,
        _ => return Err(()),
    };
    // First 8 chars: creation time as hex-encoded unix seconds.
    if let Ok(secs_nonce) = u32::from_str_radix(&n[..8], 16) {
        let now = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();
        let secs_now = now.as_secs() as u32;
        // A nonce from the future (or overflow) is never valid.
        if let Some(dur) = secs_now.checked_sub(secs_nonce) {
            // Remaining 26 chars must match our keyed hash of the time.
            let mut h = NONCESTARTHASH.clone();
            h.consume(secs_nonce.to_be_bytes());
            let h = format!("{:x}", h.compute());
            if h[..26] == n[8..34] {
                return Ok(dur < 300); // from the last 5min
                //Authentication-Info ?
            }
        }
    }
    Err(())
}
/// Return the remainder of `search` after `prefix`, or `None` when
/// `search` does not start with `prefix`.
fn strip_prefix<'a>(search: &'a [u8], prefix: &[u8]) -> Option<&'a [u8]> {
    // The std slice method performs exactly the hand-rolled
    // length-guarded comparison this function used to implement.
    search.strip_prefix(prefix)
}
/// Parse the comma-separated `key=value` / `key="value"` pairs of a
/// Digest header into a borrowed map. Returns `Err(())` on malformed
/// input (e.g. a segment without both a key and a value).
fn to_headermap(header: &[u8]) -> Result<HashMap<&[u8], &[u8]>, ()> {
    // First pass: record byte positions of separators (`,`) and
    // assignments (`=`) that lie *outside* double-quoted sections.
    let mut sep = Vec::new();
    let mut asign = Vec::new();
    let mut i: usize = 0;
    let mut esc = false; // currently inside a double-quoted value
    for c in header {
        match (c, esc) {
            (b'=', false) => asign.push(i),
            (b',', false) => sep.push(i),
            (b'"', false) => esc = true,
            (b'"', true) => esc = false,
            _ => {}
        }
        i += 1;
    }
    sep.push(i); // same len for both Vecs
    i = 0;
    // Second pass: slice each `key=value` pair between separators.
    // NOTE(review): assumes one `=` per segment so the zip aligns;
    // misaligned input falls into the bounds check below and errors out.
    let mut ret = HashMap::new();
    for (&k, &a) in sep.iter().zip(asign.iter()) {
        // Skip leading spaces before the key.
        while header[i] == b' ' {
            i += 1;
        }
        if a <= i || k <= 1 + a {
            //keys and vals must contain one char
            return Err(());
        }
        let key = &header[i..a];
        let val = if header[1 + a] == b'"' && header[k - 1] == b'"' {
            //escaped
            &header[2 + a..k - 1]
        } else {
            //not escaped
            &header[1 + a..k]
        };
        i = 1 + k;
        ret.insert(key, val);
    }
    Ok(ret)
}
/// Build a fresh 34-char nonce: 8 hex chars of the current unix time
/// followed by the first 26 hex chars of the keyed hash of that time
/// (see `validate_nonce` for the matching check).
fn create_nonce() -> String {
    let secs = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .unwrap()
        .as_secs() as u32;
    let mut hash = NONCESTARTHASH.clone();
    hash.consume(secs.to_be_bytes());
    let mut nonce = format!("{:08x}{:032x}", secs, hash.compute());
    nonce.truncate(34);
    nonce
}

View File

@@ -1,14 +1,15 @@
/// Format the arguments into a `String` error and immediately return it
/// as `Err` from the enclosing function. Works with any error type that
/// is `From<String>`, e.g. the crate's boxed `BoxResult` error.
macro_rules! bail {
    ($($tt:tt)*) => {
        return Err(From::from(format!($($tt)*)))
    }
}
mod args;
mod auth;
mod server;
#[macro_use]
extern crate log;
pub type BoxResult<T> = Result<T, Box<dyn std::error::Error>>;
use std::env;
use std::io::Write;
use crate::args::{matches, Args};
use crate::server::serve;
@@ -18,11 +19,34 @@ async fn main() {
}
async fn run() -> BoxResult<()> {
if env::var("RUST_LOG").is_err() {
env::set_var("RUST_LOG", "info")
}
env_logger::builder()
.format(|buf, record| {
let timestamp = buf.timestamp();
writeln!(buf, "[{} {}] {}", timestamp, record.level(), record.args())
})
.init();
let args = Args::parse(matches())?;
serve(args).await
tokio::select! {
ret = serve(args) => {
ret
},
_ = shutdown_signal() => {
Ok(())
},
}
}
fn handle_err<T>(err: Box<dyn std::error::Error>) -> T {
eprintln!("Server error: {}", err);
eprintln!("error: {}", err);
std::process::exit(1);
}
/// Resolve when the process receives CTRL+C (SIGINT), so `run` can race
/// it against the server future via `tokio::select!` and exit cleanly.
async fn shutdown_signal() {
    tokio::signal::ctrl_c()
        .await
        .expect("Failed to install CTRL+C signal handler")
}

View File

@@ -1,10 +1,11 @@
use crate::auth::{generate_www_auth, valid_digest};
use crate::{Args, BoxResult};
use xml::escape::escape_str_pcdata;
use async_walkdir::WalkDir;
use async_zip::read::seek::ZipFileReader;
use async_zip::write::{EntryOptions, ZipFileWriter};
use async_zip::Compression;
use chrono::Local;
use chrono::{TimeZone, Utc};
use futures::stream::StreamExt;
use futures::TryStreamExt;
use get_if_addrs::get_if_addrs;
@@ -18,13 +19,13 @@ use hyper::header::{
WWW_AUTHENTICATE,
};
use hyper::service::{make_service_fn, service_fn};
use hyper::{Body, Method, StatusCode};
use hyper::{Body, Method, StatusCode, Uri};
use percent_encoding::percent_decode;
use rustls::ServerConfig;
use serde::Serialize;
use std::convert::Infallible;
use std::fs::Metadata;
use std::net::IpAddr;
use std::net::{IpAddr, SocketAddr};
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::time::SystemTime;
@@ -35,6 +36,7 @@ use tokio::{fs, io};
use tokio_rustls::TlsAcceptor;
use tokio_util::codec::{BytesCodec, FramedRead};
use tokio_util::io::{ReaderStream, StreamReader};
use uuid::Uuid;
type Request = hyper::Request<Body>;
type Response = hyper::Response<Body>;
@@ -42,6 +44,7 @@ type Response = hyper::Response<Body>;
const INDEX_HTML: &str = include_str!("../assets/index.html");
const INDEX_CSS: &str = include_str!("../assets/index.css");
const INDEX_JS: &str = include_str!("../assets/index.js");
const FAVICON_ICO: &[u8] = include_bytes!("../assets/favicon.ico");
const INDEX_NAME: &str = "index.html";
const BUF_SIZE: usize = 1024 * 16;
@@ -54,51 +57,53 @@ macro_rules! status {
pub async fn serve(args: Args) -> BoxResult<()> {
let args = Arc::new(args);
let socket_addr = args.address()?;
let inner = Arc::new(InnerService::new(args.clone()));
if let Some((certs, key)) = args.tls.as_ref() {
let config = ServerConfig::builder()
.with_safe_defaults()
.with_no_client_auth()
.with_single_cert(certs.clone(), key.clone())?;
let tls_acceptor = TlsAcceptor::from(Arc::new(config));
let arc_acceptor = Arc::new(tls_acceptor);
let listener = TcpListener::bind(&socket_addr).await.unwrap();
let incoming = tokio_stream::wrappers::TcpListenerStream::new(listener);
let incoming = hyper::server::accept::from_stream(incoming.filter_map(|socket| async {
match socket {
Ok(stream) => match arc_acceptor.clone().accept(stream).await {
Ok(val) => Some(Ok::<_, Infallible>(val)),
Err(_) => None,
},
Err(_) => None,
}
}));
let server = hyper::Server::builder(incoming).serve(make_service_fn(move |_| {
let inner = inner.clone();
async move {
Ok::<_, Infallible>(service_fn(move |req| {
let inner = inner.clone();
inner.call(req)
}))
}
}));
print_listening(args.address.as_str(), args.port, true);
server.await?;
} else {
let server = hyper::Server::bind(&socket_addr).serve(make_service_fn(move |_| {
let inner = inner.clone();
async move {
Ok::<_, Infallible>(service_fn(move |req| {
let inner = inner.clone();
inner.call(req)
}))
}
}));
print_listening(args.address.as_str(), args.port, false);
server.await?;
match args.tls.clone() {
Some((certs, key)) => {
let config = ServerConfig::builder()
.with_safe_defaults()
.with_no_client_auth()
.with_single_cert(certs, key)?;
let tls_acceptor = TlsAcceptor::from(Arc::new(config));
let arc_acceptor = Arc::new(tls_acceptor);
let listener = TcpListener::bind(&args.addr).await?;
let incoming = tokio_stream::wrappers::TcpListenerStream::new(listener);
let incoming =
hyper::server::accept::from_stream(incoming.filter_map(|socket| async {
match socket {
Ok(stream) => match arc_acceptor.clone().accept(stream).await {
Ok(val) => Some(Ok::<_, Infallible>(val)),
Err(_) => None,
},
Err(_) => None,
}
}));
let server = hyper::Server::builder(incoming).serve(make_service_fn(move |_| {
let inner = inner.clone();
async move {
Ok::<_, Infallible>(service_fn(move |req| {
let inner = inner.clone();
inner.call(req)
}))
}
}));
print_listening(&args.addr, &args.uri_prefix, true);
server.await?;
}
None => {
let server = hyper::Server::try_bind(&args.addr)?.serve(make_service_fn(move |_| {
let inner = inner.clone();
async move {
Ok::<_, Infallible>(service_fn(move |req| {
let inner = inner.clone();
inner.call(req)
}))
}
}));
print_listening(&args.addr, &args.uri_prefix, false);
server.await?;
}
}
Ok(())
}
@@ -116,20 +121,16 @@ impl InnerService {
let uri = req.uri().clone();
let cors = self.args.cors;
let timestamp = Local::now().format("%d/%b/%Y %H:%M:%S");
let mut res = match self.handle(req).await {
Ok(res) => {
println!(r#"[{}] "{} {}" - {}"#, timestamp, method, uri, res.status());
info!(r#""{} {}" - {}"#, method, uri, res.status());
res
}
Err(err) => {
let mut res = Response::default();
let status = StatusCode::INTERNAL_SERVER_ERROR;
status!(res, status);
eprintln!(
r#"[{}] "{} {}" - {} {}"#,
timestamp, method, uri, status, err
);
error!(r#""{} {}" - {} {}"#, method, uri, status, err);
res
}
};
@@ -160,11 +161,10 @@ impl InnerService {
let query = req.uri().query().unwrap_or_default();
let meta = fs::metadata(path).await.ok();
let is_miss = meta.is_none();
let is_dir = meta.map(|v| v.is_dir()).unwrap_or_default();
let is_file = !is_miss && !is_dir;
let (is_miss, is_dir, is_file, size) = match fs::metadata(path).await.ok() {
Some(meta) => (false, meta.is_dir(), meta.is_file(), meta.len()),
None => (true, false, false, 0),
};
let allow_upload = self.args.allow_upload;
let allow_delete = self.args.allow_delete;
@@ -175,17 +175,24 @@ impl InnerService {
status!(res, StatusCode::NOT_FOUND);
return Ok(res);
}
if is_miss && path.ends_with("favicon.ico") {
*res.body_mut() = Body::from(FAVICON_ICO);
res.headers_mut()
.insert("content-type", "image/x-icon".parse().unwrap());
return Ok(res);
}
match *req.method() {
Method::GET => {
let headers = req.headers();
let headers = req.headers();
match req.method() {
&Method::GET => {
if is_dir {
if render_index || render_spa {
self.handle_render_index(path, headers, &mut res).await?;
} else if query == "zip" {
self.handle_zip_dir(path, &mut res).await?;
} else if query.starts_with("q=") {
self.handle_query_dir(path, &query[3..], &mut res).await?;
} else if let Some(q) = query.strip_prefix("q=") {
self.handle_query_dir(path, q, &mut res).await?;
} else {
self.handle_ls_dir(path, true, &mut res).await?;
}
@@ -199,28 +206,76 @@ impl InnerService {
status!(res, StatusCode::NOT_FOUND);
}
}
Method::OPTIONS => {
status!(res, StatusCode::NO_CONTENT);
&Method::OPTIONS => {
self.handle_method_options(&mut res);
}
Method::PUT => {
if !allow_upload || (!allow_delete && is_file) {
&Method::PUT => {
if !allow_upload || (!allow_delete && is_file && size > 0) {
status!(res, StatusCode::FORBIDDEN);
} else {
self.handle_upload(path, req, &mut res).await?;
}
}
Method::DELETE => {
&Method::DELETE => {
if !allow_delete {
status!(res, StatusCode::FORBIDDEN);
} else if !is_miss {
self.handle_delete(path, is_dir).await?
self.handle_delete(path, is_dir, &mut res).await?
} else {
status!(res, StatusCode::NOT_FOUND);
}
}
_ => {
status!(res, StatusCode::METHOD_NOT_ALLOWED);
&Method::HEAD => {
if is_miss {
status!(res, StatusCode::NOT_FOUND);
} else {
status!(res, StatusCode::OK);
}
}
method => match method.as_str() {
"PROPFIND" => {
if is_dir {
self.handle_propfind_dir(path, headers, &mut res).await?;
} else if is_file {
self.handle_propfind_file(path, &mut res).await?;
} else {
status!(res, StatusCode::NOT_FOUND);
}
}
"PROPPATCH" => {
if is_file {
self.handle_proppatch(req_path, &mut res).await?;
} else {
status!(res, StatusCode::NOT_FOUND);
}
}
"MKCOL" if allow_upload && is_miss => self.handle_mkcol(path, &mut res).await?,
"COPY" if allow_upload && !is_miss => {
self.handle_copy(path, headers, &mut res).await?
}
"MOVE" if allow_upload && allow_delete && !is_miss => {
self.handle_move(path, headers, &mut res).await?
}
"LOCK" => {
// Fake lock
if is_file {
self.handle_lock(req_path, &mut res).await?;
} else {
status!(res, StatusCode::NOT_FOUND);
}
}
"UNLOCK" => {
// Fake unlock
if is_miss {
status!(res, StatusCode::NOT_FOUND);
} else {
status!(res, StatusCode::OK);
}
}
_ => {
status!(res, StatusCode::METHOD_NOT_ALLOWED);
}
},
}
Ok(res)
}
@@ -231,20 +286,7 @@ impl InnerService {
mut req: Request,
res: &mut Response,
) -> BoxResult<()> {
let ensure_parent = match path.parent() {
Some(parent) => match fs::metadata(parent).await {
Ok(meta) => meta.is_dir(),
Err(_) => {
fs::create_dir_all(parent).await?;
true
}
},
None => false,
};
if !ensure_parent {
status!(res, StatusCode::FORBIDDEN);
return Ok(());
}
ensure_path_parent(path).await?;
let mut file = fs::File::create(&path).await?;
@@ -258,44 +300,32 @@ impl InnerService {
io::copy(&mut body_reader, &mut file).await?;
let query = req.uri().query().unwrap_or_default();
if query == "unzip" {
if let Err(e) = self.unzip_file(path).await {
eprintln!("Failed to unzip {}, {}", path.display(), e);
status!(res, StatusCode::BAD_REQUEST);
}
fs::remove_file(&path).await?;
}
status!(res, StatusCode::CREATED);
Ok(())
}
async fn handle_delete(&self, path: &Path, is_dir: bool) -> BoxResult<()> {
async fn handle_delete(&self, path: &Path, is_dir: bool, res: &mut Response) -> BoxResult<()> {
match is_dir {
true => fs::remove_dir_all(path).await?,
false => fs::remove_file(path).await?,
}
status!(res, StatusCode::NO_CONTENT);
Ok(())
}
async fn handle_ls_dir(&self, path: &Path, exist: bool, res: &mut Response) -> BoxResult<()> {
let mut paths: Vec<PathItem> = vec![];
let mut paths = vec![];
if exist {
let mut rd = match fs::read_dir(path).await {
Ok(rd) => rd,
paths = match self.list_dir(path, path).await {
Ok(paths) => paths,
Err(_) => {
status!(res, StatusCode::FORBIDDEN);
return Ok(());
}
};
while let Some(entry) = rd.next_entry().await? {
let entry_path = entry.path();
if let Ok(Some(item)) = self.to_pathitem(entry_path, path.to_path_buf()).await {
paths.push(item);
}
}
}
self.send_index(path, paths, res)
};
self.send_index(path, paths, res, exist)
}
async fn handle_query_dir(
@@ -324,23 +354,30 @@ impl InnerService {
}
}
}
self.send_index(path, paths, res)
self.send_index(path, paths, res, true)
}
async fn handle_zip_dir(&self, path: &Path, res: &mut Response) -> BoxResult<()> {
let (mut writer, reader) = tokio::io::duplex(BUF_SIZE);
let filename = path.file_name().unwrap().to_str().unwrap();
let filename = path
.file_name()
.and_then(|v| v.to_str())
.ok_or_else(|| format!("Failed to get name of `{}`", path.display()))?;
let path = path.to_owned();
tokio::spawn(async move {
if let Err(e) = zip_dir(&mut writer, &path).await {
eprintln!("Failed to zip {}, {}", path.display(), e);
error!("Failed to zip {}, {}", path.display(), e);
}
});
let stream = ReaderStream::new(reader);
*res.body_mut() = Body::wrap_stream(stream);
res.headers_mut().insert(
CONTENT_DISPOSITION,
HeaderValue::from_str(&format!("attachment; filename=\"{}.zip\"", filename,)).unwrap(),
HeaderValue::from_str(&format!(
"attachment; filename=\"{}.zip\"",
encode_uri(filename),
))
.unwrap(),
);
Ok(())
}
@@ -448,11 +485,167 @@ impl InnerService {
Ok(())
}
/// Answer an OPTIONS request: advertise the supported methods and the
/// WebDAV compliance class, replying 204 No Content.
fn handle_method_options(&self, res: &mut Response) {
    // Advertise every method the dispatcher in `handle` actually accepts.
    // The previous list omitted HEAD, PROPPATCH, MKCOL, LOCK and UNLOCK
    // even though they are all handled.
    res.headers_mut().insert(
        "Allow",
        "GET,HEAD,PUT,OPTIONS,DELETE,PROPFIND,PROPPATCH,MKCOL,COPY,MOVE,LOCK,UNLOCK"
            .parse()
            .unwrap(),
    );
    // DAV compliance class 1 (locking is only faked, so not class 2).
    res.headers_mut().insert("DAV", "1".parse().unwrap());
    status!(res, StatusCode::NO_CONTENT);
}
/// Answer a WebDAV PROPFIND on a directory: reply with a 207 multistatus
/// XML body describing the directory itself and, when `Depth > 0`, its
/// immediate children (listing is never recursive).
async fn handle_propfind_dir(
    &self,
    path: &Path,
    headers: &HeaderMap<HeaderValue>,
    res: &mut Response,
) -> BoxResult<()> {
    // Depth defaults to 0 (the collection itself); a non-numeric Depth
    // value such as "infinity" fails the u32 parse and yields 400.
    let depth: u32 = match headers.get("depth") {
        Some(v) => match v.to_str().ok().and_then(|v| v.parse().ok()) {
            Some(v) => v,
            None => {
                status!(res, StatusCode::BAD_REQUEST);
                return Ok(());
            }
        },
        None => 0,
    };
    // NOTE(review): the unwrap assumes to_pathitem always yields Some for
    // an existing served directory — confirm it cannot return Ok(None)
    // here (e.g. when the prefix strip fails).
    let mut paths = vec![self.to_pathitem(path, &self.args.path).await?.unwrap()];
    if depth > 0 {
        match self.list_dir(path, &self.args.path).await {
            Ok(child) => paths.extend(child),
            Err(_) => {
                status!(res, StatusCode::FORBIDDEN);
                return Ok(());
            }
        }
    }
    // Concatenate one <D:response> fragment per entry.
    let output = paths
        .iter()
        .map(|v| v.to_dav_xml(self.args.uri_prefix.as_str()))
        .fold(String::new(), |mut acc, v| {
            acc.push_str(&v);
            acc
        });
    res_multistatus(res, &output);
    Ok(())
}
/// Answer a WebDAV PROPFIND on a single file with its property XML in a
/// 207 multistatus body, or 404 if the file cannot be mapped to a served
/// path item.
async fn handle_propfind_file(&self, path: &Path, res: &mut Response) -> BoxResult<()> {
    if let Some(pathitem) = self.to_pathitem(path, &self.args.path).await? {
        res_multistatus(res, &pathitem.to_dav_xml(self.args.uri_prefix.as_str()));
    } else {
        status!(res, StatusCode::NOT_FOUND);
    }
    Ok(())
}
/// WebDAV MKCOL: create the requested directory (and any missing
/// parents), replying 201 Created.
async fn handle_mkcol(&self, path: &Path, res: &mut Response) -> BoxResult<()> {
    fs::create_dir_all(path).await?;
    status!(res, StatusCode::CREATED);
    Ok(())
}
/// WebDAV COPY: duplicate a file to the path named by the `Destination`
/// header. Directory sources are rejected with 400; only files copy.
async fn handle_copy(
    &self,
    path: &Path,
    headers: &HeaderMap<HeaderValue>,
    res: &mut Response,
) -> BoxResult<()> {
    // Destination must be present and resolve to a path under the
    // served root (extract_dest enforces the prefix).
    let dest = match self.extract_dest(headers) {
        Some(dest) => dest,
        None => {
            status!(res, StatusCode::BAD_REQUEST);
            return Ok(());
        }
    };
    // symlink_metadata: classify the source without following symlinks.
    let meta = fs::symlink_metadata(path).await?;
    if meta.is_dir() {
        status!(res, StatusCode::BAD_REQUEST);
        return Ok(());
    }
    ensure_path_parent(&dest).await?;
    fs::copy(path, &dest).await?;
    status!(res, StatusCode::NO_CONTENT);
    Ok(())
}
/// WebDAV MOVE: rename a file or directory to the path named by the
/// `Destination` header, creating missing destination parents first.
async fn handle_move(
    &self,
    path: &Path,
    headers: &HeaderMap<HeaderValue>,
    res: &mut Response,
) -> BoxResult<()> {
    let dest = match self.extract_dest(headers) {
        Some(dest) => dest,
        None => {
            status!(res, StatusCode::BAD_REQUEST);
            return Ok(());
        }
    };
    ensure_path_parent(&dest).await?;
    fs::rename(path, &dest).await?;
    status!(res, StatusCode::NO_CONTENT);
    Ok(())
}
/// WebDAV LOCK (faked): no real lock is taken; a token is handed back so
/// lock-requiring clients can proceed (the dispatcher marks this as a
/// "Fake lock").
async fn handle_lock(&self, req_path: &str, res: &mut Response) -> BoxResult<()> {
    // Without auth a plain timestamp token suffices; with auth, use an
    // opaque random UUID token.
    let token = if self.args.auth.is_none() {
        Utc::now().timestamp().to_string()
    } else {
        format!("opaquelocktoken:{}", Uuid::new_v4())
    };
    res.headers_mut().insert(
        "content-type",
        "application/xml; charset=utf-8".parse().unwrap(),
    );
    res.headers_mut()
        .insert("lock-token", format!("<{}>", token).parse().unwrap());
    *res.body_mut() = Body::from(format!(
        r#"<?xml version="1.0" encoding="utf-8"?>
<D:prop xmlns:D="DAV:"><D:lockdiscovery><D:activelock>
<D:locktoken><D:href>{}</D:href></D:locktoken>
<D:lockroot><D:href>{}</D:href></D:lockroot>
</D:activelock></D:lockdiscovery></D:prop>"#,
        token, req_path
    ));
    Ok(())
}
/// WebDAV PROPPATCH (stub): property changes are not supported, so reply
/// with a 207 multistatus carrying a 403 propstat for the resource.
async fn handle_proppatch(&self, req_path: &str, res: &mut Response) -> BoxResult<()> {
    // NOTE(review): req_path is interpolated into the XML unescaped —
    // confirm it is already URI-encoded / XML-safe at this point.
    let output = format!(
        r#"<D:response>
<D:href>{}</D:href>
<D:propstat>
<D:prop>
</D:prop>
<D:status>HTTP/1.1 403 Forbidden</D:status>
</D:propstat>
</D:response>"#,
        req_path
    );
    res_multistatus(res, &output);
    Ok(())
}
fn send_index(
&self,
path: &Path,
mut paths: Vec<PathItem>,
res: &mut Response,
exist: bool,
) -> BoxResult<()> {
paths.sort_unstable();
let rel_path = match self.args.path.parent() {
@@ -464,6 +657,7 @@ impl InnerService {
paths,
allow_upload: self.args.allow_upload,
allow_delete: self.args.allow_delete,
dir_exists: exist,
};
let data = serde_json::to_string(&data).unwrap();
let output = INDEX_HTML.replace(
@@ -481,34 +675,35 @@ impl InnerService {
),
);
*res.body_mut() = output.into();
res.headers_mut()
.typed_insert(ContentType::from(mime_guess::mime::TEXT_HTML_UTF_8));
Ok(())
}
fn auth_guard(&self, req: &Request, res: &mut Response) -> bool {
let method = req.method();
let pass = {
match &self.args.auth {
None => true,
Some(auth) => match req.headers().get(AUTHORIZATION) {
Some(value) => match value.to_str().ok().map(|v| {
let mut it = v.split(' ');
(it.next(), it.next())
}) {
Some((Some("Basic"), Some(tail))) => base64::decode(tail)
.ok()
.and_then(|v| String::from_utf8(v).ok())
.map(|v| v.as_str() == auth)
.unwrap_or_default(),
_ => false,
},
None => self.args.no_auth_access && req.method() == Method::GET,
Some((user, pass)) => match req.headers().get(AUTHORIZATION) {
Some(value) => {
valid_digest(value, method.as_str(), user.as_str(), pass.as_str()).is_some()
}
None => {
self.args.no_auth_access
&& (method == Method::GET
|| method == Method::OPTIONS
|| method == Method::HEAD
|| method.as_str() == "PROPFIND")
}
},
}
};
if !pass {
let value = generate_www_auth(false);
status!(res, StatusCode::UNAUTHORIZED);
res.headers_mut()
.insert(WWW_AUTHENTICATE, HeaderValue::from_static("Basic"));
.insert(WWW_AUTHENTICATE, value.parse().unwrap());
}
pass
}
@@ -521,30 +716,10 @@ impl InnerService {
.unwrap_or_default()
}
async fn unzip_file(&self, path: &Path) -> BoxResult<()> {
let root = path.parent().unwrap();
let mut zip = ZipFileReader::new(File::open(&path).await?).await?;
for i in 0..zip.entries().len() {
let entry = &zip.entries()[i];
let entry_name = entry.name();
let entry_path = root.join(entry_name);
if entry_name.ends_with('/') {
fs::create_dir_all(entry_path).await?;
} else {
if !self.args.allow_delete && fs::metadata(&entry_path).await.is_ok() {
continue;
}
if let Some(parent) = entry_path.parent() {
if fs::symlink_metadata(parent).await.is_err() {
fs::create_dir_all(&parent).await?;
}
}
let mut outfile = fs::File::create(&entry_path).await?;
let mut reader = zip.entry_reader(i).await?;
io::copy(&mut reader, &mut outfile).await?;
}
}
Ok(())
fn extract_dest(&self, headers: &HeaderMap<HeaderValue>) -> Option<PathBuf> {
let dest = headers.get("Destination")?.to_str().ok()?;
let uri: Uri = dest.parse().ok()?;
self.extract_path(uri.path())
}
fn extract_path(&self, path: &str) -> Option<PathBuf> {
@@ -563,15 +738,25 @@ impl InnerService {
fn strip_path_prefix<'a, P: AsRef<Path>>(&self, path: &'a P) -> Option<&'a Path> {
let path = path.as_ref();
match self.args.path_prefix.as_deref() {
Some(prefix) => {
let prefix = prefix.trim_start_matches('/');
path.strip_prefix(prefix).ok()
}
None => Some(path),
if self.args.path_prefix.is_empty() {
Some(path)
} else {
path.strip_prefix(&self.args.path_prefix).ok()
}
}
/// List the entries of `entry_path` as `PathItem`s whose names are
/// relative to `base_path`. Entries that fail to read or resolve are
/// silently skipped (deliberate best-effort listing).
async fn list_dir(&self, entry_path: &Path, base_path: &Path) -> BoxResult<Vec<PathItem>> {
    let mut paths: Vec<PathItem> = vec![];
    let mut rd = fs::read_dir(entry_path).await?;
    while let Ok(Some(entry)) = rd.next_entry().await {
        let entry_path = entry.path();
        if let Ok(Some(item)) = self.to_pathitem(entry_path.as_path(), base_path).await {
            paths.push(item);
        }
    }
    Ok(paths)
}
async fn to_pathitem<P: AsRef<Path>>(
&self,
path: P,
@@ -597,9 +782,15 @@ impl InnerService {
PathType::Dir | PathType::SymlinkDir => None,
PathType::File | PathType::SymlinkFile => Some(meta.len()),
};
let base_name = rel_path
.file_name()
.and_then(|v| v.to_str())
.unwrap_or("/")
.to_owned();
let name = normalize_path(rel_path);
Ok(Some(PathItem {
path_type,
base_name,
name,
mtime,
size,
@@ -607,22 +798,66 @@ impl InnerService {
}
}
#[derive(Debug, Serialize, Eq, PartialEq, Ord, PartialOrd)]
/// Payload serialized to JSON and embedded into the index HTML page for
/// the front-end (see `send_index`).
#[derive(Debug, Serialize)]
struct IndexData {
    breadcrumb: String,   // display path for the page header
    paths: Vec<PathItem>, // entries to render
    allow_upload: bool,   // whether the UI should offer uploads
    allow_delete: bool,   // whether the UI should offer deletion
    dir_exists: bool,     // false when the requested directory is missing
}
/// One directory entry (or the directory itself), shared between the
/// JSON index payload and the WebDAV XML (`to_dav_xml`). The derived
/// ordering sorts by the fields in declaration order, path_type first.
#[derive(Debug, Serialize, Eq, PartialEq, Ord, PartialOrd)]
struct PathItem {
    path_type: PathType,
    base_name: String, // file name only; used as the DAV displayname
    name: String,      // path relative to the served root
    mtime: u64,        // modified time; treated as ms since epoch in to_dav_xml
    size: Option<u64>, // file size; None for directories
}
impl PathItem {
    /// Render this entry as a WebDAV <D:response> XML fragment.
    /// `prefix` is the URI prefix prepended to the item's relative path
    /// to form the href.
    pub fn to_dav_xml(&self, prefix: &str) -> String {
        // NOTE(review): mtime is interpreted as milliseconds since the
        // epoch here — confirm to_pathitem stores milliseconds.
        let mtime = Utc.timestamp_millis(self.mtime as i64).to_rfc2822();
        // href must be a valid URI; displayname is XML-escaped text.
        let href = encode_uri(&format!("{}{}", prefix, &self.name));
        let displayname = escape_str_pcdata(&self.base_name);
        match self.path_type {
            // Directories advertise <D:collection/> as their resourcetype.
            PathType::Dir | PathType::SymlinkDir => format!(
                r#"<D:response>
<D:href>{}</D:href>
<D:propstat>
<D:prop>
<D:displayname>{}</D:displayname>
<D:getlastmodified>{}</D:getlastmodified>
<D:resourcetype><D:collection/></D:resourcetype>
</D:prop>
<D:status>HTTP/1.1 200 OK</D:status>
</D:propstat>
</D:response>"#,
                href, displayname, mtime
            ),
            // Files additionally report their content length.
            PathType::File | PathType::SymlinkFile => format!(
                r#"<D:response>
<D:href>{}</D:href>
<D:propstat>
<D:prop>
<D:displayname>{}</D:displayname>
<D:getcontentlength>{}</D:getcontentlength>
<D:getlastmodified>{}</D:getlastmodified>
<D:resourcetype></D:resourcetype>
</D:prop>
<D:status>HTTP/1.1 200 OK</D:status>
</D:propstat>
</D:response>"#,
                href,
                displayname,
                self.size.unwrap_or_default(),
                mtime
            ),
        }
    }
}
#[derive(Debug, Serialize, Eq, PartialEq, Ord, PartialOrd)]
enum PathType {
Dir,
@@ -646,6 +881,15 @@ fn normalize_path<P: AsRef<Path>>(path: P) -> String {
}
}
/// Make sure the parent directory of `path` exists, creating the whole
/// chain when it does not. A path with no parent is left untouched.
async fn ensure_path_parent(path: &Path) -> BoxResult<()> {
    let parent = match path.parent() {
        Some(parent) => parent,
        None => return Ok(()),
    };
    // symlink_metadata: an existing symlink counts as "present".
    if fs::symlink_metadata(parent).await.is_err() {
        fs::create_dir_all(parent).await?;
    }
    Ok(())
}
fn add_cors(res: &mut Response) {
res.headers_mut()
.typed_insert(AccessControlAllowOrigin::ANY);
@@ -656,6 +900,21 @@ fn add_cors(res: &mut Response) {
);
}
/// Write a 207 Multi-Status response: wrap `content` (one or more
/// <D:response> fragments) in the DAV multistatus envelope and set the
/// XML content type.
fn res_multistatus(res: &mut Response, content: &str) {
    *res.status_mut() = StatusCode::MULTI_STATUS;
    res.headers_mut().insert(
        "content-type",
        "application/xml; charset=utf-8".parse().unwrap(),
    );
    *res.body_mut() = Body::from(format!(
        r#"<?xml version="1.0" encoding="utf-8" ?>
<D:multistatus xmlns:D="DAV:">
{}
</D:multistatus>"#,
        content,
    ));
}
async fn zip_dir<W: AsyncWrite + Unpin>(writer: &mut W, dir: &Path) -> BoxResult<()> {
let mut writer = ZipFileWriter::new(writer);
let mut walkdir = WalkDir::new(dir);
@@ -723,31 +982,48 @@ fn to_content_range(range: &Range, complete_length: u64) -> Option<ContentRange>
})
}
fn print_listening(address: &str, port: u16, tls: bool) {
let addrs = retrive_listening_addrs(address);
fn print_listening(addr: &SocketAddr, prefix: &str, tls: bool) {
let prefix = encode_uri(prefix.trim_end_matches('/'));
let addrs = retrieve_listening_addrs(addr);
let protocol = if tls { "https" } else { "http" };
if addrs.len() == 1 {
eprintln!("Listening on {}://{}:{}", protocol, addrs[0], port);
eprintln!("Listening on {}://{}{}", protocol, addr, prefix);
} else {
eprintln!("Listening on:");
for addr in addrs {
eprintln!(" {}://{}:{}", protocol, addr, port);
eprintln!(" {}://{}{}", protocol, addr, prefix);
}
eprintln!();
}
}
fn retrive_listening_addrs(address: &str) -> Vec<String> {
if address == "0.0.0.0" {
fn retrieve_listening_addrs(addr: &SocketAddr) -> Vec<SocketAddr> {
let ip = addr.ip();
let port = addr.port();
if ip.is_unspecified() {
if let Ok(interfaces) = get_if_addrs() {
let mut ifaces: Vec<IpAddr> = interfaces
.into_iter()
.map(|v| v.ip())
.filter(|v| v.is_ipv4())
.filter(|v| {
if ip.is_ipv4() {
v.is_ipv4()
} else {
v.is_ipv6()
}
})
.collect();
ifaces.sort();
return ifaces.into_iter().map(|v| v.to_string()).collect();
return ifaces
.into_iter()
.map(|v| SocketAddr::new(v, port))
.collect();
}
}
vec![address.to_owned()]
vec![addr.to_owned()]
}
/// Percent-encode each path segment of `v`, leaving the '/' separators
/// themselves intact.
fn encode_uri(v: &str) -> String {
    let mut encoded = String::new();
    for (i, segment) in v.split('/').enumerate() {
        if i > 0 {
            encoded.push('/');
        }
        encoded.push_str(&urlencoding::encode(segment));
    }
    encoded
}