Compare commits

...

23 Commits

Author SHA1 Message Date
sigoden
ce740b1fb1 chore: release v0.41.0 (#389) 2024-05-22 11:20:24 +08:00
sigoden
1eb69f6806 chore: ui minior refinement 2024-05-22 01:56:06 +00:00
sigoden
5f0369aa39 chore: js format 2024-05-14 09:04:49 +00:00
sigoden
fe2358506d fix: head div overlap main contents when wrap (#386) 2024-05-14 17:04:06 +08:00
sigoden
6b6d69a8ef feat: add log-file option (#383) 2024-05-11 17:13:31 +08:00
sigoden
cb7d417fd3 fix: strange issue that occurs only on Microsoft WebDAV (#382) 2024-05-11 16:18:18 +08:00
sigoden
75f06f749c chore: fix typos and clippy (#379) 2024-05-05 06:23:18 +08:00
sigoden
d0c79a95e5 chore: update issue tempalte for bug report 2024-04-27 04:00:02 +00:00
Qishuai Liu
ffc0991a12 refactor: add fixed-width numerals to date and size on file list page (#378) 2024-04-26 17:34:38 +08:00
sigoden
51f9c87e65 chore: update deps 2024-04-19 01:41:41 +00:00
sigoden
529bb33f0b chore: update ci 2024-04-19 01:39:36 +00:00
sigoden
3d3bb822ee chore: update readme 2024-04-19 01:06:34 +00:00
sigoden
9353b2e759 feat: add api to get the hash of a file (#375) 2024-04-19 08:48:54 +08:00
sigoden
a277698322 chore: update docker 2024-04-07 23:01:59 +00:00
sigoden
0ff2b15c9a refactor: digest_auth related tests (#372) 2024-04-08 06:56:51 +08:00
sigoden
319333cd22 chore: update deps 2024-04-07 21:19:34 +00:00
sigoden
d66c9de8c8 feat: tls handshake timeout (#368) 2024-03-08 10:29:12 +08:00
sigoden
7c0fa3dab7 chore: update deps 2024-03-08 00:52:31 +00:00
sigoden
48066d79e0 chore: fix typo 2024-03-08 00:46:35 +00:00
tobyp
1c41db0c2d fix: timestamp format of getlastmodified in dav xml (#366) 2024-02-22 08:30:01 +08:00
Matthias Möller
76ef7ba0fb chore: removes unnecessary clone (#364) 2024-02-17 20:09:20 +08:00
sigoden
3deac84cc9 chore: add docker pulls badge to readme 2024-02-14 11:54:59 +00:00
sigoden
638b715bc2 chore: release v0.40.0 (#361)
* chore: release v0.40.0

* update deps
2024-02-13 12:05:46 +08:00
23 changed files with 801 additions and 653 deletions

View File

@@ -7,6 +7,12 @@ about: Create a report to help us improve
<!-- A clear and concise description of what the bug is. --> <!-- A clear and concise description of what the bug is. -->
**Configuration**
<!-- The dufs command-line arguments or configuration -->
<!-- If the problems are related to auth/perm, please conceal only the user:pass, but do not hide the entire `auth` configuration. -->
**Log** **Log**
The dufs log is crucial for locating the problem, so please do not omit it. The dufs log is crucial for locating the problem, so please do not omit it.
@@ -15,4 +21,4 @@ The dufs log is crucial for locating the problem, so please do not omit it.
- Dufs version: - Dufs version:
- Browser/Webdav info: - Browser/Webdav info:
- OS info: - OS info:
- Proxy server: e.g. nginx, cloudflare - Proxy server: e.g. nginx, cloudflare

View File

@@ -29,7 +29,7 @@ jobs:
RUSTFLAGS: --deny warnings RUSTFLAGS: --deny warnings
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v4
- name: Install Rust Toolchain Components - name: Install Rust Toolchain Components
uses: dtolnay/rust-toolchain@stable uses: dtolnay/rust-toolchain@stable

View File

@@ -54,28 +54,13 @@ jobs:
os: ubuntu-latest os: ubuntu-latest
use-cross: true use-cross: true
cargo-flags: "" cargo-flags: ""
- target: mips-unknown-linux-musl
os: ubuntu-latest
use-cross: true
cargo-flags: "--no-default-features"
- target: mipsel-unknown-linux-musl
os: ubuntu-latest
use-cross: true
cargo-flags: "--no-default-features"
- target: mips64-unknown-linux-gnuabi64
os: ubuntu-latest
use-cross: true
cargo-flags: "--no-default-features"
- target: mips64el-unknown-linux-gnuabi64
os: ubuntu-latest
use-cross: true
cargo-flags: "--no-default-features"
runs-on: ${{matrix.os}} runs-on: ${{matrix.os}}
env: env:
BUILD_CMD: cargo BUILD_CMD: cargo
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v4
- name: Check Tag - name: Check Tag
id: check-tag id: check-tag
@@ -94,20 +79,18 @@ jobs:
uses: dtolnay/rust-toolchain@stable uses: dtolnay/rust-toolchain@stable
with: with:
targets: ${{ matrix.target }} targets: ${{ matrix.target }}
# Since rust 1.72, mips platforms are tier 3
toolchain: 1.71
- name: Install cross - name: Install cross
if: matrix.use-cross if: matrix.use-cross
uses: taiki-e/install-action@v2 uses: taiki-e/install-action@v2
with: with:
tool: cross tool: cross
- name: Overwrite build command env variable - name: Overwrite build command env variable
if: matrix.use-cross if: matrix.use-cross
shell: bash shell: bash
run: echo "BUILD_CMD=cross" >> $GITHUB_ENV run: echo "BUILD_CMD=cross" >> $GITHUB_ENV
- name: Show Version Information (Rust, cargo, GCC) - name: Show Version Information (Rust, cargo, GCC)
shell: bash shell: bash
run: | run: |
@@ -155,14 +138,12 @@ jobs:
fi fi
- name: Publish Archive - name: Publish Archive
uses: softprops/action-gh-release@v1 uses: softprops/action-gh-release@v2
if: ${{ startsWith(github.ref, 'refs/tags/') }} if: ${{ startsWith(github.ref, 'refs/tags/') }}
with: with:
draft: false draft: false
files: ${{ steps.package.outputs.archive }} files: ${{ steps.package.outputs.archive }}
prerelease: ${{ steps.check-tag.outputs.rc == 'true' }} prerelease: ${{ steps.check-tag.outputs.rc == 'true' }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
docker: docker:
name: Publish to Docker Hub name: Publish to Docker Hub
@@ -171,17 +152,18 @@ jobs:
needs: release needs: release
steps: steps:
- name: Set up QEMU - name: Set up QEMU
uses: docker/setup-qemu-action@v2 uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx - name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2 uses: docker/setup-buildx-action@v3
- name: Login to DockerHub - name: Login to DockerHub
uses: docker/login-action@v2 uses: docker/login-action@v3
with: with:
username: ${{ secrets.DOCKERHUB_USERNAME }} username: ${{ github.repository_owner }}
password: ${{ secrets.DOCKERHUB_TOKEN }} password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push - name: Build and push
uses: docker/build-push-action@v4 uses: docker/build-push-action@v5
with: with:
file: Dockerfile-release
build-args: | build-args: |
REPO=${{ github.repository }} REPO=${{ github.repository }}
VER=${{ github.ref_name }} VER=${{ github.ref_name }}
@@ -199,7 +181,7 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
needs: release needs: release
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@stable - uses: dtolnay/rust-toolchain@stable

View File

@@ -2,6 +2,41 @@
All notable changes to this project will be documented in this file. All notable changes to this project will be documented in this file.
## [0.41.0] - 2024-05-22
### Bug Fixes
- Timestamp format of getlastmodified in dav xml ([#366](https://github.com/sigoden/dufs/issues/366))
- Strange issue that occurs only on Microsoft WebDAV ([#382](https://github.com/sigoden/dufs/issues/382))
- Head div overlap main contents when wrap ([#386](https://github.com/sigoden/dufs/issues/386))
### Features
- Tls handshake timeout ([#368](https://github.com/sigoden/dufs/issues/368))
- Add api to get the hash of a file ([#375](https://github.com/sigoden/dufs/issues/375))
- Add log-file option ([#383](https://github.com/sigoden/dufs/issues/383))
### Refactor
- Digest_auth related tests ([#372](https://github.com/sigoden/dufs/issues/372))
- Add fixed-width numerals to date and size on file list page ([#378](https://github.com/sigoden/dufs/issues/378))
## [0.40.0] - 2024-02-13
### Bug Fixes
- Guard req and destination path ([#359](https://github.com/sigoden/dufs/issues/359))
### Features
- Revert supporting for forbidden permission ([#352](https://github.com/sigoden/dufs/issues/352))
### Refactor
- Do not try to bind ipv6 if no ipv6 ([#348](https://github.com/sigoden/dufs/issues/348))
- Improve invalid auth ([#356](https://github.com/sigoden/dufs/issues/356))
- Improve resolve_path and handle_assets, abandon guard_path ([#360](https://github.com/sigoden/dufs/issues/360))
## [0.39.0] - 2024-01-11 ## [0.39.0] - 2024-01-11
### Bug Fixes ### Bug Fixes
@@ -69,7 +104,7 @@ All notable changes to this project will be documented in this file.
- Remove one clone on `assets_prefix` ([#270](https://github.com/sigoden/dufs/issues/270)) - Remove one clone on `assets_prefix` ([#270](https://github.com/sigoden/dufs/issues/270))
- Optimize tests - Optimize tests
- Improve code quanity ([#282](https://github.com/sigoden/dufs/issues/282)) - Improve code quality ([#282](https://github.com/sigoden/dufs/issues/282))
## [0.36.0] - 2023-08-24 ## [0.36.0] - 2023-08-24

825
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,6 @@
[package] [package]
name = "dufs" name = "dufs"
version = "0.39.0" version = "0.41.0"
edition = "2021" edition = "2021"
authors = ["sigoden <sigoden@gmail.com>"] authors = ["sigoden <sigoden@gmail.com>"]
description = "Dufs is a distinctive utility file server" description = "Dufs is a distinctive utility file server"
@@ -11,28 +11,28 @@ categories = ["command-line-utilities", "web-programming::http-server"]
keywords = ["static", "file", "server", "webdav", "cli"] keywords = ["static", "file", "server", "webdav", "cli"]
[dependencies] [dependencies]
clap = { version = "4", features = ["wrap_help", "env"] } clap = { version = "4.5", features = ["wrap_help", "env"] }
clap_complete = "4" clap_complete = "4.5"
chrono = { version = "0.4", default-features = false, features = ["clock"] } chrono = { version = "0.4", default-features = false, features = ["clock"] }
tokio = { version = "1", features = ["rt-multi-thread", "macros", "fs", "io-util", "signal"]} tokio = { version = "1", features = ["rt-multi-thread", "macros", "fs", "io-util", "signal"]}
tokio-util = { version = "0.7", features = ["io-util", "compat"] } tokio-util = { version = "0.7", features = ["io-util", "compat"] }
hyper = { version = "1.0", features = ["http1", "server"] } hyper = { version = "1", features = ["http1", "server"] }
percent-encoding = "2.3" percent-encoding = "2.3"
serde = { version = "1", features = ["derive"] } serde = { version = "1", features = ["derive"] }
serde_json = "1" serde_json = "1"
futures-util = { version = "0.3", default-features = false, features = ["alloc"] } futures-util = { version = "0.3", default-features = false, features = ["alloc"] }
async_zip = { version = "0.0.16", default-features = false, features = ["deflate", "bzip2", "xz", "chrono", "tokio"] } async_zip = { version = "0.0.17", default-features = false, features = ["deflate", "bzip2", "xz", "chrono", "tokio"] }
headers = "0.4" headers = "0.4"
mime_guess = "2.0" mime_guess = "2.0"
if-addrs = "0.11" if-addrs = "0.12"
rustls-pemfile = { version = "2.0", optional = true } rustls-pemfile = { version = "2.0", optional = true }
tokio-rustls = { version = "0.25", optional = true } tokio-rustls = { version = "0.26", optional = true, default-features = false, features = ["ring", "tls12"]}
md5 = "0.7" md5 = "0.7"
lazy_static = "1.4" lazy_static = "1.4"
uuid = { version = "1.7", features = ["v4", "fast-rng"] } uuid = { version = "1.7", features = ["v4", "fast-rng"] }
urlencoding = "2.1" urlencoding = "2.1"
xml-rs = "0.8" xml-rs = "0.8"
log = "0.4" log = { version = "0.4", features = ["std"] }
socket2 = "0.5" socket2 = "0.5"
async-stream = "0.3" async-stream = "0.3"
walkdir = "2.3" walkdir = "2.3"
@@ -45,13 +45,14 @@ glob = "0.3"
indexmap = "2.2" indexmap = "2.2"
serde_yaml = "0.9" serde_yaml = "0.9"
sha-crypt = "0.5" sha-crypt = "0.5"
base64 = "0.21" base64 = "0.22"
smart-default = "0.7" smart-default = "0.7"
rustls-pki-types = "1.2" rustls-pki-types = "1.2"
hyper-util = { version = "0.1", features = ["server-auto", "tokio"] } hyper-util = { version = "0.1", features = ["server-auto", "tokio"] }
http-body-util = "0.1" http-body-util = "0.1"
bytes = "1.5" bytes = "1.5"
pin-project-lite = "0.2" pin-project-lite = "0.2"
sha2 = "0.10.8"
[features] [features]
default = ["tls"] default = ["tls"]
@@ -59,14 +60,14 @@ tls = ["rustls-pemfile", "tokio-rustls"]
[dev-dependencies] [dev-dependencies]
assert_cmd = "2" assert_cmd = "2"
reqwest = { version = "0.11", features = ["blocking", "multipart", "rustls-tls"], default-features = false } reqwest = { version = "0.12", features = ["blocking", "multipart", "rustls-tls"], default-features = false }
assert_fs = "1" assert_fs = "1"
port_check = "0.1" port_check = "0.2"
rstest = "0.18" rstest = "0.19"
regex = "1" regex = "1"
url = "2" url = "2"
diqwest = { version = "2.0", features = ["blocking"], default-features = false }
predicates = "3" predicates = "3"
digest_auth = "0.3.1"
[profile.release] [profile.release]
opt-level = 3 opt-level = 3

View File

@@ -1,17 +1,12 @@
FROM alpine as builder FROM --platform=linux/amd64 messense/rust-musl-cross:x86_64-musl AS amd64
ARG REPO VER TARGETPLATFORM COPY . .
RUN if [ "$TARGETPLATFORM" = "linux/amd64" ]; then \ RUN cargo install --path . --root /
TARGET="x86_64-unknown-linux-musl"; \
elif [ "$TARGETPLATFORM" = "linux/arm64" ]; then \ FROM --platform=linux/amd64 messense/rust-musl-cross:aarch64-musl AS arm64
TARGET="aarch64-unknown-linux-musl"; \ COPY . .
elif [ "$TARGETPLATFORM" = "linux/386" ]; then \ RUN cargo install --path . --root /
TARGET="i686-unknown-linux-musl"; \
elif [ "$TARGETPLATFORM" = "linux/arm/v7" ]; then \ FROM ${TARGETARCH} AS builder
TARGET="armv7-unknown-linux-musleabihf"; \
fi && \
wget https://github.com/${REPO}/releases/download/${VER}/dufs-${VER}-${TARGET}.tar.gz && \
tar -xf dufs-${VER}-${TARGET}.tar.gz && \
mv dufs /bin/
FROM scratch FROM scratch
COPY --from=builder /bin/dufs /bin/dufs COPY --from=builder /bin/dufs /bin/dufs

19
Dockerfile-release Normal file
View File

@@ -0,0 +1,19 @@
FROM alpine as builder
ARG REPO VER TARGETPLATFORM
RUN if [ "$TARGETPLATFORM" = "linux/amd64" ]; then \
TARGET="x86_64-unknown-linux-musl"; \
elif [ "$TARGETPLATFORM" = "linux/arm64" ]; then \
TARGET="aarch64-unknown-linux-musl"; \
elif [ "$TARGETPLATFORM" = "linux/386" ]; then \
TARGET="i686-unknown-linux-musl"; \
elif [ "$TARGETPLATFORM" = "linux/arm/v7" ]; then \
TARGET="armv7-unknown-linux-musleabihf"; \
fi && \
wget https://github.com/${REPO}/releases/download/${VER}/dufs-${VER}-${TARGET}.tar.gz && \
tar -xf dufs-${VER}-${TARGET}.tar.gz && \
mv dufs /bin/
FROM scratch
COPY --from=builder /bin/dufs /bin/dufs
STOPSIGNAL SIGINT
ENTRYPOINT ["/bin/dufs"]

View File

@@ -2,6 +2,7 @@
[![CI](https://github.com/sigoden/dufs/actions/workflows/ci.yaml/badge.svg)](https://github.com/sigoden/dufs/actions/workflows/ci.yaml) [![CI](https://github.com/sigoden/dufs/actions/workflows/ci.yaml/badge.svg)](https://github.com/sigoden/dufs/actions/workflows/ci.yaml)
[![Crates](https://img.shields.io/crates/v/dufs.svg)](https://crates.io/crates/dufs) [![Crates](https://img.shields.io/crates/v/dufs.svg)](https://crates.io/crates/dufs)
[![Docker Pulls](https://img.shields.io/docker/pulls/sigoden/dufs)](https://hub.docker.com/r/sigoden/dufs)
Dufs is a distinctive utility file server that supports static serving, uploading, searching, accessing control, webdav... Dufs is a distinctive utility file server that supports static serving, uploading, searching, accessing control, webdav...
@@ -30,7 +31,7 @@ cargo install dufs
### With docker ### With docker
``` ```
docker run -v `pwd`:/data -p 5000:5000 --rm -it sigoden/dufs /data -A docker run -v `pwd`:/data -p 5000:5000 --rm sigoden/dufs /data -A
``` ```
### With [Homebrew](https://brew.sh) ### With [Homebrew](https://brew.sh)
@@ -72,6 +73,7 @@ Options:
--render-spa Serve SPA(Single Page Application) --render-spa Serve SPA(Single Page Application)
--assets <path> Set the path to the assets directory for overriding the built-in assets --assets <path> Set the path to the assets directory for overriding the built-in assets
--log-format <format> Customize http log format --log-format <format> Customize http log format
--log-file <file> Specify the file to save logs to, other than stdout/stderr
--compress <level> Set zip compress level [default: low] [possible values: none, low, medium, high] --compress <level> Set zip compress level [default: low] [possible values: none, low, medium, high]
--completions <shell> Print shell completion script for <shell> [possible values: bash, elvish, fish, powershell, zsh] --completions <shell> Print shell completion script for <shell> [possible values: bash, elvish, fish, powershell, zsh]
--tls-cert <path> Path to an SSL/TLS certificate to serve with HTTPS --tls-cert <path> Path to an SSL/TLS certificate to serve with HTTPS
@@ -157,7 +159,8 @@ curl -T path-to-file http://127.0.0.1:5000/new-path/path-to-file
Download a file Download a file
```sh ```sh
curl http://127.0.0.1:5000/path-to-file curl http://127.0.0.1:5000/path-to-file # download the file
curl http://127.0.0.1:5000/path-to-file?hash # retrieve the sha256 hash of the file
``` ```
Download a folder as zip file Download a folder as zip file
@@ -175,13 +178,13 @@ curl -X DELETE http://127.0.0.1:5000/path-to-file-or-folder
Create a directory Create a directory
```sh ```sh
curl -X MKCOL https://127.0.0.1:5000/path-to-folder curl -X MKCOL http://127.0.0.1:5000/path-to-folder
``` ```
Move the file/folder to the new path Move the file/folder to the new path
```sh ```sh
curl -X MOVE https://127.0.0.1:5000/path -H "Destination: https://127.0.0.1:5000/new-path" curl -X MOVE http://127.0.0.1:5000/path -H "Destination: http://127.0.0.1:5000/new-path"
``` ```
List/search directory contents List/search directory contents
@@ -246,12 +249,12 @@ Create hashed password
``` ```
$ mkpasswd -m sha-512 -s $ mkpasswd -m sha-512 -s
Password: 123456 Password: 123456
$6$qCAVUG7yn7t/hH4d$BWm8r5MoDywNmDP/J3V2S2a6flmKHC1IpblfoqZfuK.LtLBZ0KFXP9QIfJP8RqL8MCw4isdheoAMTuwOz.pAO/ $6$tWMB51u6Kb2ui3wd$5gVHP92V9kZcMwQeKTjyTRgySsYJu471Jb1I6iHQ8iZ6s07GgCIO69KcPBRuwPE5tDq05xMAzye0NxVKuJdYs/
``` ```
Use hashed password Use hashed password
``` ```
dufs -a 'admin:$6$qCAVUG7yn7t/hH4d$BWm8r5MoDywNmDP/J3V2S2a6flmKHC1IpblfoqZfuK.LtLBZ0KFXP9QIfJP8RqL8MCw4isdheoAMTuwOz.pAO/@/:rw' dufs -a 'admin:$6$tWMB51u6Kb2ui3wd$5gVHP92V9kZcMwQeKTjyTRgySsYJu471Jb1I6iHQ8iZ6s07GgCIO69KcPBRuwPE5tDq05xMAzye0NxVKuJdYs/@/:rw'
``` ```
Two important things for hashed passwords: Two important things for hashed passwords:
@@ -327,7 +330,7 @@ All options can be set using environment variables prefixed with `DUFS_`.
--config <file> DUFS_CONFIG=config.yaml --config <file> DUFS_CONFIG=config.yaml
-b, --bind <addrs> DUFS_BIND=0.0.0.0 -b, --bind <addrs> DUFS_BIND=0.0.0.0
-p, --port <port> DUFS_PORT=5000 -p, --port <port> DUFS_PORT=5000
--path-prefix <path> DUFS_PATH_PREFIX=/static --path-prefix <path> DUFS_PATH_PREFIX=/dufs
--hidden <value> DUFS_HIDDEN=tmp,*.log,*.lock --hidden <value> DUFS_HIDDEN=tmp,*.log,*.lock
-a, --auth <rules> DUFS_AUTH="admin:admin@/:rw|@/" -a, --auth <rules> DUFS_AUTH="admin:admin@/:rw|@/"
-A, --allow-all DUFS_ALLOW_ALL=true -A, --allow-all DUFS_ALLOW_ALL=true
@@ -340,9 +343,10 @@ All options can be set using environment variables prefixed with `DUFS_`.
--render-index DUFS_RENDER_INDEX=true --render-index DUFS_RENDER_INDEX=true
--render-try-index DUFS_RENDER_TRY_INDEX=true --render-try-index DUFS_RENDER_TRY_INDEX=true
--render-spa DUFS_RENDER_SPA=true --render-spa DUFS_RENDER_SPA=true
--assets <path> DUFS_ASSETS=/assets --assets <path> DUFS_ASSETS=./assets
--log-format <format> DUFS_LOG_FORMAT="" --log-format <format> DUFS_LOG_FORMAT=""
--compress <compress> DUFS_COMPRESS="low" --log-file <file> DUFS_LOG_FILE=./dufs.log
--compress <compress> DUFS_COMPRESS=low
--tls-cert <path> DUFS_TLS_CERT=cert.pem --tls-cert <path> DUFS_TLS_CERT=cert.pem
--tls-key <path> DUFS_TLS_KEY=key.pem --tls-key <path> DUFS_TLS_KEY=key.pem
``` ```
@@ -378,6 +382,7 @@ render-try-index: true
render-spa: true render-spa: true
assets: ./assets/ assets: ./assets/
log-format: '$remote_addr "$request" $status $http_user_agent' log-format: '$remote_addr "$request" $status $http_user_agent'
log-file: ./dufs.log
compress: low compress: low
tls-cert: tests/data/cert.pem tls-cert: tests/data/cert.pem
tls-key: tests/data/key_pkcs1.pem tls-key: tests/data/key_pkcs1.pem

0
assets/favicon.ico Executable file → Normal file
View File

Before

Width:  |  Height:  |  Size: 9.1 KiB

After

Width:  |  Height:  |  Size: 9.1 KiB

View File

@@ -6,7 +6,7 @@ html {
body { body {
/* prevent premature breadcrumb wrapping on mobile */ /* prevent premature breadcrumb wrapping on mobile */
min-width: 500px; min-width: 538px;
margin: 0; margin: 0;
} }
@@ -19,14 +19,15 @@ body {
flex-wrap: wrap; flex-wrap: wrap;
align-items: center; align-items: center;
padding: 0.6em 1em; padding: 0.6em 1em;
position: fixed; position: sticky;
width: 100%; top: 0;
background-color: white; background-color: white;
} }
.breadcrumb { .breadcrumb {
font-size: 1.25em; font-size: 1.25em;
padding-right: 0.6em; padding-right: 0.6em;
word-break: break-all;
} }
.breadcrumb>a { .breadcrumb>a {
@@ -108,7 +109,7 @@ body {
} }
.main { .main {
padding: 3.3em 1em 0; padding: 0 1em;
} }
.empty-folder { .empty-folder {
@@ -153,18 +154,20 @@ body {
.paths-table .cell-actions { .paths-table .cell-actions {
width: 90px; width: 90px;
display: flex; display: flex;
padding-left: 0.6em; padding-left: 0.5em;
} }
.paths-table .cell-mtime { .paths-table .cell-mtime {
width: 120px; width: 120px;
padding-left: 0.6em; padding-left: 0.5em;
font-variant-numeric: tabular-nums;
} }
.paths-table .cell-size { .paths-table .cell-size {
text-align: right; text-align: right;
width: 70px; width: 70px;
padding-left: 0.6em; padding-left: 0.5em;
font-variant-numeric: tabular-nums;
} }
.path svg { .path svg {
@@ -186,7 +189,7 @@ body {
display: block; display: block;
text-decoration: none; text-decoration: none;
max-width: calc(100vw - 375px); max-width: calc(100vw - 375px);
min-width: 200px; min-width: 170px;
} }
.path a:hover { .path a:hover {

View File

@@ -114,7 +114,6 @@ function ready() {
document.querySelector(".index-page").classList.remove("hidden"); document.querySelector(".index-page").classList.remove("hidden");
setupIndexPage(); setupIndexPage();
} else if (DATA.kind == "Edit") { } else if (DATA.kind == "Edit") {
document.title = `Edit ${DATA.href} - Dufs`; document.title = `Edit ${DATA.href} - Dufs`;
document.querySelector(".editor-page").classList.remove("hidden");; document.querySelector(".editor-page").classList.remove("hidden");;
@@ -886,12 +885,12 @@ async function assertResOK(res) {
} }
function getEncoding(contentType) { function getEncoding(contentType) {
const charset = contentType?.split(";")[1]; const charset = contentType?.split(";")[1];
if (/charset/i.test(charset)) { if (/charset/i.test(charset)) {
let encoding = charset.split("=")[1]; let encoding = charset.split("=")[1];
if (encoding) { if (encoding) {
return encoding.toLowerCase() return encoding.toLowerCase()
}
} }
return 'utf-8' }
return 'utf-8'
} }

View File

@@ -197,6 +197,15 @@ pub fn build_cli() -> Command {
.value_name("format") .value_name("format")
.help("Customize http log format"), .help("Customize http log format"),
) )
.arg(
Arg::new("log-file")
.env("DUFS_LOG_FILE")
.hide_env(true)
.long("log-file")
.value_name("file")
.value_parser(value_parser!(PathBuf))
.help("Specify the file to save logs to, other than stdout/stderr"),
)
.arg( .arg(
Arg::new("compress") Arg::new("compress")
.env("DUFS_COMPRESS") .env("DUFS_COMPRESS")
@@ -280,6 +289,7 @@ pub struct Args {
#[serde(deserialize_with = "deserialize_log_http")] #[serde(deserialize_with = "deserialize_log_http")]
#[serde(rename = "log-format")] #[serde(rename = "log-format")]
pub http_logger: HttpLogger, pub http_logger: HttpLogger,
pub log_file: Option<PathBuf>,
pub compress: Compress, pub compress: Compress,
pub tls_cert: Option<PathBuf>, pub tls_cert: Option<PathBuf>,
pub tls_key: Option<PathBuf>, pub tls_key: Option<PathBuf>,
@@ -301,7 +311,7 @@ impl Args {
} }
if let Some(path) = matches.get_one::<PathBuf>("serve-path") { if let Some(path) = matches.get_one::<PathBuf>("serve-path") {
args.serve_path = path.clone() args.serve_path.clone_from(path)
} }
args.serve_path = Self::sanitize_path(args.serve_path)?; args.serve_path = Self::sanitize_path(args.serve_path)?;
@@ -317,7 +327,7 @@ impl Args {
args.path_is_file = args.serve_path.metadata()?.is_file(); args.path_is_file = args.serve_path.metadata()?.is_file();
if let Some(path_prefix) = matches.get_one::<String>("path-prefix") { if let Some(path_prefix) = matches.get_one::<String>("path-prefix") {
args.path_prefix = path_prefix.clone(); args.path_prefix.clone_from(path_prefix)
} }
args.path_prefix = args.path_prefix.trim_matches('/').to_string(); args.path_prefix = args.path_prefix.trim_matches('/').to_string();
@@ -392,6 +402,10 @@ impl Args {
args.http_logger = log_format.parse()?; args.http_logger = log_format.parse()?;
} }
if let Some(log_file) = matches.get_one::<PathBuf>("log-file") {
args.log_file = Some(log_file.clone());
}
if let Some(compress) = matches.get_one::<Compress>("compress") { if let Some(compress) = matches.get_one::<Compress>("compress") {
args.compress = *compress; args.compress = *compress;
} }

View File

@@ -100,6 +100,7 @@ impl AccessControl {
path: &str, path: &str,
method: &Method, method: &Method,
authorization: Option<&HeaderValue>, authorization: Option<&HeaderValue>,
guard_options: bool,
) -> (Option<String>, Option<AccessPaths>) { ) -> (Option<String>, Option<AccessPaths>) {
if let Some(authorization) = authorization { if let Some(authorization) = authorization {
if let Some(user) = get_auth_user(authorization) { if let Some(user) = get_auth_user(authorization) {
@@ -116,7 +117,7 @@ impl AccessControl {
return (None, None); return (None, None);
} }
if method == Method::OPTIONS { if !guard_options && method == Method::OPTIONS {
return (None, Some(AccessPaths::new(AccessPerm::ReadOnly))); return (None, Some(AccessPaths::new(AccessPerm::ReadOnly)));
} }

View File

@@ -1,8 +1,14 @@
use anyhow::{Context, Result};
use chrono::{Local, SecondsFormat}; use chrono::{Local, SecondsFormat};
use log::{Level, Metadata, Record}; use log::{Level, LevelFilter, Metadata, Record};
use log::{LevelFilter, SetLoggerError}; use std::fs::{File, OpenOptions};
use std::io::Write;
use std::path::PathBuf;
use std::sync::Mutex;
struct SimpleLogger; struct SimpleLogger {
file: Option<Mutex<File>>,
}
impl log::Log for SimpleLogger { impl log::Log for SimpleLogger {
fn enabled(&self, metadata: &Metadata) -> bool { fn enabled(&self, metadata: &Metadata) -> bool {
@@ -12,10 +18,20 @@ impl log::Log for SimpleLogger {
fn log(&self, record: &Record) { fn log(&self, record: &Record) {
if self.enabled(record.metadata()) { if self.enabled(record.metadata()) {
let timestamp = Local::now().to_rfc3339_opts(SecondsFormat::Secs, true); let timestamp = Local::now().to_rfc3339_opts(SecondsFormat::Secs, true);
if record.level() < Level::Info { let text = format!("{} {} - {}", timestamp, record.level(), record.args());
eprintln!("{} {} - {}", timestamp, record.level(), record.args()); match &self.file {
} else { Some(file) => {
println!("{} {} - {}", timestamp, record.level(), record.args()); if let Ok(mut file) = file.lock() {
let _ = writeln!(file, "{text}");
}
}
None => {
if record.level() < Level::Info {
eprintln!("{text}");
} else {
println!("{text}");
}
}
} }
} }
} }
@@ -23,8 +39,23 @@ impl log::Log for SimpleLogger {
fn flush(&self) {} fn flush(&self) {}
} }
static LOGGER: SimpleLogger = SimpleLogger; pub fn init(log_file: Option<PathBuf>) -> Result<()> {
let file = match log_file {
pub fn init() -> Result<(), SetLoggerError> { None => None,
log::set_logger(&LOGGER).map(|()| log::set_max_level(LevelFilter::Info)) Some(log_file) => {
let file = OpenOptions::new()
.create(true)
.append(true)
.open(&log_file)
.with_context(|| {
format!("Failed to open the log file at '{}'", log_file.display())
})?;
Some(Mutex::new(file))
}
};
let logger = SimpleLogger { file };
log::set_boxed_logger(Box::new(logger))
.map(|_| log::set_max_level(LevelFilter::Info))
.with_context(|| "Failed to init logger")?;
Ok(())
} }

View File

@@ -29,13 +29,14 @@ use std::sync::{
atomic::{AtomicBool, Ordering}, atomic::{AtomicBool, Ordering},
Arc, Arc,
}; };
use std::time::Duration;
use tokio::time::timeout;
use tokio::{net::TcpListener, task::JoinHandle}; use tokio::{net::TcpListener, task::JoinHandle};
#[cfg(feature = "tls")] #[cfg(feature = "tls")]
use tokio_rustls::{rustls::ServerConfig, TlsAcceptor}; use tokio_rustls::{rustls::ServerConfig, TlsAcceptor};
#[tokio::main] #[tokio::main]
async fn main() -> Result<()> { async fn main() -> Result<()> {
logger::init().map_err(|e| anyhow!("Failed to init logger, {e}"))?;
let cmd = build_cli(); let cmd = build_cli();
let matches = cmd.get_matches(); let matches = cmd.get_matches();
if let Some(generator) = matches.get_one::<Shell>("completions") { if let Some(generator) = matches.get_one::<Shell>("completions") {
@@ -44,6 +45,7 @@ async fn main() -> Result<()> {
return Ok(()); return Ok(());
} }
let mut args = Args::parse(matches)?; let mut args = Args::parse(matches)?;
logger::init(args.log_file.clone()).map_err(|e| anyhow!("Failed to init logger, {e}"))?;
let (new_addrs, print_addrs) = check_addrs(&args)?; let (new_addrs, print_addrs) = check_addrs(&args)?;
args.addrs = new_addrs; args.addrs = new_addrs;
let running = Arc::new(AtomicBool::new(true)); let running = Arc::new(AtomicBool::new(true));
@@ -91,12 +93,19 @@ fn serve(args: Args, running: Arc<AtomicBool>) -> Result<Vec<JoinHandle<()>>> {
config.alpn_protocols = vec![b"h2".to_vec(), b"http/1.1".to_vec()]; config.alpn_protocols = vec![b"h2".to_vec(), b"http/1.1".to_vec()];
let config = Arc::new(config); let config = Arc::new(config);
let tls_accepter = TlsAcceptor::from(config); let tls_accepter = TlsAcceptor::from(config);
let handshake_timeout = Duration::from_secs(10);
let handle = tokio::spawn(async move { let handle = tokio::spawn(async move {
loop { loop {
let (cnx, addr) = listener.accept().await.unwrap(); let Ok((stream, addr)) = listener.accept().await else {
let Ok(stream) = tls_accepter.accept(cnx).await else { continue;
warn!("During cls handshake connection from {}", addr); };
let Some(stream) =
timeout(handshake_timeout, tls_accepter.accept(stream))
.await
.ok()
.and_then(|v| v.ok())
else {
continue; continue;
}; };
let stream = TokioIo::new(stream); let stream = TokioIo::new(stream);
@@ -113,8 +122,10 @@ fn serve(args: Args, running: Arc<AtomicBool>) -> Result<Vec<JoinHandle<()>>> {
(None, None) => { (None, None) => {
let handle = tokio::spawn(async move { let handle = tokio::spawn(async move {
loop { loop {
let (cnx, addr) = listener.accept().await.unwrap(); let Ok((stream, addr)) = listener.accept().await else {
let stream = TokioIo::new(cnx); continue;
};
let stream = TokioIo::new(stream);
tokio::spawn(handle_stream( tokio::spawn(handle_stream(
server_handle.clone(), server_handle.clone(),
stream, stream,
@@ -139,8 +150,10 @@ fn serve(args: Args, running: Arc<AtomicBool>) -> Result<Vec<JoinHandle<()>>> {
.with_context(|| format!("Failed to bind `{}`", path.display()))?; .with_context(|| format!("Failed to bind `{}`", path.display()))?;
let handle = tokio::spawn(async move { let handle = tokio::spawn(async move {
loop { loop {
let (cnx, _) = listener.accept().await.unwrap(); let Ok((stream, _addr)) = listener.accept().await else {
let stream = TokioIo::new(cnx); continue;
};
let stream = TokioIo::new(stream);
tokio::spawn(handle_stream(server_handle.clone(), stream, None)); tokio::spawn(handle_stream(server_handle.clone(), stream, None));
} }
}); });
@@ -160,18 +173,15 @@ where
let hyper_service = let hyper_service =
service_fn(move |request: Request<Incoming>| handle.clone().call(request, addr)); service_fn(move |request: Request<Incoming>| handle.clone().call(request, addr));
let ret = Builder::new(TokioExecutor::new()) match Builder::new(TokioExecutor::new())
.serve_connection_with_upgrades(stream, hyper_service) .serve_connection_with_upgrades(stream, hyper_service)
.await; .await
{
if let Err(err) = ret { Ok(()) => {}
let scope = match addr { Err(_err) => {
Some(addr) => format!(" from {}", addr), // This error only appears when the client doesn't send a request and terminate the connection.
None => String::new(), //
}; // If client sends one request then terminate connection whenever, it doesn't appear.
match err.downcast_ref::<std::io::Error>() {
Some(err) if err.kind() == std::io::ErrorKind::UnexpectedEof => {}
_ => warn!("Serving connection{}: {}", scope, err),
} }
} }
} }

View File

@@ -23,12 +23,13 @@ use hyper::body::Frame;
use hyper::{ use hyper::{
body::Incoming, body::Incoming,
header::{ header::{
HeaderValue, AUTHORIZATION, CONTENT_DISPOSITION, CONTENT_LENGTH, CONTENT_RANGE, HeaderValue, AUTHORIZATION, CONNECTION, CONTENT_DISPOSITION, CONTENT_LENGTH, CONTENT_RANGE,
CONTENT_TYPE, RANGE, CONTENT_TYPE, RANGE,
}, },
Method, StatusCode, Uri, Method, StatusCode, Uri,
}; };
use serde::Serialize; use serde::Serialize;
use sha2::{Digest, Sha256};
use std::borrow::Cow; use std::borrow::Cow;
use std::cmp::Ordering; use std::cmp::Ordering;
use std::collections::HashMap; use std::collections::HashMap;
@@ -106,12 +107,18 @@ impl Server {
let uri = req.uri().clone(); let uri = req.uri().clone();
let assets_prefix = &self.assets_prefix; let assets_prefix = &self.assets_prefix;
let enable_cors = self.args.enable_cors; let enable_cors = self.args.enable_cors;
let is_microsoft_webdav = req
.headers()
.get("user-agent")
.and_then(|v| v.to_str().ok())
.map(|v| v.starts_with("Microsoft-WebDAV-MiniRedir/"))
.unwrap_or_default();
let mut http_log_data = self.args.http_logger.data(&req); let mut http_log_data = self.args.http_logger.data(&req);
if let Some(addr) = addr { if let Some(addr) = addr {
http_log_data.insert("remote_addr".to_string(), addr.ip().to_string()); http_log_data.insert("remote_addr".to_string(), addr.ip().to_string());
} }
let mut res = match self.clone().handle(req).await { let mut res = match self.clone().handle(req, is_microsoft_webdav).await {
Ok(res) => { Ok(res) => {
http_log_data.insert("status".to_string(), res.status().as_u16().to_string()); http_log_data.insert("status".to_string(), res.status().as_u16().to_string());
if !uri.path().starts_with(assets_prefix) { if !uri.path().starts_with(assets_prefix) {
@@ -131,13 +138,22 @@ impl Server {
} }
}; };
if is_microsoft_webdav {
// microsoft webdav requires this.
res.headers_mut()
.insert(CONNECTION, HeaderValue::from_static("close"));
}
if enable_cors { if enable_cors {
add_cors(&mut res); add_cors(&mut res);
} }
Ok(res) Ok(res)
} }
pub async fn handle(self: Arc<Self>, req: Request) -> Result<Response> { pub async fn handle(
self: Arc<Self>,
req: Request,
is_microsoft_webdav: bool,
) -> Result<Response> {
let mut res = Response::default(); let mut res = Response::default();
let req_path = req.uri().path(); let req_path = req.uri().path();
@@ -161,7 +177,10 @@ impl Server {
} }
let authorization = headers.get(AUTHORIZATION); let authorization = headers.get(AUTHORIZATION);
let guard = self.args.auth.guard(&relative_path, &method, authorization); let guard =
self.args
.auth
.guard(&relative_path, &method, authorization, is_microsoft_webdav);
let (user, access_paths) = match guard { let (user, access_paths) = match guard {
(None, None) => { (None, None) => {
@@ -307,6 +326,8 @@ impl Server {
} else if query_params.contains_key("view") { } else if query_params.contains_key("view") {
self.handle_edit_file(path, DataKind::View, head_only, user, &mut res) self.handle_edit_file(path, DataKind::View, head_only, user, &mut res)
.await?; .await?;
} else if query_params.contains_key("hash") {
self.handle_hash_file(path, head_only, &mut res).await?;
} else { } else {
self.handle_send_file(path, headers, head_only, &mut res) self.handle_send_file(path, headers, head_only, &mut res)
.await?; .await?;
@@ -915,6 +936,24 @@ impl Server {
Ok(()) Ok(())
} }
/// Respond to `GET /path?hash` with the SHA-256 of the file as a lowercase
/// hex string.
///
/// Mirrors the other `handle_*` helpers: headers are always set so that a
/// HEAD request sees the same metadata, but the body is only attached when
/// `head_only` is false.
async fn handle_hash_file(
    &self,
    path: &Path,
    head_only: bool,
    res: &mut Response,
) -> Result<()> {
    let output = sha256_file(path).await?;
    res.headers_mut()
        .typed_insert(ContentType::from(mime_guess::mime::TEXT_HTML_UTF_8));
    // The digest is pure ASCII hex, so String::len is already the byte length.
    res.headers_mut()
        .typed_insert(ContentLength(output.len() as u64));
    if head_only {
        return Ok(());
    }
    *res.body_mut() = body_full(output);
    Ok(())
}
async fn handle_propfind_dir( async fn handle_propfind_dir(
&self, &self,
path: &Path, path: &Path,
@@ -1183,7 +1222,7 @@ impl Server {
let guard = self let guard = self
.args .args
.auth .auth
.guard(&dest_path, req.method(), authorization); .guard(&dest_path, req.method(), authorization, false);
match guard { match guard {
(_, Some(_)) => {} (_, Some(_)) => {}
@@ -1362,7 +1401,7 @@ impl PathItem {
pub fn to_dav_xml(&self, prefix: &str) -> String { pub fn to_dav_xml(&self, prefix: &str) -> String {
let mtime = match Utc.timestamp_millis_opt(self.mtime as i64) { let mtime = match Utc.timestamp_millis_opt(self.mtime as i64) {
LocalResult::Single(v) => v.to_rfc2822(), LocalResult::Single(v) => format!("{}", v.format("%a, %d %b %Y %H:%M:%S GMT")),
_ => String::new(), _ => String::new(),
}; };
let mut href = encode_uri(&format!("{}{}", prefix, &self.name)); let mut href = encode_uri(&format!("{}{}", prefix, &self.name));
@@ -1535,7 +1574,6 @@ async fn zip_dir<W: AsyncWrite + Unpin>(
) -> Result<()> { ) -> Result<()> {
let mut writer = ZipFileWriter::with_tokio(writer); let mut writer = ZipFileWriter::with_tokio(writer);
let hidden = Arc::new(hidden.to_vec()); let hidden = Arc::new(hidden.to_vec());
let hidden = hidden.clone();
let dir_clone = dir.to_path_buf(); let dir_clone = dir.to_path_buf();
let zip_paths = tokio::task::spawn_blocking(move || { let zip_paths = tokio::task::spawn_blocking(move || {
let mut paths: Vec<PathBuf> = vec![]; let mut paths: Vec<PathBuf> = vec![];
@@ -1717,3 +1755,20 @@ fn parse_upload_offset(headers: &HeaderMap<HeaderValue>, size: u64) -> Result<Op
let (start, _) = parse_range(value, size).ok_or_else(err)?; let (start, _) = parse_range(value, size).ok_or_else(err)?;
Ok(Some(start)) Ok(Some(start))
} }
/// Stream the file at `path` through a SHA-256 hasher and return the digest
/// formatted as a lowercase hex string.
async fn sha256_file(path: &Path) -> Result<String> {
    let mut hasher = Sha256::new();
    let mut file = fs::File::open(path).await?;
    // Read in fixed-size chunks so arbitrarily large files never need to be
    // held in memory at once.
    let mut chunk = [0u8; 8192];
    loop {
        match file.read(&mut chunk).await? {
            0 => break,
            n => hasher.update(&chunk[..n]),
        }
    }
    Ok(format!("{:x}", hasher.finalize()))
}

View File

@@ -1,7 +1,8 @@
mod digest_auth_util;
mod fixtures; mod fixtures;
mod utils; mod utils;
use diqwest::blocking::WithDigestAuth; use digest_auth_util::send_with_digest_auth;
use fixtures::{server, Error, TestServer}; use fixtures::{server, Error, TestServer};
use indexmap::IndexSet; use indexmap::IndexSet;
use rstest::rstest; use rstest::rstest;
@@ -32,9 +33,7 @@ fn auth(#[case] server: TestServer, #[case] user: &str, #[case] pass: &str) -> R
let url = format!("{}file1", server.url()); let url = format!("{}file1", server.url());
let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?; let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
assert_eq!(resp.status(), 401); assert_eq!(resp.status(), 401);
let resp = fetch!(b"PUT", &url) let resp = send_with_digest_auth(fetch!(b"PUT", &url).body(b"abc".to_vec()), user, pass)?;
.body(b"abc".to_vec())
.send_with_digest_auth(user, pass)?;
assert_eq!(resp.status(), 201); assert_eq!(resp.status(), 201);
Ok(()) Ok(())
} }
@@ -67,13 +66,12 @@ fn auth_hashed_password(
let url = format!("{}file1", server.url()); let url = format!("{}file1", server.url());
let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?; let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
assert_eq!(resp.status(), 401); assert_eq!(resp.status(), 401);
if let Err(err) = fetch!(b"PUT", &url) if let Err(err) =
.body(b"abc".to_vec()) send_with_digest_auth(fetch!(b"PUT", &url).body(b"abc".to_vec()), "user", "pass")
.send_with_digest_auth("user", "pass")
{ {
assert_eq!( assert_eq!(
format!("{err:?}"), err.to_string(),
r#"DigestAuth(MissingRequired("realm", "Basic realm=\"DUFS\""))"# r#"Missing "realm" in header: Basic realm="DUFS""#
); );
} }
let resp = fetch!(b"PUT", &url) let resp = fetch!(b"PUT", &url)
@@ -91,9 +89,7 @@ fn auth_and_public(
let url = format!("{}file1", server.url()); let url = format!("{}file1", server.url());
let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?; let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
assert_eq!(resp.status(), 401); assert_eq!(resp.status(), 401);
let resp = fetch!(b"PUT", &url) let resp = send_with_digest_auth(fetch!(b"PUT", &url).body(b"abc".to_vec()), "user", "pass")?;
.body(b"abc".to_vec())
.send_with_digest_auth("user", "pass")?;
assert_eq!(resp.status(), 201); assert_eq!(resp.status(), 201);
let resp = fetch!(b"GET", &url).send()?; let resp = fetch!(b"GET", &url).send()?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
@@ -125,9 +121,9 @@ fn auth_check(
let url = format!("{}index.html", server.url()); let url = format!("{}index.html", server.url());
let resp = fetch!(b"WRITEABLE", &url).send()?; let resp = fetch!(b"WRITEABLE", &url).send()?;
assert_eq!(resp.status(), 401); assert_eq!(resp.status(), 401);
let resp = fetch!(b"WRITEABLE", &url).send_with_digest_auth("user2", "pass2")?; let resp = send_with_digest_auth(fetch!(b"WRITEABLE", &url), "user2", "pass2")?;
assert_eq!(resp.status(), 403); assert_eq!(resp.status(), 403);
let resp = fetch!(b"WRITEABLE", &url).send_with_digest_auth("user", "pass")?; let resp = send_with_digest_auth(fetch!(b"WRITEABLE", &url), "user", "pass")?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
Ok(()) Ok(())
} }
@@ -139,9 +135,9 @@ fn auth_compact_rules(
let url = format!("{}index.html", server.url()); let url = format!("{}index.html", server.url());
let resp = fetch!(b"WRITEABLE", &url).send()?; let resp = fetch!(b"WRITEABLE", &url).send()?;
assert_eq!(resp.status(), 401); assert_eq!(resp.status(), 401);
let resp = fetch!(b"WRITEABLE", &url).send_with_digest_auth("user2", "pass2")?; let resp = send_with_digest_auth(fetch!(b"WRITEABLE", &url), "user2", "pass2")?;
assert_eq!(resp.status(), 403); assert_eq!(resp.status(), 403);
let resp = fetch!(b"WRITEABLE", &url).send_with_digest_auth("user", "pass")?; let resp = send_with_digest_auth(fetch!(b"WRITEABLE", &url), "user", "pass")?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
Ok(()) Ok(())
} }
@@ -153,12 +149,10 @@ fn auth_readonly(
let url = format!("{}index.html", server.url()); let url = format!("{}index.html", server.url());
let resp = fetch!(b"GET", &url).send()?; let resp = fetch!(b"GET", &url).send()?;
assert_eq!(resp.status(), 401); assert_eq!(resp.status(), 401);
let resp = fetch!(b"GET", &url).send_with_digest_auth("user2", "pass2")?; let resp = send_with_digest_auth(fetch!(b"GET", &url), "user2", "pass2")?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
let url = format!("{}file1", server.url()); let url = format!("{}file1", server.url());
let resp = fetch!(b"PUT", &url) let resp = send_with_digest_auth(fetch!(b"PUT", &url).body(b"abc".to_vec()), "user2", "pass2")?;
.body(b"abc".to_vec())
.send_with_digest_auth("user2", "pass2")?;
assert_eq!(resp.status(), 403); assert_eq!(resp.status(), 403);
Ok(()) Ok(())
} }
@@ -171,13 +165,9 @@ fn auth_nest(
let url = format!("{}dir1/file1", server.url()); let url = format!("{}dir1/file1", server.url());
let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?; let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
assert_eq!(resp.status(), 401); assert_eq!(resp.status(), 401);
let resp = fetch!(b"PUT", &url) let resp = send_with_digest_auth(fetch!(b"PUT", &url).body(b"abc".to_vec()), "user3", "pass3")?;
.body(b"abc".to_vec())
.send_with_digest_auth("user3", "pass3")?;
assert_eq!(resp.status(), 201); assert_eq!(resp.status(), 201);
let resp = fetch!(b"PUT", &url) let resp = send_with_digest_auth(fetch!(b"PUT", &url).body(b"abc".to_vec()), "user", "pass")?;
.body(b"abc".to_vec())
.send_with_digest_auth("user", "pass")?;
assert_eq!(resp.status(), 201); assert_eq!(resp.status(), 201);
Ok(()) Ok(())
} }
@@ -219,9 +209,11 @@ fn auth_webdav_move(
) -> Result<(), Error> { ) -> Result<(), Error> {
let origin_url = format!("{}dir1/test.html", server.url()); let origin_url = format!("{}dir1/test.html", server.url());
let new_url = format!("{}test2.html", server.url()); let new_url = format!("{}test2.html", server.url());
let resp = fetch!(b"MOVE", &origin_url) let resp = send_with_digest_auth(
.header("Destination", &new_url) fetch!(b"MOVE", &origin_url).header("Destination", &new_url),
.send_with_digest_auth("user3", "pass3")?; "user3",
"pass3",
)?;
assert_eq!(resp.status(), 403); assert_eq!(resp.status(), 403);
Ok(()) Ok(())
} }
@@ -233,9 +225,11 @@ fn auth_webdav_copy(
) -> Result<(), Error> { ) -> Result<(), Error> {
let origin_url = format!("{}dir1/test.html", server.url()); let origin_url = format!("{}dir1/test.html", server.url());
let new_url = format!("{}test2.html", server.url()); let new_url = format!("{}test2.html", server.url());
let resp = fetch!(b"COPY", &origin_url) let resp = send_with_digest_auth(
.header("Destination", &new_url) fetch!(b"COPY", &origin_url).header("Destination", &new_url),
.send_with_digest_auth("user3", "pass3")?; "user3",
"pass3",
)?;
assert_eq!(resp.status(), 403); assert_eq!(resp.status(), 403);
Ok(()) Ok(())
} }
@@ -247,7 +241,7 @@ fn auth_path_prefix(
let url = format!("{}xyz/index.html", server.url()); let url = format!("{}xyz/index.html", server.url());
let resp = fetch!(b"GET", &url).send()?; let resp = fetch!(b"GET", &url).send()?;
assert_eq!(resp.status(), 401); assert_eq!(resp.status(), 401);
let resp = fetch!(b"GET", &url).send_with_digest_auth("user", "pass")?; let resp = send_with_digest_auth(fetch!(b"GET", &url), "user", "pass")?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
Ok(()) Ok(())
} }
@@ -256,12 +250,15 @@ fn auth_path_prefix(
fn auth_partial_index( fn auth_partial_index(
#[with(&["--auth", "user:pass@/dir1:rw,/dir2:rw", "-A"])] server: TestServer, #[with(&["--auth", "user:pass@/dir1:rw,/dir2:rw", "-A"])] server: TestServer,
) -> Result<(), Error> { ) -> Result<(), Error> {
let resp = fetch!(b"GET", server.url()).send_with_digest_auth("user", "pass")?; let resp = send_with_digest_auth(fetch!(b"GET", server.url()), "user", "pass")?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
let paths = utils::retrieve_index_paths(&resp.text()?); let paths = utils::retrieve_index_paths(&resp.text()?);
assert_eq!(paths, IndexSet::from(["dir1/".into(), "dir2/".into()])); assert_eq!(paths, IndexSet::from(["dir1/".into(), "dir2/".into()]));
let resp = fetch!(b"GET", format!("{}?q={}", server.url(), "test.html")) let resp = send_with_digest_auth(
.send_with_digest_auth("user", "pass")?; fetch!(b"GET", format!("{}?q={}", server.url(), "test.html")),
"user",
"pass",
)?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
let paths = utils::retrieve_index_paths(&resp.text()?); let paths = utils::retrieve_index_paths(&resp.text()?);
assert_eq!( assert_eq!(
@@ -288,7 +285,7 @@ fn auth_propfind_dir(
#[with(&["--auth", "admin:admin@/:rw", "--auth", "user:pass@/dir-assets", "-A"])] #[with(&["--auth", "admin:admin@/:rw", "--auth", "user:pass@/dir-assets", "-A"])]
server: TestServer, server: TestServer,
) -> Result<(), Error> { ) -> Result<(), Error> {
let resp = fetch!(b"PROPFIND", server.url()).send_with_digest_auth("user", "pass")?; let resp = send_with_digest_auth(fetch!(b"PROPFIND", server.url()), "user", "pass")?;
assert_eq!(resp.status(), 207); assert_eq!(resp.status(), 207);
let body = resp.text()?; let body = resp.text()?;
assert!(body.contains("<D:href>/dir-assets/</D:href>")); assert!(body.contains("<D:href>/dir-assets/</D:href>"));
@@ -302,14 +299,14 @@ fn auth_data(
) -> Result<(), Error> { ) -> Result<(), Error> {
let resp = reqwest::blocking::get(server.url())?; let resp = reqwest::blocking::get(server.url())?;
let content = resp.text()?; let content = resp.text()?;
let json = utils::retrive_json(&content).unwrap(); let json = utils::retrieve_json(&content).unwrap();
assert_eq!(json["allow_delete"], serde_json::Value::Bool(false)); assert_eq!(json["allow_delete"], serde_json::Value::Bool(false));
assert_eq!(json["allow_upload"], serde_json::Value::Bool(false)); assert_eq!(json["allow_upload"], serde_json::Value::Bool(false));
let resp = fetch!(b"GET", server.url()) let resp = fetch!(b"GET", server.url())
.basic_auth("user", Some("pass")) .basic_auth("user", Some("pass"))
.send()?; .send()?;
let content = resp.text()?; let content = resp.text()?;
let json = utils::retrive_json(&content).unwrap(); let json = utils::retrieve_json(&content).unwrap();
assert_eq!(json["allow_delete"], serde_json::Value::Bool(true)); assert_eq!(json["allow_delete"], serde_json::Value::Bool(true));
assert_eq!(json["allow_upload"], serde_json::Value::Bool(true)); assert_eq!(json["allow_upload"], serde_json::Value::Bool(true));
Ok(()) Ok(())
@@ -320,15 +317,11 @@ fn auth_precedence(
#[with(&["--auth", "user:pass@/dir1:rw,/dir1/test.txt", "-A"])] server: TestServer, #[with(&["--auth", "user:pass@/dir1:rw,/dir1/test.txt", "-A"])] server: TestServer,
) -> Result<(), Error> { ) -> Result<(), Error> {
let url = format!("{}dir1/test.txt", server.url()); let url = format!("{}dir1/test.txt", server.url());
let resp = fetch!(b"PUT", &url) let resp = send_with_digest_auth(fetch!(b"PUT", &url).body(b"abc".to_vec()), "user", "pass")?;
.body(b"abc".to_vec())
.send_with_digest_auth("user", "pass")?;
assert_eq!(resp.status(), 403); assert_eq!(resp.status(), 403);
let url = format!("{}dir1/file1", server.url()); let url = format!("{}dir1/file1", server.url());
let resp = fetch!(b"PUT", &url) let resp = send_with_digest_auth(fetch!(b"PUT", &url).body(b"abc".to_vec()), "user", "pass")?;
.body(b"abc".to_vec())
.send_with_digest_auth("user", "pass")?;
assert_eq!(resp.status(), 201); assert_eq!(resp.status(), 201);
Ok(()) Ok(())

View File

@@ -1,9 +1,10 @@
mod digest_auth_util;
mod fixtures; mod fixtures;
mod utils; mod utils;
use assert_cmd::prelude::*; use assert_cmd::prelude::*;
use assert_fs::TempDir; use assert_fs::TempDir;
use diqwest::blocking::WithDigestAuth; use digest_auth_util::send_with_digest_auth;
use fixtures::{port, tmpdir, wait_for_port, Error}; use fixtures::{port, tmpdir, wait_for_port, Error};
use rstest::rstest; use rstest::rstest;
use std::path::PathBuf; use std::path::PathBuf;
@@ -27,20 +28,18 @@ fn use_config_file(tmpdir: TempDir, port: u16) -> Result<(), Error> {
assert_eq!(resp.status(), 401); assert_eq!(resp.status(), 401);
let url = format!("http://localhost:{port}/dufs/index.html"); let url = format!("http://localhost:{port}/dufs/index.html");
let resp = fetch!(b"GET", &url).send_with_digest_auth("user", "pass")?; let resp = send_with_digest_auth(fetch!(b"GET", &url), "user", "pass")?;
assert_eq!(resp.text()?, "This is index.html"); assert_eq!(resp.text()?, "This is index.html");
let url = format!("http://localhost:{port}/dufs?simple"); let url = format!("http://localhost:{port}/dufs?simple");
let resp = fetch!(b"GET", &url).send_with_digest_auth("user", "pass")?; let resp = send_with_digest_auth(fetch!(b"GET", &url), "user", "pass")?;
let text: String = resp.text().unwrap(); let text: String = resp.text().unwrap();
assert!(text.split('\n').any(|c| c == "dir1/")); assert!(text.split('\n').any(|c| c == "dir1/"));
assert!(!text.split('\n').any(|c| c == "dir3/")); assert!(!text.split('\n').any(|c| c == "dir3/"));
assert!(!text.split('\n').any(|c| c == "test.txt")); assert!(!text.split('\n').any(|c| c == "test.txt"));
let url = format!("http://localhost:{port}/dufs/dir1/upload.txt"); let url = format!("http://localhost:{port}/dufs/dir1/upload.txt");
let resp = fetch!(b"PUT", &url) let resp = send_with_digest_auth(fetch!(b"PUT", &url).body("Hello"), "user", "pass")?;
.body("Hello")
.send_with_digest_auth("user", "pass")?;
assert_eq!(resp.status(), 201); assert_eq!(resp.status(), 201);
child.kill()?; child.kill()?;

91
tests/digest_auth_util.rs Normal file
View File

@@ -0,0 +1,91 @@
/// Refs https://github.dev/maoertel/diqwest/blob/main/src/blocking.rs
use anyhow::{anyhow, Result};
use digest_auth::{AuthContext, AuthorizationHeader, HttpMethod};
use hyper::{header::AUTHORIZATION, HeaderMap, StatusCode};
use reqwest::blocking::{RequestBuilder, Response};
use url::Position;
/// Send the request once; if the server answers 401 Unauthorized, compute a
/// Digest Authorization header from the challenge and retry with it.
pub fn send_with_digest_auth(
    request_builder: RequestBuilder,
    username: &str,
    password: &str,
) -> Result<Response> {
    let initial = try_clone_request_builder(&request_builder)?.send()?;
    if initial.status() == StatusCode::UNAUTHORIZED {
        try_digest_auth(request_builder, initial, username, password)
    } else {
        Ok(initial)
    }
}
/// Retry the request with an Authorization header derived from the 401
/// challenge; if no answer could be computed, hand back the first response.
fn try_digest_auth(
    request_builder: RequestBuilder,
    first_response: Response,
    username: &str,
    password: &str,
) -> Result<Response> {
    let answer = get_answer(
        &request_builder,
        first_response.headers(),
        username,
        password,
    )?;
    match answer {
        Some(header) => {
            let retried = request_builder
                .header(AUTHORIZATION, header.to_header_string())
                .send()?;
            Ok(retried)
        }
        None => Ok(first_response),
    }
}
/// Clone the builder so it can be sent more than once; cloning fails when the
/// body is a stream, which we surface as an error.
fn try_clone_request_builder(request_builder: &RequestBuilder) -> Result<RequestBuilder> {
    match request_builder.try_clone() {
        Some(cloned) => Ok(cloned),
        None => Err(anyhow!("Request body must not be a stream")),
    }
}
/// Compute the Digest Authorization header for the challenge in `headers`.
///
/// The `Option` in the return type is kept for interface compatibility with
/// the caller; a successful calculation always yields `Some`.
fn get_answer(
    request_builder: &RequestBuilder,
    headers: &HeaderMap,
    username: &str,
    password: &str,
) -> Result<Option<AuthorizationHeader>> {
    calculate_answer(request_builder, headers, username, password).map(Some)
}
/// Build the request once (non-destructively, via a clone) to extract the
/// path, method, and body needed to answer the digest challenge in `headers`.
fn calculate_answer(
    request_builder: &RequestBuilder,
    headers: &HeaderMap,
    username: &str,
    password: &str,
) -> Result<AuthorizationHeader> {
    let built = try_clone_request_builder(request_builder)?.build()?;
    // Everything after the port, i.e. path + query, is what the digest
    // context hashes as the URI.
    let uri_path = &built.url()[Position::AfterPort..];
    let http_method = HttpMethod::from(built.method().as_str());
    let payload = built.body().and_then(|body| body.as_bytes());
    parse_digest_auth_header(headers, uri_path, http_method, payload, username, password)
}
/// Parse the server's `www-authenticate` challenge and respond to it with
/// the supplied credentials, yielding a ready-to-send Authorization header.
fn parse_digest_auth_header(
    header: &HeaderMap,
    path: &str,
    method: HttpMethod,
    body: Option<&[u8]>,
    username: &str,
    password: &str,
) -> Result<AuthorizationHeader> {
    let www_auth = match header.get("www-authenticate") {
        Some(value) => value.to_str()?,
        None => return Err(anyhow!("The header 'www-authenticate' is missing.")),
    };
    let mut prompt = digest_auth::parse(www_auth)?;
    let context = AuthContext::new_with_method(username, password, path, body, method);
    Ok(prompt.respond(&context)?)
}

View File

@@ -4,7 +4,7 @@ mod utils;
use fixtures::{server, Error, TestServer, BIN_FILE}; use fixtures::{server, Error, TestServer, BIN_FILE};
use rstest::rstest; use rstest::rstest;
use serde_json::Value; use serde_json::Value;
use utils::retrive_edit_file; use utils::retrieve_edit_file;
#[rstest] #[rstest]
fn get_dir(server: TestServer) -> Result<(), Error> { fn get_dir(server: TestServer) -> Result<(), Error> {
@@ -189,6 +189,21 @@ fn head_file(server: TestServer) -> Result<(), Error> {
Ok(()) Ok(())
} }
#[rstest]
/// `?hash` on a file URL returns 200 with the file's SHA-256 as hex text.
fn hash_file(server: TestServer) -> Result<(), Error> {
    let url = format!("{}index.html?hash", server.url());
    let resp = reqwest::blocking::get(url)?;
    assert_eq!(resp.status(), 200);
    assert_eq!(
        resp.headers().get("content-type").unwrap(),
        "text/html; charset=utf-8"
    );
    assert_eq!(
        resp.text()?,
        "c8dd395e3202674b9512f7b7f956e0d96a8ba8f572e785b0d5413ab83766dbc4"
    );
    Ok(())
}
#[rstest] #[rstest]
fn get_file_404(server: TestServer) -> Result<(), Error> { fn get_file_404(server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(format!("{}404", server.url()))?; let resp = reqwest::blocking::get(format!("{}404", server.url()))?;
@@ -223,7 +238,7 @@ fn get_file_newline_path(server: TestServer) -> Result<(), Error> {
fn get_file_edit(server: TestServer) -> Result<(), Error> { fn get_file_edit(server: TestServer) -> Result<(), Error> {
let resp = fetch!(b"GET", format!("{}index.html?edit", server.url())).send()?; let resp = fetch!(b"GET", format!("{}index.html?edit", server.url())).send()?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
let editable = retrive_edit_file(&resp.text().unwrap()).unwrap(); let editable = retrieve_edit_file(&resp.text().unwrap()).unwrap();
assert!(editable); assert!(editable);
Ok(()) Ok(())
} }
@@ -232,7 +247,7 @@ fn get_file_edit(server: TestServer) -> Result<(), Error> {
fn get_file_edit_bin(server: TestServer) -> Result<(), Error> { fn get_file_edit_bin(server: TestServer) -> Result<(), Error> {
let resp = fetch!(b"GET", format!("{}{BIN_FILE}?edit", server.url())).send()?; let resp = fetch!(b"GET", format!("{}{BIN_FILE}?edit", server.url())).send()?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
let editable = retrive_edit_file(&resp.text().unwrap()).unwrap(); let editable = retrieve_edit_file(&resp.text().unwrap()).unwrap();
assert!(!editable); assert!(!editable);
Ok(()) Ok(())
} }

View File

@@ -1,7 +1,8 @@
mod digest_auth_util;
mod fixtures; mod fixtures;
mod utils; mod utils;
use diqwest::blocking::WithDigestAuth; use digest_auth_util::send_with_digest_auth;
use fixtures::{port, tmpdir, wait_for_port, Error}; use fixtures::{port, tmpdir, wait_for_port, Error};
use assert_cmd::prelude::*; use assert_cmd::prelude::*;
@@ -31,12 +32,12 @@ fn log_remote_user(
let stdout = child.stdout.as_mut().expect("Failed to get stdout"); let stdout = child.stdout.as_mut().expect("Failed to get stdout");
let req = fetch!(b"GET", &format!("http://localhost:{port}")); let req_builder = fetch!(b"GET", &format!("http://localhost:{port}"));
let resp = if is_basic { let resp = if is_basic {
req.basic_auth("user", Some("pass")).send()? req_builder.basic_auth("user", Some("pass")).send()?
} else { } else {
req.send_with_digest_auth("user", "pass")? send_with_digest_auth(req_builder, "user", "pass")?
}; };
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);

View File

@@ -26,7 +26,7 @@ macro_rules! fetch {
#[allow(dead_code)] #[allow(dead_code)]
pub fn retrieve_index_paths(content: &str) -> IndexSet<String> { pub fn retrieve_index_paths(content: &str) -> IndexSet<String> {
let value = retrive_json(content).unwrap(); let value = retrieve_json(content).unwrap();
let paths = value let paths = value
.get("paths") .get("paths")
.unwrap() .unwrap()
@@ -47,8 +47,8 @@ pub fn retrieve_index_paths(content: &str) -> IndexSet<String> {
} }
#[allow(dead_code)] #[allow(dead_code)]
pub fn retrive_edit_file(content: &str) -> Option<bool> { pub fn retrieve_edit_file(content: &str) -> Option<bool> {
let value = retrive_json(content)?; let value = retrieve_json(content)?;
let value = value.get("editable").unwrap(); let value = value.get("editable").unwrap();
Some(value.as_bool().unwrap()) Some(value.as_bool().unwrap())
} }
@@ -60,7 +60,7 @@ pub fn encode_uri(v: &str) -> String {
} }
#[allow(dead_code)] #[allow(dead_code)]
pub fn retrive_json(content: &str) -> Option<Value> { pub fn retrieve_json(content: &str) -> Option<Value> {
let lines: Vec<&str> = content.lines().collect(); let lines: Vec<&str> = content.lines().collect();
let line = lines.iter().find(|v| v.contains("DATA ="))?; let line = lines.iter().find(|v| v.contains("DATA ="))?;
let line_col = line.find("DATA =").unwrap() + 6; let line_col = line.find("DATA =").unwrap() + 6;