mirror of
https://github.com/sigoden/dufs.git
synced 2026-04-09 00:59:02 +03:00
Compare commits
325 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
a118c1348e | ||
|
|
db7a0530a2 | ||
|
|
bc27c8c479 | ||
|
|
2b2c7bd5f7 | ||
|
|
ca18df1a36 | ||
|
|
7cfb97dfdf | ||
|
|
23619033ae | ||
|
|
db75ba4357 | ||
|
|
4016715187 | ||
|
|
f8a7873582 | ||
|
|
7f8269881d | ||
|
|
b2f244a4cf | ||
|
|
6cc8a18a3d | ||
|
|
a387d727b2 | ||
|
|
19d65a5aa4 | ||
|
|
d37762d2b9 | ||
|
|
9c9fca75d3 | ||
|
|
089d30c5a5 | ||
|
|
459a4d4f4a | ||
|
|
f8b69f4df8 | ||
|
|
53f064c73b | ||
|
|
8a92a0cf1a | ||
|
|
59685da06e | ||
|
|
09200860b4 | ||
|
|
4fbdec2878 | ||
|
|
d0453b7591 | ||
|
|
eda9769b2a | ||
|
|
d255f1376a | ||
|
|
669c4f8811 | ||
|
|
e576ddcbea | ||
|
|
af95ea1cd7 | ||
|
|
cbc620481d | ||
|
|
f1c9776962 | ||
|
|
ac15ae4e8e | ||
|
|
ab4ef06cb8 | ||
|
|
bc6c573acb | ||
|
|
f27f9e997f | ||
|
|
835438fc2a | ||
|
|
d445b78f96 | ||
|
|
881a67e1a4 | ||
|
|
bb5a5564b4 | ||
|
|
2cf6d39032 | ||
|
|
c500ce7acc | ||
|
|
f87c52fda2 | ||
|
|
2c5cc60965 | ||
|
|
972cf2377f | ||
|
|
5b338c40da | ||
|
|
964bf61c37 | ||
|
|
4bf92cc47a | ||
|
|
7d17d9c415 | ||
|
|
1db263efae | ||
|
|
5d26103ea2 | ||
|
|
3727dec115 | ||
|
|
0311c9fb90 | ||
|
|
e9ce4b2dc3 | ||
|
|
7aba3fe0b6 | ||
|
|
ca5c3d7c54 | ||
|
|
ec2b064a9a | ||
|
|
cadea9a3bf | ||
|
|
3e0e6b2e8a | ||
|
|
632f7a41bf | ||
|
|
f1e90686dc | ||
|
|
dc7a7cbb3f | ||
|
|
ce740b1fb1 | ||
|
|
1eb69f6806 | ||
|
|
5f0369aa39 | ||
|
|
fe2358506d | ||
|
|
6b6d69a8ef | ||
|
|
cb7d417fd3 | ||
|
|
75f06f749c | ||
|
|
d0c79a95e5 | ||
|
|
ffc0991a12 | ||
|
|
51f9c87e65 | ||
|
|
529bb33f0b | ||
|
|
3d3bb822ee | ||
|
|
9353b2e759 | ||
|
|
a277698322 | ||
|
|
0ff2b15c9a | ||
|
|
319333cd22 | ||
|
|
d66c9de8c8 | ||
|
|
7c0fa3dab7 | ||
|
|
48066d79e0 | ||
|
|
1c41db0c2d | ||
|
|
76ef7ba0fb | ||
|
|
3deac84cc9 | ||
|
|
638b715bc2 | ||
|
|
920b70abc4 | ||
|
|
015713bc6d | ||
|
|
3c75a9c4cc | ||
|
|
871e8276ff | ||
|
|
f92c8ee91d | ||
|
|
95eb648411 | ||
|
|
3354b1face | ||
|
|
9b348fc945 | ||
|
|
e1fabc7349 | ||
|
|
58a46f7c3a | ||
|
|
ef757281b3 | ||
|
|
de0614816a | ||
|
|
81d2c49e3f | ||
|
|
ee21894452 | ||
|
|
0ac0c048ec | ||
|
|
17063454d3 | ||
|
|
af347f9cf0 | ||
|
|
006e03ed30 | ||
|
|
77f86a4c60 | ||
|
|
a66f95b39f | ||
|
|
52506bc01f | ||
|
|
270cc0cba2 | ||
|
|
5988442d5c | ||
|
|
3873f4794a | ||
|
|
cd84dff87f | ||
|
|
8590f3e841 | ||
|
|
44a4ddf973 | ||
|
|
37800f630d | ||
|
|
5c850256f4 | ||
|
|
0cec573579 | ||
|
|
073b098111 | ||
|
|
6ff8b29b69 | ||
|
|
7584fe3d08 | ||
|
|
653cd167d0 | ||
|
|
ab29e39148 | ||
|
|
f8d6859354 | ||
|
|
130435c387 | ||
|
|
afdfde01f0 | ||
|
|
ae97c714d6 | ||
|
|
c352dab470 | ||
|
|
743db47f90 | ||
|
|
a476c15a09 | ||
|
|
0d74fa3ec5 | ||
|
|
b83cc6938b | ||
|
|
a187b14885 | ||
|
|
d3de3db0d9 | ||
|
|
80ac9afe68 | ||
|
|
4ef07737e1 | ||
|
|
5782c5f413 | ||
|
|
8b4cab1e69 | ||
|
|
70300b133c | ||
|
|
7ea4bb808d | ||
|
|
6766e0d437 | ||
|
|
53c9bc8bea | ||
|
|
60df3b473c | ||
|
|
6510ae8be9 | ||
|
|
9545fb6e37 | ||
|
|
0fd0f11298 | ||
|
|
46aa8fcc02 | ||
|
|
09bb738866 | ||
|
|
3612ef10d1 | ||
|
|
7ac2039a36 | ||
|
|
7f83de765a | ||
|
|
9b3779b13a | ||
|
|
11a52f29c4 | ||
|
|
10204c723f | ||
|
|
204421643d | ||
|
|
d9706d75ef | ||
|
|
40df0bd2f9 | ||
|
|
a53411b4d6 | ||
|
|
609017b2f5 | ||
|
|
7dc0b0e218 | ||
|
|
6be36b8e51 | ||
|
|
8be545d3da | ||
|
|
4f3a8d275b | ||
|
|
9c412f4276 | ||
|
|
27c269d6a0 | ||
|
|
57b4a74279 | ||
|
|
1112b936b8 | ||
|
|
033d37c4d4 | ||
|
|
577eea5fa4 | ||
|
|
d22be95dda | ||
|
|
8d7c1fbf53 | ||
|
|
4622c48120 | ||
|
|
f8ea41638f | ||
|
|
2890b3929d | ||
|
|
f5c0aefd8e | ||
|
|
8a1e7674df | ||
|
|
3c6206849f | ||
|
|
652f836c23 | ||
|
|
fb5b50f059 | ||
|
|
e43554b795 | ||
|
|
10ec34872d | ||
|
|
3ff16d254b | ||
|
|
29a04c8d74 | ||
|
|
c92e45f2da | ||
|
|
8d7a9053e2 | ||
|
|
0e12b285cd | ||
|
|
45f4f5fc58 | ||
|
|
6dcb4dcd76 | ||
|
|
65da9bedee | ||
|
|
e468d823cc | ||
|
|
902a60563d | ||
|
|
f6c2ed2974 | ||
|
|
8f4cbb4826 | ||
|
|
2064d7803a | ||
|
|
ad0be71557 | ||
|
|
6d9758c71d | ||
|
|
a61fda6e80 | ||
|
|
6625c4d3d0 | ||
|
|
dd6973468c | ||
|
|
c6c78a16c5 | ||
|
|
111103f26b | ||
|
|
7d6d7d49ca | ||
|
|
c6dcaf95d4 | ||
|
|
b7c5119c2e | ||
|
|
0000bd27f5 | ||
|
|
47883376c1 | ||
|
|
fea9bf988a | ||
|
|
b6d555158c | ||
|
|
628d863d2e | ||
|
|
8d9705caa4 | ||
|
|
7eef4407fc | ||
|
|
f061365587 | ||
|
|
d35cea4c36 | ||
|
|
1329e42b9a | ||
|
|
6ebf619430 | ||
|
|
8b4727c3a4 | ||
|
|
604ccc6556 | ||
|
|
1a9990f04e | ||
|
|
bd07783cde | ||
|
|
dbf2de9cb9 | ||
|
|
3b3ea718d9 | ||
|
|
3debf88da1 | ||
|
|
7eaa6f2484 | ||
|
|
68def1c1d9 | ||
|
|
868f4158f5 | ||
|
|
3063dca0a6 | ||
|
|
a74e40aee5 | ||
|
|
bde06fef94 | ||
|
|
31c832a742 | ||
|
|
9f8171a22f | ||
|
|
0fb9f3b2c8 | ||
|
|
3ae75d3558 | ||
|
|
dff489398e | ||
|
|
64e397d18a | ||
|
|
cc0014c183 | ||
|
|
a489c5647a | ||
|
|
0918fb3fe4 | ||
|
|
14efeb6360 | ||
|
|
30b8f75bba | ||
|
|
a39065beff | ||
|
|
a493c13734 | ||
|
|
ae2f878e62 | ||
|
|
277d9d22d4 | ||
|
|
c62926d19c | ||
|
|
cccbbe9ea4 | ||
|
|
147048690f | ||
|
|
9cfd66dab9 | ||
|
|
b791549ec7 | ||
|
|
f148817c52 | ||
|
|
00ae36d486 | ||
|
|
4e823e8bba | ||
|
|
4e84e6c532 | ||
|
|
f49b590a56 | ||
|
|
cb1f3cddea | ||
|
|
05dbcfb2df | ||
|
|
76e967fa59 | ||
|
|
140a360e37 | ||
|
|
604cbb7412 | ||
|
|
c6541b1c36 | ||
|
|
b6729a3d64 | ||
|
|
4f1a35de5d | ||
|
|
2ffdcdf106 | ||
|
|
1e0cdafbcf | ||
|
|
0a03941e05 | ||
|
|
07a7322748 | ||
|
|
936d08545b | ||
|
|
2e6af671ca | ||
|
|
583117c01f | ||
|
|
6e1df040b4 | ||
|
|
f5aa3354e1 | ||
|
|
3ed0d885fe | ||
|
|
542e9a4ec5 | ||
|
|
5ee2c5504c | ||
|
|
fd02a53823 | ||
|
|
6554c1c308 | ||
|
|
fe71600bd2 | ||
|
|
9cfeee0df0 | ||
|
|
eb7a536a3f | ||
|
|
c1c6dbc356 | ||
|
|
e29cf4c752 | ||
|
|
7f062b6705 | ||
|
|
ea8b9e9cce | ||
|
|
d2270be8fb | ||
|
|
2d0dfed456 | ||
|
|
4058a2db72 | ||
|
|
069cb64889 | ||
|
|
c85ea06785 | ||
|
|
68139c6263 | ||
|
|
deb6365a28 | ||
|
|
0d3acb8ae6 | ||
|
|
a67da8bdd3 | ||
|
|
3260b52c47 | ||
|
|
7194ebf248 | ||
|
|
b1b0fdd4db | ||
|
|
db71f75236 | ||
|
|
e66951fd11 | ||
|
|
051ff8da2d | ||
|
|
c3ac2a21c9 | ||
|
|
9c2e9d1503 | ||
|
|
9384cc8587 | ||
|
|
df48021757 | ||
|
|
af866aaaf4 | ||
|
|
68d238d34d | ||
|
|
a10150f2f8 | ||
|
|
5b11bb75dd | ||
|
|
6d7da0363c | ||
|
|
d8f7335053 | ||
|
|
7fc8fc6236 | ||
|
|
c7d42a3f1c | ||
|
|
3c4bb77023 | ||
|
|
12aafa00d8 | ||
|
|
5142430e93 | ||
|
|
dd8b21f3a6 | ||
|
|
471bca86c6 | ||
|
|
6b01c143d9 | ||
|
|
127c90a45e | ||
|
|
b3890ea094 | ||
|
|
99f0de6ca0 | ||
|
|
b48a7473fc | ||
|
|
a84c3b353d | ||
|
|
e9383d71ed | ||
|
|
8258dabe4a | ||
|
|
0e236b61f6 | ||
|
|
09788ed031 | ||
|
|
46ebe978ae | ||
|
|
e01f2030e1 | ||
|
|
8d03ec151a |
28
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
28
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
---
|
||||
name: Bug report
|
||||
about: Create a report to help us improve
|
||||
---
|
||||
|
||||
**Problem**
|
||||
|
||||
<!-- Provide a clear and concise description of the bug you're experiencing. What did you expect to happen, and what actually happened? -->
|
||||
|
||||
**Configuration**
|
||||
|
||||
<!-- Please specify the Dufs command-line arguments or configuration used. -->
|
||||
|
||||
<!-- If the issue is related to authentication/permissions, include auth configurations while concealing sensitive information (e.g., passwords). -->
|
||||
|
||||
**Log**
|
||||
|
||||
<!-- Attach relevant log outputs that can help diagnose the issue. -->
|
||||
|
||||
**Screenshots/Media**
|
||||
|
||||
<!-- If applicable, add screenshots or videos that help illustrate the issue, especially for WebUI problems. -->
|
||||
|
||||
**Environment Information**
|
||||
- Dufs version:
|
||||
- Browser/Webdav info:
|
||||
- OS info:
|
||||
- Proxy server (if any): <!-- e.g. nginx, cloudflare -->
|
||||
16
.github/ISSUE_TEMPLATE/feature_requst.md
vendored
Normal file
16
.github/ISSUE_TEMPLATE/feature_requst.md
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
---
|
||||
name: Feature Request
|
||||
about: If you have any interesting advice, you can tell us.
|
||||
---
|
||||
|
||||
## Specific Demand
|
||||
|
||||
<!--
|
||||
What feature do you need, please describe it in detail.
|
||||
-->
|
||||
|
||||
## Implement Suggestion
|
||||
|
||||
<!--
|
||||
If you have any suggestion for complete this feature, you can tell us.
|
||||
-->
|
||||
10
.github/workflows/ci.yaml
vendored
10
.github/workflows/ci.yaml
vendored
@@ -29,16 +29,12 @@ jobs:
|
||||
RUSTFLAGS: --deny warnings
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Install Rust Toolchain Components
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
components: clippy, rustfmt
|
||||
override: true
|
||||
toolchain: stable
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
|
||||
- uses: Swatinem/rust-cache@v1
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
|
||||
- name: Test
|
||||
run: cargo test --all
|
||||
|
||||
134
.github/workflows/release.yaml
vendored
134
.github/workflows/release.yaml
vendored
@@ -7,66 +7,90 @@ on:
|
||||
|
||||
jobs:
|
||||
release:
|
||||
name: Publish to Github Reelases
|
||||
name: Publish to Github Releases
|
||||
permissions:
|
||||
contents: write
|
||||
outputs:
|
||||
rc: ${{ steps.check-tag.outputs.rc }}
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
target:
|
||||
- aarch64-unknown-linux-musl
|
||||
- aarch64-apple-darwin
|
||||
- x86_64-apple-darwin
|
||||
- x86_64-pc-windows-msvc
|
||||
- x86_64-unknown-linux-musl
|
||||
include:
|
||||
- target: aarch64-unknown-linux-musl
|
||||
os: ubuntu-latest
|
||||
use-cross: true
|
||||
cargo-flags: ""
|
||||
- target: aarch64-apple-darwin
|
||||
os: macos-latest
|
||||
use-cross: true
|
||||
cargo-flags: ""
|
||||
- target: aarch64-pc-windows-msvc
|
||||
os: windows-latest
|
||||
use-cross: true
|
||||
cargo-flags: ""
|
||||
- target: x86_64-apple-darwin
|
||||
os: macos-latest
|
||||
cargo-flags: ""
|
||||
- target: x86_64-pc-windows-msvc
|
||||
os: windows-latest
|
||||
cargo-flags: ""
|
||||
- target: x86_64-unknown-linux-musl
|
||||
os: ubuntu-latest
|
||||
use-cross: true
|
||||
cargo-flags: ""
|
||||
- target: i686-unknown-linux-musl
|
||||
os: ubuntu-latest
|
||||
use-cross: true
|
||||
cargo-flags: ""
|
||||
- target: i686-pc-windows-msvc
|
||||
os: windows-latest
|
||||
use-cross: true
|
||||
cargo-flags: ""
|
||||
- target: armv7-unknown-linux-musleabihf
|
||||
os: ubuntu-latest
|
||||
use-cross: true
|
||||
cargo-flags: ""
|
||||
- target: arm-unknown-linux-musleabihf
|
||||
os: ubuntu-latest
|
||||
use-cross: true
|
||||
cargo-flags: ""
|
||||
|
||||
runs-on: ${{matrix.os}}
|
||||
env:
|
||||
BUILD_CMD: cargo
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check Tag
|
||||
id: check-tag
|
||||
shell: bash
|
||||
run: |
|
||||
tag=${GITHUB_REF##*/}
|
||||
echo "::set-output name=version::$tag"
|
||||
if [[ "$tag" =~ [0-9]+.[0-9]+.[0-9]+$ ]]; then
|
||||
echo "::set-output name=rc::false"
|
||||
ver=${GITHUB_REF##*/}
|
||||
echo "version=$ver" >> $GITHUB_OUTPUT
|
||||
if [[ "$ver" =~ [0-9]+.[0-9]+.[0-9]+$ ]]; then
|
||||
echo "rc=false" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "::set-output name=rc::true"
|
||||
echo "rc=true" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
|
||||
- name: Install Rust Toolchain Components
|
||||
uses: actions-rs/toolchain@v1
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
with:
|
||||
override: true
|
||||
target: ${{ matrix.target }}
|
||||
toolchain: stable
|
||||
profile: minimal # minimal component installation (ie, no documentation)
|
||||
targets: ${{ matrix.target }}
|
||||
|
||||
- name: Install prerequisites
|
||||
- name: Install cross
|
||||
if: matrix.use-cross
|
||||
uses: taiki-e/install-action@v2
|
||||
with:
|
||||
tool: cross
|
||||
|
||||
- name: Overwrite build command env variable
|
||||
if: matrix.use-cross
|
||||
shell: bash
|
||||
run: |
|
||||
case ${{ matrix.target }} in
|
||||
aarch64-unknown-linux-musl) sudo apt-get -y update ; sudo apt-get -y install gcc-aarch64-linux-gnu ;;
|
||||
esac
|
||||
|
||||
run: echo "BUILD_CMD=cross" >> $GITHUB_ENV
|
||||
|
||||
- name: Show Version Information (Rust, cargo, GCC)
|
||||
shell: bash
|
||||
run: |
|
||||
@@ -78,11 +102,8 @@ jobs:
|
||||
rustc -V
|
||||
|
||||
- name: Build
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
use-cross: ${{ matrix.use-cross }}
|
||||
command: build
|
||||
args: --locked --release --target=${{ matrix.target }}
|
||||
shell: bash
|
||||
run: $BUILD_CMD build --locked --release --target=${{ matrix.target }} ${{ matrix.cargo-flags }}
|
||||
|
||||
- name: Build Archive
|
||||
shell: bash
|
||||
@@ -94,8 +115,7 @@ jobs:
|
||||
set -euxo pipefail
|
||||
|
||||
bin=${GITHUB_REPOSITORY##*/}
|
||||
src=`pwd`
|
||||
dist=$src/dist
|
||||
dist_dir=`pwd`/dist
|
||||
name=$bin-$version-$target
|
||||
executable=target/$target/release/$bin
|
||||
|
||||
@@ -103,29 +123,27 @@ jobs:
|
||||
executable=$executable.exe
|
||||
fi
|
||||
|
||||
mkdir $dist
|
||||
cp $executable $dist
|
||||
cd $dist
|
||||
mkdir $dist_dir
|
||||
cp $executable $dist_dir
|
||||
cd $dist_dir
|
||||
|
||||
if [[ "$RUNNER_OS" == "Windows" ]]; then
|
||||
archive=$dist/$name.zip
|
||||
archive=$dist_dir/$name.zip
|
||||
7z a $archive *
|
||||
echo "::set-output name=archive::`pwd -W`/$name.zip"
|
||||
echo "archive=dist/$name.zip" >> $GITHUB_OUTPUT
|
||||
else
|
||||
archive=$dist/$name.tar.gz
|
||||
tar czf $archive *
|
||||
echo "::set-output name=archive::$archive"
|
||||
archive=$dist_dir/$name.tar.gz
|
||||
tar -czf $archive *
|
||||
echo "archive=dist/$name.tar.gz" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Publish Archive
|
||||
uses: softprops/action-gh-release@v0.1.5
|
||||
uses: softprops/action-gh-release@v2
|
||||
if: ${{ startsWith(github.ref, 'refs/tags/') }}
|
||||
with:
|
||||
draft: false
|
||||
files: ${{ steps.package.outputs.archive }}
|
||||
prerelease: ${{ steps.check-tag.outputs.rc == 'true' }}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
docker:
|
||||
name: Publish to Docker Hub
|
||||
@@ -133,33 +151,41 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
needs: release
|
||||
steps:
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
uses: docker/setup-buildx-action@v3
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v1
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Build and push
|
||||
id: docker_build
|
||||
uses: docker/build-push-action@v2
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
file: Dockerfile-release
|
||||
build-args: |
|
||||
REPO=${{ github.repository }}
|
||||
VER=${{ github.ref_name }}
|
||||
platforms: |
|
||||
linux/amd64
|
||||
linux/arm64
|
||||
linux/386
|
||||
linux/arm/v7
|
||||
push: ${{ needs.release.outputs.rc == 'false' }}
|
||||
tags: ${{ github.repository }}:latest, ${{ github.repository }}:${{ github.ref_name }}
|
||||
|
||||
|
||||
publish-crate:
|
||||
name: Publish to crates.io
|
||||
if: ${{ needs.release.outputs.rc == 'false' }}
|
||||
runs-on: ubuntu-latest
|
||||
needs: release
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
profile: minimal
|
||||
toolchain: stable
|
||||
- name: Publish
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: dtolnay/rust-toolchain@stable
|
||||
|
||||
- name: Publish
|
||||
env:
|
||||
CARGO_REGISTRY_TOKEN: ${{ secrets.CRATES_IO_API_TOKEN }}
|
||||
run: cargo publish
|
||||
501
CHANGELOG.md
501
CHANGELOG.md
@@ -2,16 +2,497 @@
|
||||
|
||||
All notable changes to this project will be documented in this file.
|
||||
|
||||
## [0.45.0] - 2025-09-03
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Perms on `dufs -A -a @/:ro` ([#619](https://github.com/sigoden/dufs/issues/619))
|
||||
- Login btn does not work for readonly anonymous ([#620](https://github.com/sigoden/dufs/issues/620))
|
||||
- Verify token length ([#627](https://github.com/sigoden/dufs/issues/627))
|
||||
|
||||
### Features
|
||||
|
||||
- Make dir urls inherit `?noscript` params ([#614](https://github.com/sigoden/dufs/issues/614))
|
||||
- Log decoded uri ([#615](https://github.com/sigoden/dufs/issues/615))
|
||||
|
||||
## [0.44.0] - 2025-08-02
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- No authentication check if no auth users ([#497](https://github.com/sigoden/dufs/issues/497))
|
||||
- Webui can't handle hash property of URL well ([#515](https://github.com/sigoden/dufs/issues/515))
|
||||
- Incorrect dir size due to hidden files ([#529](https://github.com/sigoden/dufs/issues/529))
|
||||
- Webui formatDirSize ([#568](https://github.com/sigoden/dufs/issues/568))
|
||||
- Follow symlinks when searching/archiving ([#572](https://github.com/sigoden/dufs/issues/572))
|
||||
- Incorrect separator for zip archives under windows ([#577](https://github.com/sigoden/dufs/issues/577))
|
||||
- Unexpected public auth asking for login info ([#583](https://github.com/sigoden/dufs/issues/583))
|
||||
|
||||
### Features
|
||||
|
||||
- Higher perm auth path shadows lower one ([#521](https://github.com/sigoden/dufs/issues/521))
|
||||
- Add cache-control:no-cache while sending file and index ([#528](https://github.com/sigoden/dufs/issues/528))
|
||||
- Support multipart ranges ([#535](https://github.com/sigoden/dufs/issues/535))
|
||||
- Limit sub directory item counting ([#556](https://github.com/sigoden/dufs/issues/556))
|
||||
- Tolerate the absence of mtime ([#559](https://github.com/sigoden/dufs/issues/559))
|
||||
- Support noscript fallback ([#602](https://github.com/sigoden/dufs/issues/602))
|
||||
- Support downloading via token auth ([#603](https://github.com/sigoden/dufs/issues/603))
|
||||
|
||||
### Refactor
|
||||
|
||||
- Change description for `--allow-archive` ([#511](https://github.com/sigoden/dufs/issues/511))
|
||||
- Removes clippy warnings ([#601](https://github.com/sigoden/dufs/issues/601))
|
||||
- Update deps ([#604](https://github.com/sigoden/dufs/issues/604))
|
||||
- Fix typos ([#605](https://github.com/sigoden/dufs/issues/605))
|
||||
|
||||
## [0.43.0] - 2024-11-04
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Auth failed if password contains `:` ([#449](https://github.com/sigoden/dufs/issues/449))
|
||||
- Resolve speed bottleneck in 10G network ([#451](https://github.com/sigoden/dufs/issues/451))
|
||||
|
||||
### Features
|
||||
|
||||
- Webui displays subdirectory items ([#457](https://github.com/sigoden/dufs/issues/457))
|
||||
- Support binding abstract unix socket ([#468](https://github.com/sigoden/dufs/issues/468))
|
||||
- Provide healthcheck API ([#474](https://github.com/sigoden/dufs/issues/474))
|
||||
|
||||
### Refactor
|
||||
|
||||
- Do not show size for Dir ([#447](https://github.com/sigoden/dufs/issues/447))
|
||||
|
||||
## [0.42.0] - 2024-09-01
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Garbled characters caused by atob ([#422](https://github.com/sigoden/dufs/issues/422))
|
||||
- Webui unexpected save-btn when file is non-editable ([#429](https://github.com/sigoden/dufs/issues/429))
|
||||
- Login succeeded but popup `Forbidden` ([#437](https://github.com/sigoden/dufs/issues/437))
|
||||
|
||||
### Features
|
||||
|
||||
- Implements remaining http cache conditionalss ([#407](https://github.com/sigoden/dufs/issues/407))
|
||||
- Base64 index-data to avoid misencoding ([#421](https://github.com/sigoden/dufs/issues/421))
|
||||
- Webui support logout ([#439](https://github.com/sigoden/dufs/issues/439))
|
||||
|
||||
### Refactor
|
||||
|
||||
- No inline scripts in HTML ([#391](https://github.com/sigoden/dufs/issues/391))
|
||||
- Return 400 for propfind request when depth is neither 0 nor 1 ([#403](https://github.com/sigoden/dufs/issues/403))
|
||||
- Remove sabredav-partialupdate from DAV res header ([#415](https://github.com/sigoden/dufs/issues/415))
|
||||
- Date formatting in cache tests ([#428](https://github.com/sigoden/dufs/issues/428))
|
||||
- Some query params work as flag and must not accept a value ([#431](https://github.com/sigoden/dufs/issues/431))
|
||||
- Improve logout at asserts/index.js ([#440](https://github.com/sigoden/dufs/issues/440))
|
||||
- Make logout works on safari ([#442](https://github.com/sigoden/dufs/issues/442))
|
||||
|
||||
## [0.41.0] - 2024-05-22
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Timestamp format of getlastmodified in dav xml ([#366](https://github.com/sigoden/dufs/issues/366))
|
||||
- Strange issue that occurs only on Microsoft WebDAV ([#382](https://github.com/sigoden/dufs/issues/382))
|
||||
- Head div overlap main contents when wrap ([#386](https://github.com/sigoden/dufs/issues/386))
|
||||
|
||||
### Features
|
||||
|
||||
- Tls handshake timeout ([#368](https://github.com/sigoden/dufs/issues/368))
|
||||
- Add api to get the hash of a file ([#375](https://github.com/sigoden/dufs/issues/375))
|
||||
- Add log-file option ([#383](https://github.com/sigoden/dufs/issues/383))
|
||||
|
||||
### Refactor
|
||||
|
||||
- Digest_auth related tests ([#372](https://github.com/sigoden/dufs/issues/372))
|
||||
- Add fixed-width numerals to date and size on file list page ([#378](https://github.com/sigoden/dufs/issues/378))
|
||||
|
||||
## [0.40.0] - 2024-02-13
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Guard req and destination path ([#359](https://github.com/sigoden/dufs/issues/359))
|
||||
|
||||
### Features
|
||||
|
||||
- Revert supporting for forbidden permission ([#352](https://github.com/sigoden/dufs/issues/352))
|
||||
|
||||
### Refactor
|
||||
|
||||
- Do not try to bind ipv6 if no ipv6 ([#348](https://github.com/sigoden/dufs/issues/348))
|
||||
- Improve invalid auth ([#356](https://github.com/sigoden/dufs/issues/356))
|
||||
- Improve resolve_path and handle_assets, abandon guard_path ([#360](https://github.com/sigoden/dufs/issues/360))
|
||||
|
||||
## [0.39.0] - 2024-01-11
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Upload more than 100 files in directory ([#317](https://github.com/sigoden/dufs/issues/317))
|
||||
- Auth precedence ([#325](https://github.com/sigoden/dufs/issues/325))
|
||||
- Serve files with names containing newline char ([#328](https://github.com/sigoden/dufs/issues/328))
|
||||
- Corrupted zip when downloading large folders ([#337](https://github.com/sigoden/dufs/issues/337))
|
||||
|
||||
### Features
|
||||
|
||||
- Empty search `?q=` list all paths ([#311](https://github.com/sigoden/dufs/issues/311))
|
||||
- Add `--compress` option ([#319](https://github.com/sigoden/dufs/issues/319))
|
||||
- Upgrade to hyper 1.0 ([#321](https://github.com/sigoden/dufs/issues/321))
|
||||
- Auth supports forbidden permissions ([#329](https://github.com/sigoden/dufs/issues/329))
|
||||
- Supports resumable uploads ([#343](https://github.com/sigoden/dufs/issues/343))
|
||||
|
||||
### Refactor
|
||||
|
||||
- Change the format of www-authenticate ([#312](https://github.com/sigoden/dufs/issues/312))
|
||||
- Change the value name of `--config` ([#313](https://github.com/sigoden/dufs/issues/313))
|
||||
- Optimize http range parsing and handling ([#323](https://github.com/sigoden/dufs/issues/323))
|
||||
- Propfind with auth no need to list all ([#344](https://github.com/sigoden/dufs/issues/344))
|
||||
|
||||
## [0.38.0] - 2023-11-28
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Unable to start if config file omit bind/port fields ([#294](https://github.com/sigoden/dufs/issues/294))
|
||||
|
||||
### Features
|
||||
|
||||
- Password can contain `:` `@` `|` ([#297](https://github.com/sigoden/dufs/issues/297))
|
||||
- Deprecate the use of `|` to separate auth rules ([#298](https://github.com/sigoden/dufs/issues/298))
|
||||
- More flexible config values ([#299](https://github.com/sigoden/dufs/issues/299))
|
||||
- Ui supports view file ([#301](https://github.com/sigoden/dufs/issues/301))
|
||||
|
||||
### Refactor
|
||||
|
||||
- Take improvements from the edge browser ([#289](https://github.com/sigoden/dufs/issues/289))
|
||||
- Ui change the cursor for upload-btn to a pointer ([#291](https://github.com/sigoden/dufs/issues/291))
|
||||
- Ui improve uploading progress ([#296](https://github.com/sigoden/dufs/issues/296))
|
||||
|
||||
## [0.37.1] - 2023-11-08
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Use DUFS_CONFIG to specify the config file path ([#286](https://github.com/sigoden/dufs/issues/286)
|
||||
|
||||
## [0.37.0] - 2023-11-08
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Sort path ignore case ([#264](https://github.com/sigoden/dufs/issues/264))
|
||||
- Ui show user-name next to the user-icon ([#278](https://github.com/sigoden/dufs/issues/278))
|
||||
- Auto delete half-uploaded files ([#280](https://github.com/sigoden/dufs/issues/280))
|
||||
|
||||
### Features
|
||||
|
||||
- Deprecate `--auth-method`, as both options are available ([#279](https://github.com/sigoden/dufs/issues/279))
|
||||
- Support config file with `--config` option ([#281](https://github.com/sigoden/dufs/issues/281))
|
||||
- Support hashed password ([#283](https://github.com/sigoden/dufs/issues/283))
|
||||
|
||||
### Refactor
|
||||
|
||||
- Remove one clone on `assets_prefix` ([#270](https://github.com/sigoden/dufs/issues/270))
|
||||
- Optimize tests
|
||||
- Improve code quality ([#282](https://github.com/sigoden/dufs/issues/282))
|
||||
|
||||
## [0.36.0] - 2023-08-24
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Ui readonly if no write perm ([#258](https://github.com/sigoden/dufs/issues/258))
|
||||
|
||||
### Testing
|
||||
|
||||
- Remove dependency on native tls ([#255](https://github.com/sigoden/dufs/issues/255))
|
||||
|
||||
## [0.35.0] - 2023-08-14
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Search should ignore entry path ([#235](https://github.com/sigoden/dufs/issues/235))
|
||||
- Typo __ASSERTS_PREFIX__ ([#252](https://github.com/sigoden/dufs/issues/252))
|
||||
|
||||
### Features
|
||||
|
||||
- Sort by type first, then sort by name/mtime/size ([#241](https://github.com/sigoden/dufs/issues/241))
|
||||
|
||||
## [0.34.2] - 2023-06-05
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Ui refresh page after login ([#230](https://github.com/sigoden/dufs/issues/230))
|
||||
- Webdav only see public folder even logging in ([#231](https://github.com/sigoden/dufs/issues/231))
|
||||
|
||||
## [0.34.1] - 2023-06-02
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Auth logic ([#224](https://github.com/sigoden/dufs/issues/224))
|
||||
- Allow all cors headers and methods ([#225](https://github.com/sigoden/dufs/issues/225))
|
||||
|
||||
### Refactor
|
||||
|
||||
- Ui checkAuth ([#226](https://github.com/sigoden/dufs/issues/226))
|
||||
|
||||
## [0.34.0] - 2023-06-01
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- URL-encoded filename when downloading in safari ([#203](https://github.com/sigoden/dufs/issues/203))
|
||||
- Ui path table show move action ([#219](https://github.com/sigoden/dufs/issues/219))
|
||||
- Ui set default max uploading to 1 ([#220](https://github.com/sigoden/dufs/issues/220))
|
||||
|
||||
### Features
|
||||
|
||||
- Webui editing support multiple encodings ([#197](https://github.com/sigoden/dufs/issues/197))
|
||||
- Add timestamp metadata to generated zip file ([#204](https://github.com/sigoden/dufs/issues/204))
|
||||
- Show precise file size with decimal ([#210](https://github.com/sigoden/dufs/issues/210))
|
||||
- [**breaking**] New auth ([#218](https://github.com/sigoden/dufs/issues/218))
|
||||
|
||||
### Refactor
|
||||
|
||||
- Cli positional rename root => SERVE_PATH([#215](https://github.com/sigoden/dufs/issues/215))
|
||||
|
||||
## [0.33.0] - 2023-03-17
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Cors allow-request-header add content-type ([#184](https://github.com/sigoden/dufs/issues/184))
|
||||
- Hidden don't works on some files ([#188](https://github.com/sigoden/dufs/issues/188))
|
||||
- Basic auth sometimes does not work ([#194](https://github.com/sigoden/dufs/issues/194))
|
||||
|
||||
### Features
|
||||
|
||||
- Guess plain text encoding then set content-type charset ([#186](https://github.com/sigoden/dufs/issues/186))
|
||||
|
||||
### Refactor
|
||||
|
||||
- Improve error handle ([#195](https://github.com/sigoden/dufs/issues/195))
|
||||
|
||||
## [0.32.0] - 2023-02-22
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Set the STOPSIGNAL to SIGINT for Dockerfile
|
||||
- Remove Method::Options auth check ([#168](https://github.com/sigoden/dufs/issues/168))
|
||||
- Clear search input also clear query ([#178](https://github.com/sigoden/dufs/issues/178))
|
||||
|
||||
### Features
|
||||
|
||||
- [**breaking**] Add option --allow-archive ([#152](https://github.com/sigoden/dufs/issues/152))
|
||||
- Use env var for args ([#170](https://github.com/sigoden/dufs/issues/170))
|
||||
- Hiding only directories instead of files ([#175](https://github.com/sigoden/dufs/issues/175))
|
||||
- API to search and list directories ([#177](https://github.com/sigoden/dufs/issues/177))
|
||||
- Support edit files ([#179](https://github.com/sigoden/dufs/issues/179))
|
||||
- Support new file ([#180](https://github.com/sigoden/dufs/issues/180))
|
||||
- Ui improves the login experience ([#182](https://github.com/sigoden/dufs/issues/182))
|
||||
|
||||
## [0.31.0] - 2022-11-11
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Auth not works with --path-prefix ([#138](https://github.com/sigoden/dufs/issues/138))
|
||||
- Don't search on empty query string ([#140](https://github.com/sigoden/dufs/issues/140))
|
||||
- Status code for MKCOL on existing resource ([#142](https://github.com/sigoden/dufs/issues/142))
|
||||
- Panic on PROPFIND // ([#144](https://github.com/sigoden/dufs/issues/144))
|
||||
|
||||
### Features
|
||||
|
||||
- Support unix sockets ([#145](https://github.com/sigoden/dufs/issues/145))
|
||||
|
||||
## [0.30.0] - 2022-09-09
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Hide path by ext name ([#126](https://github.com/sigoden/dufs/issues/126))
|
||||
|
||||
### Features
|
||||
|
||||
- Support sort by name, mtime, size ([#128](https://github.com/sigoden/dufs/issues/128))
|
||||
- Add --assets options to override assets ([#134](https://github.com/sigoden/dufs/issues/134))
|
||||
|
||||
## [0.29.0] - 2022-08-03
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Table row hover highlighting in dark mode ([#122](https://github.com/sigoden/dufs/issues/122))
|
||||
|
||||
### Features
|
||||
|
||||
- Support ecdsa tls cert ([#119](https://github.com/sigoden/dufs/issues/119))
|
||||
|
||||
## [0.28.0] - 2022-08-01
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- File path contains special characters ([#114](https://github.com/sigoden/dufs/issues/114))
|
||||
|
||||
### Features
|
||||
|
||||
- Add table row hover ([#115](https://github.com/sigoden/dufs/issues/115))
|
||||
- Support customize http log format ([#116](https://github.com/sigoden/dufs/issues/116))
|
||||
|
||||
## [0.27.0] - 2022-07-25
|
||||
|
||||
### Features
|
||||
|
||||
- Improve hidden to support glob ([#108](https://github.com/sigoden/dufs/issues/108))
|
||||
- Adjust digest auth timeout to 1day ([#110](https://github.com/sigoden/dufs/issues/110))
|
||||
|
||||
## [0.26.0] - 2022-07-11
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Cors headers ([#100](https://github.com/sigoden/dufs/issues/100))
|
||||
|
||||
### Features
|
||||
|
||||
- Make --path-prefix works on serving single file ([#102](https://github.com/sigoden/dufs/issues/102))
|
||||
|
||||
## [0.25.0] - 2022-07-06
|
||||
|
||||
### Features
|
||||
|
||||
- Ui supports creating folder ([#91](https://github.com/sigoden/dufs/issues/91))
|
||||
- Ui supports move folder/file to new path ([#92](https://github.com/sigoden/dufs/issues/92))
|
||||
- Check permission on move/copy destination ([#93](https://github.com/sigoden/dufs/issues/93))
|
||||
- Add completions ([#97](https://github.com/sigoden/dufs/issues/97))
|
||||
- Limit the number of concurrent uploads ([#98](https://github.com/sigoden/dufs/issues/98))
|
||||
|
||||
## [0.24.0] - 2022-07-02
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Unexpected stack overflow when searching a lot ([#87](https://github.com/sigoden/dufs/issues/87))
|
||||
|
||||
### Features
|
||||
|
||||
- Allow search with --render-try-index ([#88](https://github.com/sigoden/dufs/issues/88))
|
||||
|
||||
## [0.23.1] - 2022-06-30
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Safari layout and compatibility ([#83](https://github.com/sigoden/dufs/issues/83))
|
||||
- Permissions of unzipped files ([#84](https://github.com/sigoden/dufs/issues/84))
|
||||
|
||||
## [0.23.0] - 2022-06-29
|
||||
|
||||
### Features
|
||||
|
||||
- Use feature to conditional support tls ([#77](https://github.com/sigoden/dufs/issues/77))
|
||||
|
||||
### Ci
|
||||
|
||||
- Support more platforms ([#76](https://github.com/sigoden/dufs/issues/76))
|
||||
|
||||
## [0.22.0] - 2022-06-26
|
||||
|
||||
### Features
|
||||
|
||||
- Support hiding folders with --hidden ([#73](https://github.com/sigoden/dufs/issues/73))
|
||||
|
||||
## [0.21.0] - 2022-06-23
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Escape name contains html escape code ([#65](https://github.com/sigoden/dufs/issues/65))
|
||||
|
||||
### Features
|
||||
|
||||
- Use custom logger with timestamp in rfc3339 ([#67](https://github.com/sigoden/dufs/issues/67))
|
||||
|
||||
### Refactor
|
||||
|
||||
- Split css/js from index.html ([#68](https://github.com/sigoden/dufs/issues/68))
|
||||
|
||||
## [0.20.0] - 2022-06-20
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- DecodeURI searching string ([#61](https://github.com/sigoden/dufs/issues/61))
|
||||
|
||||
### Features
|
||||
|
||||
- Added basic auth ([#60](https://github.com/sigoden/dufs/issues/60))
|
||||
- Add option --allow-search ([#62](https://github.com/sigoden/dufs/issues/62))
|
||||
|
||||
## [0.19.0] - 2022-06-19
|
||||
|
||||
### Features
|
||||
|
||||
- [**breaking**] Path level access control ([#52](https://github.com/sigoden/dufs/issues/52))
|
||||
- Serve single file ([#54](https://github.com/sigoden/dufs/issues/54))
|
||||
- Ui hidden root dirname ([#58](https://github.com/sigoden/dufs/issues/58))
|
||||
- Reactive webpage ([#51](https://github.com/sigoden/dufs/issues/51))
|
||||
- [**breaking**] Rename to dufs ([#59](https://github.com/sigoden/dufs/issues/59))
|
||||
|
||||
### Refactor
|
||||
|
||||
- [**breaking**] Rename --cors to --enable-cors ([#57](https://github.com/sigoden/dufs/issues/57))
|
||||
|
||||
## [0.18.0] - 2022-06-18
|
||||
|
||||
### Features
|
||||
|
||||
- Add option --render-try-index ([#47](https://github.com/sigoden/dufs/issues/47))
|
||||
- Add slash to end of dir href
|
||||
|
||||
## [0.17.1] - 2022-06-16
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Range request ([#44](https://github.com/sigoden/dufs/issues/44))
|
||||
|
||||
## [0.17.0] - 2022-06-15
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Webdav propfind dir with slash ([#42](https://github.com/sigoden/dufs/issues/42))
|
||||
|
||||
### Features
|
||||
|
||||
- Listen both ipv4 and ipv6 by default ([#40](https://github.com/sigoden/dufs/issues/40))
|
||||
|
||||
### Refactor
|
||||
|
||||
- Trivial changes ([#41](https://github.com/sigoden/dufs/issues/41))
|
||||
|
||||
## [0.16.0] - 2022-06-12
|
||||
|
||||
### Features
|
||||
|
||||
- Implement head method ([#33](https://github.com/sigoden/dufs/issues/33))
|
||||
- Display upload speed and time left ([#34](https://github.com/sigoden/dufs/issues/34))
|
||||
- Support tls-key in pkcs#8 format ([#35](https://github.com/sigoden/dufs/issues/35))
|
||||
- Options method return status 200
|
||||
|
||||
### Testing
|
||||
|
||||
- Add integration tests ([#36](https://github.com/sigoden/dufs/issues/36))
|
||||
|
||||
## [0.15.1] - 2022-06-11
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Cannot upload ([#32](https://github.com/sigoden/dufs/issues/32))
|
||||
|
||||
## [0.15.0] - 2022-06-10
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Encode webdav href as uri ([#28](https://github.com/sigoden/dufs/issues/28))
|
||||
- Query dir param
|
||||
|
||||
### Features
|
||||
|
||||
- Add basic dark theme ([#29](https://github.com/sigoden/dufs/issues/29))
|
||||
- Add empty state placeholder to page([#30](https://github.com/sigoden/dufs/issues/30))
|
||||
|
||||
## [0.14.0] - 2022-06-07
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Send index page with content-type ([#26](https://github.com/sigoden/duf/issues/26))
|
||||
- Send index page with content-type ([#26](https://github.com/sigoden/dufs/issues/26))
|
||||
|
||||
### Features
|
||||
|
||||
- Support ipv6 ([#25](https://github.com/sigoden/duf/issues/25))
|
||||
- Add favicon ([#27](https://github.com/sigoden/duf/issues/27))
|
||||
- Support ipv6 ([#25](https://github.com/sigoden/dufs/issues/25))
|
||||
- Add favicon ([#27](https://github.com/sigoden/dufs/issues/27))
|
||||
|
||||
## [0.13.2] - 2022-06-06
|
||||
|
||||
@@ -24,11 +505,11 @@ All notable changes to this project will be documented in this file.
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Escape filename ([#21](https://github.com/sigoden/duf/issues/21))
|
||||
- Escape filename ([#21](https://github.com/sigoden/dufs/issues/21))
|
||||
|
||||
### Refactor
|
||||
|
||||
- Use logger ([#22](https://github.com/sigoden/duf/issues/22))
|
||||
- Use logger ([#22](https://github.com/sigoden/dufs/issues/22))
|
||||
|
||||
## [0.13.0] - 2022-06-05
|
||||
|
||||
@@ -38,16 +519,16 @@ All notable changes to this project will be documented in this file.
|
||||
|
||||
### Features
|
||||
|
||||
- Implement more webdav methods ([#13](https://github.com/sigoden/duf/issues/13))
|
||||
- Use digest auth ([#14](https://github.com/sigoden/duf/issues/14))
|
||||
- Add webdav proppatch handler ([#18](https://github.com/sigoden/duf/issues/18))
|
||||
- Implement more webdav methods ([#13](https://github.com/sigoden/dufs/issues/13))
|
||||
- Use digest auth ([#14](https://github.com/sigoden/dufs/issues/14))
|
||||
- Add webdav proppatch handler ([#18](https://github.com/sigoden/dufs/issues/18))
|
||||
|
||||
## [0.12.1] - 2022-06-04
|
||||
|
||||
### Features
|
||||
|
||||
- Support webdav ([#10](https://github.com/sigoden/duf/issues/10))
|
||||
- Remove unzip uploaded feature ([#11](https://github.com/sigoden/duf/issues/11))
|
||||
- Support webdav ([#10](https://github.com/sigoden/dufs/issues/10))
|
||||
- Remove unzip uploaded feature ([#11](https://github.com/sigoden/dufs/issues/11))
|
||||
|
||||
## [0.11.0] - 2022-06-03
|
||||
|
||||
|
||||
2877
Cargo.lock
generated
2877
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
97
Cargo.toml
97
Cargo.toml
@@ -1,45 +1,78 @@
|
||||
[package]
|
||||
name = "duf"
|
||||
version = "0.14.0"
|
||||
name = "dufs"
|
||||
version = "0.45.0"
|
||||
edition = "2021"
|
||||
authors = ["sigoden <sigoden@gmail.com>"]
|
||||
description = "Duf is a simple file server."
|
||||
description = "Dufs is a distinctive utility file server"
|
||||
license = "MIT OR Apache-2.0"
|
||||
homepage = "https://github.com/sigoden/duf"
|
||||
repository = "https://github.com/sigoden/duf"
|
||||
autotests = false
|
||||
homepage = "https://github.com/sigoden/dufs"
|
||||
repository = "https://github.com/sigoden/dufs"
|
||||
categories = ["command-line-utilities", "web-programming::http-server"]
|
||||
keywords = ["static", "file", "server", "webdav", "cli"]
|
||||
|
||||
[dependencies]
|
||||
clap = { version = "3", default-features = false, features = ["std", "cargo"] }
|
||||
chrono = "0.4"
|
||||
tokio = { version = "1", features = ["rt-multi-thread", "macros", "fs", "io-util", "signal"]}
|
||||
tokio-rustls = "0.23"
|
||||
tokio-stream = { version = "0.1", features = ["net"] }
|
||||
tokio-util = { version = "0.7", features = ["codec", "io-util"] }
|
||||
hyper = { version = "0.14", features = ["http1", "server", "tcp", "stream"] }
|
||||
percent-encoding = "2.1"
|
||||
clap = { version = "4.5", features = ["wrap_help", "env"] }
|
||||
clap_complete = "4.5"
|
||||
chrono = { version = "0.4", default-features = false, features = ["clock"] }
|
||||
tokio = { version = "1", features = ["rt-multi-thread", "macros", "fs", "io-util", "signal", "net"]}
|
||||
tokio-util = { version = "0.7", features = ["io-util", "compat"] }
|
||||
hyper = { version = "1", features = ["http1", "server"] }
|
||||
percent-encoding = "2.3"
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
serde_json = "1"
|
||||
futures = "0.3"
|
||||
base64 = "0.13"
|
||||
async_zip = "0.0.7"
|
||||
async-walkdir = "0.2.0"
|
||||
headers = "0.3.7"
|
||||
mime_guess = "2.0.4"
|
||||
get_if_addrs = "0.5.3"
|
||||
rustls = { version = "0.20", default-features = false, features = ["tls12"] }
|
||||
rustls-pemfile = "1"
|
||||
md5 = "0.7.0"
|
||||
lazy_static = "1.4.0"
|
||||
uuid = { version = "1.1.1", features = ["v4", "fast-rng"] }
|
||||
urlencoding = "2.1.0"
|
||||
xml-rs = "0.8"
|
||||
env_logger = { version = "0.9.0", default-features = false, features = ["humantime"] }
|
||||
log = "0.4.17"
|
||||
futures-util = { version = "0.3", default-features = false, features = ["alloc"] }
|
||||
async_zip = { version = "0.0.18", default-features = false, features = ["deflate", "bzip2", "xz", "chrono", "tokio"] }
|
||||
headers = "0.4"
|
||||
mime_guess = "2.0"
|
||||
if-addrs = "0.14"
|
||||
tokio-rustls = { version = "0.26", optional = true }
|
||||
md5 = "0.8"
|
||||
lazy_static = "1.4"
|
||||
uuid = { version = "1.7", features = ["v4", "fast-rng"] }
|
||||
urlencoding = "2.1"
|
||||
xml-rs = "1.0.0"
|
||||
log = { version = "0.4", features = ["std"] }
|
||||
socket2 = "0.6"
|
||||
async-stream = "0.3"
|
||||
walkdir = "2.3"
|
||||
form_urlencoded = "1.2"
|
||||
alphanumeric-sort = "1.4"
|
||||
content_inspector = "0.2"
|
||||
anyhow = "1.0"
|
||||
chardetng = "0.1"
|
||||
glob = "0.3"
|
||||
indexmap = "2.2"
|
||||
serde_yaml = "0.9"
|
||||
sha-crypt = "0.5"
|
||||
base64 = "0.22"
|
||||
smart-default = "0.7"
|
||||
rustls-pki-types = "1.2"
|
||||
hyper-util = { version = "0.1", features = ["server-auto", "tokio"] }
|
||||
http-body-util = "0.1"
|
||||
bytes = "1.5"
|
||||
pin-project-lite = "0.2"
|
||||
sha2 = "0.10.8"
|
||||
ed25519-dalek = "2.2.0"
|
||||
hex = "0.4.3"
|
||||
|
||||
[features]
|
||||
default = ["tls"]
|
||||
tls = ["tokio-rustls"]
|
||||
|
||||
[dev-dependencies]
|
||||
assert_cmd = "2"
|
||||
reqwest = { version = "0.13", features = ["blocking", "multipart", "rustls"], default-features = false }
|
||||
assert_fs = "1"
|
||||
port_check = "0.3"
|
||||
rstest = "0.26.1"
|
||||
regex = "1"
|
||||
url = "2"
|
||||
predicates = "3"
|
||||
digest_auth = "0.3.1"
|
||||
|
||||
[profile.release]
|
||||
opt-level = 3
|
||||
lto = true
|
||||
strip = true
|
||||
opt-level = "z"
|
||||
codegen-units = 1
|
||||
panic = "abort"
|
||||
strip = "symbols"
|
||||
|
||||
18
Dockerfile
18
Dockerfile
@@ -1,10 +1,14 @@
|
||||
FROM rust:1.61 as builder
|
||||
RUN rustup target add x86_64-unknown-linux-musl
|
||||
RUN apt-get update && apt-get install --no-install-recommends -y musl-tools
|
||||
WORKDIR /app
|
||||
FROM --platform=linux/amd64 messense/rust-musl-cross:x86_64-musl AS amd64
|
||||
COPY . .
|
||||
RUN cargo build --target x86_64-unknown-linux-musl --release
|
||||
RUN cargo install --path . --root /
|
||||
|
||||
FROM --platform=linux/amd64 messense/rust-musl-cross:aarch64-musl AS arm64
|
||||
COPY . .
|
||||
RUN cargo install --path . --root /
|
||||
|
||||
FROM ${TARGETARCH} AS builder
|
||||
|
||||
FROM scratch
|
||||
COPY --from=builder /app/target/x86_64-unknown-linux-musl/release/duf /bin/
|
||||
ENTRYPOINT ["/bin/duf"]
|
||||
COPY --from=builder /bin/dufs /bin/dufs
|
||||
STOPSIGNAL SIGINT
|
||||
ENTRYPOINT ["/bin/dufs"]
|
||||
|
||||
19
Dockerfile-release
Normal file
19
Dockerfile-release
Normal file
@@ -0,0 +1,19 @@
|
||||
FROM alpine as builder
|
||||
ARG REPO VER TARGETPLATFORM
|
||||
RUN if [ "$TARGETPLATFORM" = "linux/amd64" ]; then \
|
||||
TARGET="x86_64-unknown-linux-musl"; \
|
||||
elif [ "$TARGETPLATFORM" = "linux/arm64" ]; then \
|
||||
TARGET="aarch64-unknown-linux-musl"; \
|
||||
elif [ "$TARGETPLATFORM" = "linux/386" ]; then \
|
||||
TARGET="i686-unknown-linux-musl"; \
|
||||
elif [ "$TARGETPLATFORM" = "linux/arm/v7" ]; then \
|
||||
TARGET="armv7-unknown-linux-musleabihf"; \
|
||||
fi && \
|
||||
wget https://github.com/${REPO}/releases/download/${VER}/dufs-${VER}-${TARGET}.tar.gz && \
|
||||
tar -xf dufs-${VER}-${TARGET}.tar.gz && \
|
||||
mv dufs /bin/
|
||||
|
||||
FROM scratch
|
||||
COPY --from=builder /bin/dufs /bin/dufs
|
||||
STOPSIGNAL SIGINT
|
||||
ENTRYPOINT ["/bin/dufs"]
|
||||
409
README.md
409
README.md
@@ -1,20 +1,21 @@
|
||||
# Duf
|
||||
# Dufs
|
||||
|
||||
[](https://github.com/sigoden/duf/actions/workflows/ci.yaml)
|
||||
[](https://crates.io/crates/duf)
|
||||
[](https://github.com/sigoden/dufs/actions/workflows/ci.yaml)
|
||||
[](https://crates.io/crates/dufs)
|
||||
[](https://hub.docker.com/r/sigoden/dufs)
|
||||
|
||||
Duf is a simple file server. Support static serve, search, upload, webdav...
|
||||
Dufs is a distinctive utility file server that supports static serving, uploading, searching, accessing control, webdav...
|
||||
|
||||

|
||||

|
||||
|
||||
## Features
|
||||
|
||||
- Serve static files
|
||||
- Download folder as zip file
|
||||
- Upload files and folders (Drag & Drop)
|
||||
- Search files
|
||||
- Partial responses (Parallel/Resume download)
|
||||
- Authentication
|
||||
- Create/Edit/Search files
|
||||
- Resumable/partial uploads/downloads
|
||||
- Access control
|
||||
- Support https
|
||||
- Support webdav
|
||||
- Easy to use with curl
|
||||
@@ -24,116 +25,402 @@ Duf is a simple file server. Support static serve, search, upload, webdav...
|
||||
### With cargo
|
||||
|
||||
```
|
||||
cargo install duf
|
||||
cargo install dufs
|
||||
```
|
||||
|
||||
### With docker
|
||||
|
||||
```
|
||||
docker run -v /tmp:/tmp -p 5000:5000 --rm -it docker.io/sigoden/duf /tmp
|
||||
docker run -v `pwd`:/data -p 5000:5000 --rm sigoden/dufs /data -A
|
||||
```
|
||||
|
||||
### With [Homebrew](https://brew.sh)
|
||||
|
||||
```
|
||||
brew install dufs
|
||||
```
|
||||
|
||||
### Binaries on macOS, Linux, Windows
|
||||
|
||||
Download from [Github Releases](https://github.com/sigoden/duf/releases), unzip and add duf to your $PATH.
|
||||
Download from [Github Releases](https://github.com/sigoden/dufs/releases), unzip and add dufs to your $PATH.
|
||||
|
||||
## CLI
|
||||
|
||||
```
|
||||
Duf is a simple file server.
|
||||
Dufs is a distinctive utility file server - https://github.com/sigoden/dufs
|
||||
|
||||
USAGE:
|
||||
duf [OPTIONS] [path]
|
||||
Usage: dufs [OPTIONS] [serve-path]
|
||||
|
||||
ARGS:
|
||||
<path> Path to a root directory for serving files [default: .]
|
||||
Arguments:
|
||||
[serve-path] Specific path to serve [default: .]
|
||||
|
||||
OPTIONS:
|
||||
-a, --auth <user:pass> Use HTTP authentication
|
||||
--no-auth-access Not required auth when access static files
|
||||
-A, --allow-all Allow all operations
|
||||
--allow-delete Allow delete files/folders
|
||||
--allow-symlink Allow symlink to files/folders outside root directory
|
||||
--allow-upload Allow upload files/folders
|
||||
-b, --bind <address> Specify bind address [default: 0.0.0.0]
|
||||
--cors Enable CORS, sets `Access-Control-Allow-Origin: *`
|
||||
-h, --help Print help information
|
||||
-p, --port <port> Specify port to listen on [default: 5000]
|
||||
--path-prefix <path> Specify an url path prefix
|
||||
--render-index Render index.html when requesting a directory
|
||||
--render-spa Render for single-page application
|
||||
--tls-cert <path> Path to an SSL/TLS certificate to serve with HTTPS
|
||||
--tls-key <path> Path to the SSL/TLS certificate's private key
|
||||
-V, --version Print version information
|
||||
Options:
|
||||
-c, --config <file> Specify configuration file
|
||||
-b, --bind <addrs> Specify bind address or unix socket
|
||||
-p, --port <port> Specify port to listen on [default: 5000]
|
||||
--path-prefix <path> Specify a path prefix
|
||||
--hidden <value> Hide paths from directory listings, e.g. tmp,*.log,*.lock
|
||||
-a, --auth <rules> Add auth roles, e.g. user:pass@/dir1:rw,/dir2
|
||||
-A, --allow-all Allow all operations
|
||||
--allow-upload Allow upload files/folders
|
||||
--allow-delete Allow delete files/folders
|
||||
--allow-search Allow search files/folders
|
||||
--allow-symlink Allow symlink to files/folders outside root directory
|
||||
--allow-archive Allow download folders as archive file
|
||||
--allow-hash Allow ?hash query to get file sha256 hash
|
||||
--enable-cors Enable CORS, sets `Access-Control-Allow-Origin: *`
|
||||
--render-index Serve index.html when requesting a directory, returns 404 if not found index.html
|
||||
--render-try-index Serve index.html when requesting a directory, returns directory listing if not found index.html
|
||||
--render-spa Serve SPA(Single Page Application)
|
||||
--assets <path> Set the path to the assets directory for overriding the built-in assets
|
||||
--log-format <format> Customize http log format
|
||||
--log-file <file> Specify the file to save logs to, other than stdout/stderr
|
||||
--compress <level> Set zip compress level [default: low] [possible values: none, low, medium, high]
|
||||
--completions <shell> Print shell completion script for <shell> [possible values: bash, elvish, fish, powershell, zsh]
|
||||
--tls-cert <path> Path to an SSL/TLS certificate to serve with HTTPS
|
||||
--tls-key <path> Path to the SSL/TLS certificate's private key
|
||||
-h, --help Print help
|
||||
-V, --version Print version
|
||||
```
|
||||
|
||||
## Examples
|
||||
|
||||
You can run this command to start serving your current working directory on 127.0.0.1:5000 by default.
|
||||
Serve current working directory in read-only mode
|
||||
|
||||
```
|
||||
duf
|
||||
dufs
|
||||
```
|
||||
|
||||
...or specify which folder you want to serve.
|
||||
Allow all operations like upload/delete/search/create/edit...
|
||||
|
||||
```
|
||||
duf folder_name
|
||||
```
|
||||
|
||||
Allow all operations such as upload, delete
|
||||
|
||||
```sh
|
||||
duf --allow-all
|
||||
dufs -A
|
||||
```
|
||||
|
||||
Only allow upload operation
|
||||
|
||||
```
|
||||
duf --allow-upload
|
||||
dufs --allow-upload
|
||||
```
|
||||
|
||||
Serve a single page application (SPA)
|
||||
Serve a specific directory
|
||||
|
||||
```
|
||||
duf --render-spa
|
||||
dufs Downloads
|
||||
```
|
||||
|
||||
Serve a single file
|
||||
|
||||
```
|
||||
dufs linux-distro.iso
|
||||
```
|
||||
|
||||
Serve a single-page application like react/vue
|
||||
|
||||
```
|
||||
dufs --render-spa
|
||||
```
|
||||
|
||||
Serve a static website with index.html
|
||||
|
||||
```
|
||||
dufs --render-index
|
||||
```
|
||||
|
||||
Require username/password
|
||||
|
||||
```
|
||||
dufs -a admin:123@/:rw
|
||||
```
|
||||
|
||||
Listen on specific host:ip
|
||||
|
||||
```
|
||||
dufs -b 127.0.0.1 -p 80
|
||||
```
|
||||
|
||||
Listen on unix socket
|
||||
```
|
||||
dufs -b /tmp/dufs.socket
|
||||
```
|
||||
|
||||
Use https
|
||||
|
||||
```
|
||||
duf --tls-cert my.crt --tls-key my.key
|
||||
dufs --tls-cert my.crt --tls-key my.key
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
Download a file
|
||||
Upload a file
|
||||
|
||||
```sh
|
||||
curl -T path-to-file http://127.0.0.1:5000/new-path/path-to-file
|
||||
```
|
||||
curl http://127.0.0.1:5000/some-file
|
||||
|
||||
Download a file
|
||||
```sh
|
||||
curl http://127.0.0.1:5000/path-to-file # download the file
|
||||
curl http://127.0.0.1:5000/path-to-file?hash # retrieve the sha256 hash of the file
|
||||
```
|
||||
|
||||
Download a folder as zip file
|
||||
|
||||
```
|
||||
curl -o some-folder.zip http://127.0.0.1:5000/some-folder?zip
|
||||
```
|
||||
|
||||
Upload a file
|
||||
|
||||
```
|
||||
curl --upload-file some-file http://127.0.0.1:5000/some-file
|
||||
```sh
|
||||
curl -o path-to-folder.zip http://127.0.0.1:5000/path-to-folder?zip
|
||||
```
|
||||
|
||||
Delete a file/folder
|
||||
|
||||
```sh
|
||||
curl -X DELETE http://127.0.0.1:5000/path-to-file-or-folder
|
||||
```
|
||||
curl -X DELETE http://127.0.0.1:5000/some-file
|
||||
|
||||
Create a directory
|
||||
|
||||
```sh
|
||||
curl -X MKCOL http://127.0.0.1:5000/path-to-folder
|
||||
```
|
||||
|
||||
Move the file/folder to the new path
|
||||
|
||||
```sh
|
||||
curl -X MOVE http://127.0.0.1:5000/path -H "Destination: http://127.0.0.1:5000/new-path"
|
||||
```
|
||||
|
||||
List/search directory contents
|
||||
|
||||
```sh
|
||||
curl http://127.0.0.1:5000?q=Dockerfile # search for files, similar to `find -name Dockerfile`
|
||||
curl http://127.0.0.1:5000?simple # output names only, similar to `ls -1`
|
||||
curl http://127.0.0.1:5000?json # output paths in json format
|
||||
```
|
||||
|
||||
With authorization (Both basic or digest auth works)
|
||||
|
||||
```sh
|
||||
curl http://127.0.0.1:5000/file --user user:pass # basic auth
|
||||
curl http://127.0.0.1:5000/file --user user:pass --digest # digest auth
|
||||
```
|
||||
|
||||
Resumable downloads
|
||||
|
||||
```sh
|
||||
curl -C- -o file http://127.0.0.1:5000/file
|
||||
```
|
||||
|
||||
Resumable uploads
|
||||
|
||||
```sh
|
||||
upload_offset=$(curl -I -s http://127.0.0.1:5000/file | tr -d '\r' | sed -n 's/content-length: //p')
|
||||
dd skip=$upload_offset if=file status=none ibs=1 | \
|
||||
curl -X PATCH -H "X-Update-Range: append" --data-binary @- http://127.0.0.1:5000/file
|
||||
```
|
||||
|
||||
Health checks
|
||||
|
||||
```sh
|
||||
curl http://127.0.0.1:5000/__dufs__/health
|
||||
```
|
||||
|
||||
<details>
|
||||
<summary><h2>Advanced Topics</h2></summary>
|
||||
|
||||
### Access Control
|
||||
|
||||
Dufs supports account based access control. You can control who can do what on which path with `--auth`/`-a`.
|
||||
|
||||
```
|
||||
dufs -a admin:admin@/:rw -a guest:guest@/
|
||||
dufs -a user:pass@/:rw,/dir1 -a @/
|
||||
```
|
||||
|
||||
1. Use `@` to separate the account and paths. No account means anonymous user.
|
||||
2. Use `:` to separate the username and password of the account.
|
||||
3. Use `,` to separate paths.
|
||||
4. Use path suffix `:rw`/`:ro` set permissions: `read-write`/`read-only`. `:ro` can be omitted.
|
||||
|
||||
- `-a admin:admin@/:rw`: `admin` has complete permissions for all paths.
|
||||
- `-a guest:guest@/`: `guest` has read-only permissions for all paths.
|
||||
- `-a user:pass@/:rw,/dir1`: `user` has read-write permissions for `/*`, has read-only permissions for `/dir1/*`.
|
||||
- `-a @/`: All paths is publicly accessible, everyone can view/download it.
|
||||
|
||||
**Auth permissions are restricted by dufs global permissions.** If dufs does not enable upload permissions via `--allow-upload`, then the account will not have upload permissions even if it is granted `read-write`(`:rw`) permissions.
|
||||
|
||||
#### Hashed Password
|
||||
|
||||
DUFS supports the use of sha-512 hashed password.
|
||||
|
||||
Create hashed password:
|
||||
|
||||
```sh
|
||||
$ openssl passwd -6 123456 # or `mkpasswd -m sha-512 123456`
|
||||
$6$tWMB51u6Kb2ui3wd$5gVHP92V9kZcMwQeKTjyTRgySsYJu471Jb1I6iHQ8iZ6s07GgCIO69KcPBRuwPE5tDq05xMAzye0NxVKuJdYs/
|
||||
```
|
||||
|
||||
Use hashed password:
|
||||
|
||||
```sh
|
||||
dufs -a 'admin:$6$tWMB51u6Kb2ui3wd$5gVHP92V9kZcMwQeKTjyTRgySsYJu471Jb1I6iHQ8iZ6s07GgCIO69KcPBRuwPE5tDq05xMAzye0NxVKuJdYs/@/:rw'
|
||||
```
|
||||
> The hashed password contains `$6`, which can expand to a variable in some shells, so you have to use **single quotes** to wrap it.
|
||||
|
||||
Two important things for hashed passwords:
|
||||
|
||||
1. Dufs only supports sha-512 hashed passwords, so ensure that the password string always starts with `$6$`.
|
||||
2. Digest authentication does not function properly with hashed passwords.
|
||||
|
||||
|
||||
### Hide Paths
|
||||
|
||||
Dufs supports hiding paths from directory listings via option `--hidden <glob>,...`.
|
||||
|
||||
```
|
||||
dufs --hidden .git,.DS_Store,tmp
|
||||
```
|
||||
|
||||
> The glob used in --hidden only matches file and directory names, not paths. So `--hidden dir1/file` is invalid.
|
||||
|
||||
```sh
|
||||
dufs --hidden '.*' # hidden dotfiles
|
||||
dufs --hidden '*/' # hidden all folders
|
||||
dufs --hidden '*.log,*.lock' # hidden by exts
|
||||
dufs --hidden '*.log' --hidden '*.lock'
|
||||
```
|
||||
|
||||
### Log Format
|
||||
|
||||
Dufs supports customize http log format with option `--log-format`.
|
||||
|
||||
The log format can use following variables.
|
||||
|
||||
| variable | description |
|
||||
| ------------ | ------------------------------------------------------------------------- |
|
||||
| $remote_addr | client address |
|
||||
| $remote_user | user name supplied with authentication |
|
||||
| $request | full original request line |
|
||||
| $status | response status |
|
||||
| $http_ | arbitrary request header field. examples: $http_user_agent, $http_referer |
|
||||
|
||||
|
||||
The default log format is `'$remote_addr "$request" $status'`.
|
||||
```
|
||||
2022-08-06T06:59:31+08:00 INFO - 127.0.0.1 "GET /" 200
|
||||
```
|
||||
|
||||
Disable http log
|
||||
```
|
||||
dufs --log-format=''
|
||||
```
|
||||
|
||||
Log user-agent
|
||||
```
|
||||
dufs --log-format '$remote_addr "$request" $status $http_user_agent'
|
||||
```
|
||||
```
|
||||
2022-08-06T06:53:55+08:00 INFO - 127.0.0.1 "GET /" 200 Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/104.0.0.0 Safari/537.36
|
||||
```
|
||||
|
||||
Log remote-user
|
||||
```
|
||||
dufs --log-format '$remote_addr $remote_user "$request" $status' -a /@admin:admin -a /folder1@user1:pass1
|
||||
```
|
||||
```
|
||||
2022-08-06T07:04:37+08:00 INFO - 127.0.0.1 admin "GET /" 200
|
||||
```
|
||||
|
||||
## Environment variables
|
||||
|
||||
All options can be set using environment variables prefixed with `DUFS_`.
|
||||
|
||||
```
|
||||
[serve-path] DUFS_SERVE_PATH="."
|
||||
--config <file> DUFS_CONFIG=config.yaml
|
||||
-b, --bind <addrs> DUFS_BIND=0.0.0.0
|
||||
-p, --port <port> DUFS_PORT=5000
|
||||
--path-prefix <path> DUFS_PATH_PREFIX=/dufs
|
||||
--hidden <value> DUFS_HIDDEN=tmp,*.log,*.lock
|
||||
-a, --auth <rules> DUFS_AUTH="admin:admin@/:rw|@/"
|
||||
-A, --allow-all DUFS_ALLOW_ALL=true
|
||||
--allow-upload DUFS_ALLOW_UPLOAD=true
|
||||
--allow-delete DUFS_ALLOW_DELETE=true
|
||||
--allow-search DUFS_ALLOW_SEARCH=true
|
||||
--allow-symlink DUFS_ALLOW_SYMLINK=true
|
||||
--allow-archive DUFS_ALLOW_ARCHIVE=true
|
||||
--allow-hash DUFS_ALLOW_HASH=true
|
||||
--enable-cors DUFS_ENABLE_CORS=true
|
||||
--render-index DUFS_RENDER_INDEX=true
|
||||
--render-try-index DUFS_RENDER_TRY_INDEX=true
|
||||
--render-spa DUFS_RENDER_SPA=true
|
||||
--assets <path> DUFS_ASSETS=./assets
|
||||
--log-format <format> DUFS_LOG_FORMAT=""
|
||||
--log-file <file> DUFS_LOG_FILE=./dufs.log
|
||||
--compress <compress> DUFS_COMPRESS=low
|
||||
--tls-cert <path> DUFS_TLS_CERT=cert.pem
|
||||
--tls-key <path> DUFS_TLS_KEY=key.pem
|
||||
```
|
||||
|
||||
## Configuration File
|
||||
|
||||
You can specify and use the configuration file by selecting the option `--config <path-to-config.yaml>`.
|
||||
|
||||
The following are the configuration items:
|
||||
|
||||
```yaml
|
||||
serve-path: '.'
|
||||
bind: 0.0.0.0
|
||||
port: 5000
|
||||
path-prefix: /dufs
|
||||
hidden:
|
||||
- tmp
|
||||
- '*.log'
|
||||
- '*.lock'
|
||||
auth:
|
||||
- admin:admin@/:rw
|
||||
- user:pass@/src:rw,/share
|
||||
- '@/' # According to the YAML spec, quoting is required.
|
||||
allow-all: false
|
||||
allow-upload: true
|
||||
allow-delete: true
|
||||
allow-search: true
|
||||
allow-symlink: true
|
||||
allow-archive: true
|
||||
allow-hash: true
|
||||
enable-cors: true
|
||||
render-index: true
|
||||
render-try-index: true
|
||||
render-spa: true
|
||||
assets: ./assets/
|
||||
log-format: '$remote_addr "$request" $status $http_user_agent'
|
||||
log-file: ./dufs.log
|
||||
compress: low
|
||||
tls-cert: tests/data/cert.pem
|
||||
tls-key: tests/data/key_pkcs1.pem
|
||||
```
|
||||
|
||||
### Customize UI
|
||||
|
||||
Dufs allows users to customize the UI with your own assets.
|
||||
|
||||
```
|
||||
dufs --assets my-assets-dir/
|
||||
```
|
||||
|
||||
> If you only need to make slight adjustments to the current UI, you copy dufs's [assets](https://github.com/sigoden/dufs/tree/main/assets) directory and modify it accordingly. The current UI doesn't use any frameworks, just plain HTML/JS/CSS. As long as you have some basic knowledge of web development, it shouldn't be difficult to modify.
|
||||
|
||||
Your assets folder must contains a `index.html` file.
|
||||
|
||||
`index.html` can use the following placeholder variables to retrieve internal data.
|
||||
|
||||
- `__INDEX_DATA__`: directory listing data
|
||||
- `__ASSETS_PREFIX__`: assets url prefix
|
||||
|
||||
</details>
|
||||
|
||||
## License
|
||||
|
||||
Copyright (c) 2022 duf-developers.
|
||||
Copyright (c) 2022-2024 dufs-developers.
|
||||
|
||||
duf is made available under the terms of either the MIT License or the Apache License 2.0, at your option.
|
||||
dufs is made available under the terms of either the MIT License or the Apache License 2.0, at your option.
|
||||
|
||||
See the LICENSE-APACHE and LICENSE-MIT files for license details.
|
||||
See the LICENSE-APACHE and LICENSE-MIT files for license details.
|
||||
|
||||
21
SECURITY.md
Normal file
21
SECURITY.md
Normal file
@@ -0,0 +1,21 @@
|
||||
# Security Policy
|
||||
|
||||
## Supported Versions
|
||||
|
||||
The latest release of *dufs* is supported. The fixes for any security issues found will be included
|
||||
in the next release.
|
||||
|
||||
|
||||
## Reporting a Vulnerability
|
||||
|
||||
Please [use *dufs*'s security advisory reporting tool provided by
|
||||
GitHub](https://github.com/sigoden/dufs/security/advisories/new) to report security issues.
|
||||
|
||||
We strive to fix security issues as quickly as possible. Across the industry, often the developers'
|
||||
slowness in developing and releasing a fix is the biggest delay in the process; we take pride in
|
||||
minimizing this delay as much as we practically can. We encourage you to also minimize the delay
|
||||
between when you find an issue and when you contact us. You do not need to convince us to take your
|
||||
report seriously. You don't need to create a PoC or a patch if that would slow down your reporting.
|
||||
You don't need an elaborate write-up. A short, informal note about the issue is good. We can always
|
||||
communicate later to fill in any details we need after that first note is shared with us.
|
||||
|
||||
BIN
assets/favicon.ico
Executable file → Normal file
BIN
assets/favicon.ico
Executable file → Normal file
Binary file not shown.
|
Before Width: | Height: | Size: 4.2 KiB After Width: | Height: | Size: 9.1 KiB |
204
assets/index.css
204
assets/index.css
@@ -1,56 +1,81 @@
|
||||
html {
|
||||
font-family: -apple-system,BlinkMacSystemFont,Helvetica,Arial,sans-serif;
|
||||
font-family: -apple-system, BlinkMacSystemFont, Roboto, Helvetica, Arial, sans-serif;
|
||||
line-height: 1.5;
|
||||
color: #24292e;
|
||||
}
|
||||
|
||||
body {
|
||||
width: 700px;
|
||||
/* prevent premature breadcrumb wrapping on mobile */
|
||||
min-width: 538px;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.hidden {
|
||||
display: none;
|
||||
display: none !important;
|
||||
}
|
||||
|
||||
.head {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
align-items: center;
|
||||
padding: 1em 1em 0;
|
||||
padding: 0.6em 1em;
|
||||
position: sticky;
|
||||
top: 0;
|
||||
background-color: white;
|
||||
}
|
||||
|
||||
.breadcrumb {
|
||||
font-size: 1.25em;
|
||||
padding-right: 0.6em;
|
||||
word-break: break-all;
|
||||
}
|
||||
|
||||
.breadcrumb > a {
|
||||
.breadcrumb>a {
|
||||
color: #0366d6;
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
.breadcrumb > a:hover {
|
||||
.breadcrumb>a:hover {
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
/* final breadcrumb */
|
||||
.breadcrumb > b {
|
||||
.breadcrumb>b {
|
||||
color: #24292e;
|
||||
}
|
||||
|
||||
.breadcrumb > .separator {
|
||||
.breadcrumb>.separator {
|
||||
color: #586069;
|
||||
padding: 0 0.25em;
|
||||
}
|
||||
|
||||
.breadcrumb svg {
|
||||
height: 100%;
|
||||
fill: rgba(3,47,98,0.5);
|
||||
padding-right: 0.5em;
|
||||
padding-left: 0.5em;
|
||||
fill: rgba(3, 47, 98, 0.5);
|
||||
}
|
||||
|
||||
.toolbox {
|
||||
display: flex;
|
||||
margin-right: 10px;
|
||||
}
|
||||
|
||||
.toolbox>a,
|
||||
.toolbox>div {
|
||||
/* vertically align with breadcrumb text */
|
||||
height: 1.1rem;
|
||||
}
|
||||
|
||||
.toolbox .control {
|
||||
cursor: pointer;
|
||||
padding-left: 0.25em;
|
||||
}
|
||||
|
||||
.upload-file input {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.upload-file label {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.searchbar {
|
||||
@@ -62,7 +87,7 @@ body {
|
||||
transition: all .15s;
|
||||
border: 1px #ddd solid;
|
||||
border-radius: 15px;
|
||||
margin: 0 0 2px 10px;
|
||||
margin-bottom: 2px;
|
||||
}
|
||||
|
||||
.searchbar #search {
|
||||
@@ -72,7 +97,6 @@ body {
|
||||
font-size: 16px;
|
||||
line-height: 16px;
|
||||
padding: 1px;
|
||||
font-family: helvetica neue,luxi sans,Tahoma,hiragino sans gb,STHeiti,sans-serif;
|
||||
background-color: transparent;
|
||||
border: none;
|
||||
outline: none;
|
||||
@@ -84,19 +108,14 @@ body {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.upload-control {
|
||||
cursor: pointer;
|
||||
padding-left: 0.25em;
|
||||
}
|
||||
|
||||
.upload-control input {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.main {
|
||||
padding: 0 1em;
|
||||
}
|
||||
|
||||
.empty-folder {
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
.uploaders-table th,
|
||||
.paths-table th {
|
||||
text-align: left;
|
||||
@@ -110,42 +129,55 @@ body {
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.uploaders-table .cell-name,
|
||||
.paths-table .cell-name {
|
||||
width: 500px;
|
||||
}
|
||||
|
||||
.uploaders-table .cell-status {
|
||||
width: 80px;
|
||||
padding-left: 0.6em;
|
||||
}
|
||||
|
||||
.cell-status span {
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
.paths-table thead a {
|
||||
color: unset;
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
.paths-table thead a>span {
|
||||
padding-left: 2px;
|
||||
}
|
||||
|
||||
.paths-table tbody tr:hover {
|
||||
background-color: #fafafa;
|
||||
}
|
||||
|
||||
.paths-table .cell-actions {
|
||||
width: 60px;
|
||||
width: 90px;
|
||||
display: flex;
|
||||
padding-left: 0.6em;
|
||||
padding-left: 0.5em;
|
||||
}
|
||||
|
||||
.paths-table .cell-mtime {
|
||||
width: 120px;
|
||||
padding-left: 0.6em;
|
||||
padding-left: 0.5em;
|
||||
font-variant-numeric: tabular-nums;
|
||||
}
|
||||
|
||||
.paths-table .cell-size {
|
||||
text-align: right;
|
||||
width: 70px;
|
||||
padding-left: 0.6em;
|
||||
padding-left: 0.5em;
|
||||
font-variant-numeric: tabular-nums;
|
||||
}
|
||||
|
||||
|
||||
.path svg {
|
||||
height: 100%;
|
||||
fill: rgba(3,47,98,0.5);
|
||||
height: 16px;
|
||||
fill: rgba(3, 47, 98, 0.5);
|
||||
padding-right: 0.5em;
|
||||
vertical-align: text-top;
|
||||
}
|
||||
|
||||
.path {
|
||||
display: flex;
|
||||
list-style: none;
|
||||
}
|
||||
|
||||
@@ -156,6 +188,8 @@ body {
|
||||
overflow: hidden;
|
||||
display: block;
|
||||
text-decoration: none;
|
||||
max-width: calc(100vw - 375px);
|
||||
min-width: 170px;
|
||||
}
|
||||
|
||||
.path a:hover {
|
||||
@@ -163,7 +197,8 @@ body {
|
||||
}
|
||||
|
||||
.action-btn {
|
||||
padding-left: 0.4em;
|
||||
padding-right: 0.3em;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.uploaders-table {
|
||||
@@ -173,3 +208,100 @@ body {
|
||||
.uploader {
|
||||
padding-right: 1em;
|
||||
}
|
||||
|
||||
.editor {
|
||||
width: 100%;
|
||||
height: calc(100vh - 5rem);
|
||||
border: 1px solid #ced4da;
|
||||
outline: none;
|
||||
padding: 5px;
|
||||
}
|
||||
|
||||
.toolbox-right {
|
||||
margin-left: auto;
|
||||
margin-right: 2em;
|
||||
}
|
||||
|
||||
.login-btn {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.save-btn {
|
||||
cursor: pointer;
|
||||
-webkit-user-select: none;
|
||||
user-select: none;
|
||||
}
|
||||
|
||||
.logout-btn {
|
||||
cursor: pointer;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.user-name {
|
||||
padding-left: 3px;
|
||||
}
|
||||
|
||||
.not-editable {
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
.retry-btn {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
@media (min-width: 768px) {
|
||||
.path a {
|
||||
min-width: 400px;
|
||||
}
|
||||
}
|
||||
|
||||
/* dark theme */
|
||||
@media (prefers-color-scheme: dark) {
|
||||
body {
|
||||
background-color: #000;
|
||||
}
|
||||
|
||||
html,
|
||||
.breadcrumb>b,
|
||||
.searchbar #search {
|
||||
color: #fff;
|
||||
}
|
||||
|
||||
.uploaders-table th,
|
||||
.paths-table th {
|
||||
color: #ddd;
|
||||
}
|
||||
|
||||
svg,
|
||||
.path svg,
|
||||
.breadcrumb svg {
|
||||
fill: #fff;
|
||||
}
|
||||
|
||||
.head {
|
||||
background-color: #111;
|
||||
}
|
||||
|
||||
.searchbar {
|
||||
background-color: #111;
|
||||
border-color: #fff6;
|
||||
}
|
||||
|
||||
.searchbar svg {
|
||||
fill: #fff6;
|
||||
}
|
||||
|
||||
.path a {
|
||||
color: #3191ff;
|
||||
}
|
||||
|
||||
.paths-table tbody tr:hover {
|
||||
background-color: #1a1a1a;
|
||||
}
|
||||
|
||||
.editor {
|
||||
background: black;
|
||||
color: white;
|
||||
}
|
||||
}
|
||||
@@ -1,59 +1,132 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<html lang="en-US">
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8" />
|
||||
<meta name="viewport" content="width=device-width" />
|
||||
__SLOT__
|
||||
<noscript>
|
||||
<meta http-equiv="refresh" content="0; url=?noscript">
|
||||
</noscript>
|
||||
<link rel="icon" type="image/x-icon" href="__ASSETS_PREFIX__favicon.ico">
|
||||
<link rel="stylesheet" href="__ASSETS_PREFIX__index.css">
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<div class="head">
|
||||
<div class="breadcrumb"></div>
|
||||
<div class="toolbox">
|
||||
<div>
|
||||
<a href="?zip" title="Download folder as a .zip file">
|
||||
<svg width="16" height="16" viewBox="0 0 16 16"><path d="M.5 9.9a.5.5 0 0 1 .5.5v2.5a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-2.5a.5.5 0 0 1 1 0v2.5a2 2 0 0 1-2 2H2a2 2 0 0 1-2-2v-2.5a.5.5 0 0 1 .5-.5z"/><path d="M7.646 11.854a.5.5 0 0 0 .708 0l3-3a.5.5 0 0 0-.708-.708L8.5 10.293V1.5a.5.5 0 0 0-1 0v8.793L5.354 8.146a.5.5 0 1 0-.708.708l3 3z"/></svg>
|
||||
</a>
|
||||
<a href="" class="control download hidden" title="Download file" download="">
|
||||
<svg width="16" height="16" viewBox="0 0 16 16">
|
||||
<path
|
||||
d="M.5 9.9a.5.5 0 0 1 .5.5v2.5a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-2.5a.5.5 0 0 1 1 0v2.5a2 2 0 0 1-2 2H2a2 2 0 0 1-2-2v-2.5a.5.5 0 0 1 .5-.5z" />
|
||||
<path
|
||||
d="M7.646 11.854a.5.5 0 0 0 .708 0l3-3a.5.5 0 0 0-.708-.708L8.5 10.293V1.5a.5.5 0 0 0-1 0v8.793L5.354 8.146a.5.5 0 1 0-.708.708l3 3z" />
|
||||
</svg>
|
||||
</a>
|
||||
<div class="control move-file hidden" title="Move & Rename">
|
||||
<svg class="icon-move" width="16" height="16" viewBox="0 0 16 16">
|
||||
<path fill-rule="evenodd"
|
||||
d="M1.5 1.5A.5.5 0 0 0 1 2v4.8a2.5 2.5 0 0 0 2.5 2.5h9.793l-3.347 3.346a.5.5 0 0 0 .708.708l4.2-4.2a.5.5 0 0 0 0-.708l-4-4a.5.5 0 0 0-.708.708L13.293 8.3H3.5A1.5 1.5 0 0 1 2 6.8V2a.5.5 0 0 0-.5-.5z">
|
||||
</path>
|
||||
</svg>
|
||||
</div>
|
||||
<div class="upload-control hidden" title="Upload files">
|
||||
<div class="control delete-file hidden" title="Delete">
|
||||
<svg class="icon-delete" width="16" height="16" fill="currentColor" viewBox="0 0 16 16">
|
||||
<path
|
||||
d="M6.854 7.146a.5.5 0 1 0-.708.708L7.293 9l-1.147 1.146a.5.5 0 0 0 .708.708L8 9.707l1.146 1.147a.5.5 0 0 0 .708-.708L8.707 9l1.147-1.146a.5.5 0 0 0-.708-.708L8 8.293 6.854 7.146z" />
|
||||
<path
|
||||
d="M14 14V4.5L9.5 0H4a2 2 0 0 0-2 2v12a2 2 0 0 0 2 2h8a2 2 0 0 0 2-2zM9.5 3A1.5 1.5 0 0 0 11 4.5h2V14a1 1 0 0 1-1 1H4a1 1 0 0 1-1-1V2a1 1 0 0 1 1-1h5.5v2z" />
|
||||
</svg>
|
||||
</div>
|
||||
<div class="control upload-file hidden" title="Upload files/folders">
|
||||
<label for="file">
|
||||
<svg width="16" height="16" viewBox="0 0 16 16"><path d="M.5 9.9a.5.5 0 0 1 .5.5v2.5a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-2.5a.5.5 0 0 1 1 0v2.5a2 2 0 0 1-2 2H2a2 2 0 0 1-2-2v-2.5a.5.5 0 0 1 .5-.5z"/><path d="M7.646 1.146a.5.5 0 0 1 .708 0l3 3a.5.5 0 0 1-.708.708L8.5 2.707V11.5a.5.5 0 0 1-1 0V2.707L5.354 4.854a.5.5 0 1 1-.708-.708l3-3z"/></svg>
|
||||
<svg width="16" height="16" viewBox="0 0 16 16">
|
||||
<path
|
||||
d="M.5 9.9a.5.5 0 0 1 .5.5v2.5a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-2.5a.5.5 0 0 1 1 0v2.5a2 2 0 0 1-2 2H2a2 2 0 0 1-2-2v-2.5a.5.5 0 0 1 .5-.5z" />
|
||||
<path
|
||||
d="M7.646 1.146a.5.5 0 0 1 .708 0l3 3a.5.5 0 0 1-.708.708L8.5 2.707V11.5a.5.5 0 0 1-1 0V2.707L5.354 4.854a.5.5 0 1 1-.708-.708l3-3z" />
|
||||
</svg>
|
||||
</label>
|
||||
<input type="file" id="file" name="file" multiple>
|
||||
<input type="file" id="file" title="Upload files/folders" name="file" multiple>
|
||||
</div>
|
||||
<div class="control new-folder hidden" title="New folder">
|
||||
<svg width="16" height="16" viewBox="0 0 16 16">
|
||||
<path
|
||||
d="m.5 3 .04.87a1.99 1.99 0 0 0-.342 1.311l.637 7A2 2 0 0 0 2.826 14H9v-1H2.826a1 1 0 0 1-.995-.91l-.637-7A1 1 0 0 1 2.19 4h11.62a1 1 0 0 1 .996 1.09L14.54 8h1.005l.256-2.819A2 2 0 0 0 13.81 3H9.828a2 2 0 0 1-1.414-.586l-.828-.828A2 2 0 0 0 6.172 1H2.5a2 2 0 0 0-2 2zm5.672-1a1 1 0 0 1 .707.293L7.586 3H2.19c-.24 0-.47.042-.683.12L1.5 2.98a1 1 0 0 1 1-.98h3.672z" />
|
||||
<path
|
||||
d="M13.5 10a.5.5 0 0 1 .5.5V12h1.5a.5.5 0 1 1 0 1H14v1.5a.5.5 0 1 1-1 0V13h-1.5a.5.5 0 0 1 0-1H13v-1.5a.5.5 0 0 1 .5-.5z" />
|
||||
</svg>
|
||||
</div>
|
||||
<div class="control new-file hidden" title="New File">
|
||||
<svg width="16" height="16" viewBox="0 0 16 16">
|
||||
<path
|
||||
d="M8 6.5a.5.5 0 0 1 .5.5v1.5H10a.5.5 0 0 1 0 1H8.5V11a.5.5 0 0 1-1 0V9.5H6a.5.5 0 0 1 0-1h1.5V7a.5.5 0 0 1 .5-.5z" />
|
||||
<path
|
||||
d="M14 4.5V14a2 2 0 0 1-2 2H4a2 2 0 0 1-2-2V2a2 2 0 0 1 2-2h5.5L14 4.5zm-3 0A1.5 1.5 0 0 1 9.5 3V1H4a1 1 0 0 0-1 1v12a1 1 0 0 0 1 1h8a1 1 0 0 0 1-1V4.5h-2z" />
|
||||
</svg>
|
||||
</div>
|
||||
</div>
|
||||
<form class="searchbar">
|
||||
<form class="searchbar hidden">
|
||||
<div class="icon">
|
||||
<svg width="16" height="16" fill="currentColor" viewBox="0 0 16 16"><path d="M11.742 10.344a6.5 6.5 0 1 0-1.397 1.398h-.001c.03.04.062.078.098.115l3.85 3.85a1 1 0 0 0 1.415-1.414l-3.85-3.85a1.007 1.007 0 0 0-.115-.1zM12 6.5a5.5 5.5 0 1 1-11 0 5.5 5.5 0 0 1 11 0z"/></svg>
|
||||
<svg width="16" height="16" viewBox="0 0 16 16">
|
||||
<path
|
||||
d="M11.742 10.344a6.5 6.5 0 1 0-1.397 1.398h-.001c.03.04.062.078.098.115l3.85 3.85a1 1 0 0 0 1.415-1.414l-3.85-3.85a1.007 1.007 0 0 0-.115-.1zM12 6.5a5.5 5.5 0 1 1-11 0 5.5 5.5 0 0 1 11 0z" />
|
||||
</svg>
|
||||
</div>
|
||||
<input id="search" name="q" type="text" maxlength="128" autocomplete="off" tabindex="1">
|
||||
<input id="search" title="Searching for folders or files" name="q" type="text" maxlength="128" autocomplete="off"
|
||||
tabindex="1">
|
||||
<input type="submit" hidden />
|
||||
</form>
|
||||
<div class="toolbox-right">
|
||||
<div class="login-btn hidden" title="Login">
|
||||
<svg width="16" height="16" viewBox="0 0 16 16">
|
||||
<path fill-rule="evenodd"
|
||||
d="M6 3.5a.5.5 0 0 1 .5-.5h8a.5.5 0 0 1 .5.5v9a.5.5 0 0 1-.5.5h-8a.5.5 0 0 1-.5-.5v-2a.5.5 0 0 0-1 0v2A1.5 1.5 0 0 0 6.5 14h8a1.5 1.5 0 0 0 1.5-1.5v-9A1.5 1.5 0 0 0 14.5 2h-8A1.5 1.5 0 0 0 5 3.5v2a.5.5 0 0 0 1 0v-2z" />
|
||||
<path fill-rule="evenodd"
|
||||
d="M11.854 8.354a.5.5 0 0 0 0-.708l-3-3a.5.5 0 1 0-.708.708L10.293 7.5H1.5a.5.5 0 0 0 0 1h8.793l-2.147 2.146a.5.5 0 0 0 .708.708l3-3z" />
|
||||
</svg>
|
||||
</div>
|
||||
<div class="logout-btn hidden" title="Logout">
|
||||
<svg width="16" height="16" viewBox="0 0 16 16">
|
||||
<path fill-rule="evenodd" d="M10 3.5a.5.5 0 0 0-.5-.5h-8a.5.5 0 0 0-.5.5v9a.5.5 0 0 0 .5.5h8a.5.5 0 0 0 .5-.5v-2a.5.5 0 0 1 1 0v2A1.5 1.5 0 0 1 9.5 14h-8A1.5 1.5 0 0 1 0 12.5v-9A1.5 1.5 0 0 1 1.5 2h8A1.5 1.5 0 0 1 11 3.5v2a.5.5 0 0 1-1 0z"/>
|
||||
<path fill-rule="evenodd" d="M4.146 8.354a.5.5 0 0 1 0-.708l3-3a.5.5 0 1 1 .708.708L5.707 7.5H14.5a.5.5 0 0 1 0 1H5.707l2.147 2.146a.5.5 0 0 1-.708.708z"/>
|
||||
</svg>
|
||||
<span class="user-name"></span>
|
||||
</div>
|
||||
<div class="save-btn hidden" title="Save file">
|
||||
<svg viewBox="0 0 1024 1024" width="24" height="24">
|
||||
<path
|
||||
d="M426.666667 682.666667v42.666666h170.666666v-42.666666h-170.666666z m-42.666667-85.333334h298.666667v128h42.666666V418.133333L605.866667 298.666667H298.666667v426.666666h42.666666v-128h42.666667z m260.266667-384L810.666667 379.733333V810.666667H213.333333V213.333333h430.933334zM341.333333 341.333333h85.333334v170.666667H341.333333V341.333333z"
|
||||
fill="#444444" p-id="8311"></path>
|
||||
</svg>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="main">
|
||||
<table class="uploaders-table hidden">
|
||||
<thead>
|
||||
<tr>
|
||||
<th class="cell-name">Name</th>
|
||||
<th class="cell-status">Status</th>
|
||||
</tr>
|
||||
</thead>
|
||||
</table>
|
||||
<table class="paths-table hidden">
|
||||
<thead>
|
||||
<tr>
|
||||
<th class="cell-name">Name</th>
|
||||
<th class="cell-mtime">Date modify</th>
|
||||
<th class="cell-size">Size</th>
|
||||
<th class="cell-actions">Actions</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
</tbody>
|
||||
</table>
|
||||
<div class="index-page hidden">
|
||||
<div class="empty-folder hidden"></div>
|
||||
<table class="uploaders-table hidden">
|
||||
<thead>
|
||||
<tr>
|
||||
<th class="cell-name" colspan="2">Name</th>
|
||||
<th class="cell-status">Progress</th>
|
||||
</tr>
|
||||
</thead>
|
||||
</table>
|
||||
<table class="paths-table hidden">
|
||||
<thead>
|
||||
</thead>
|
||||
<tbody>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
<div class="editor-page hidden">
|
||||
<div class="not-editable hidden"></div>
|
||||
<textarea id="editor" class="editor hidden" aria-label="Editor" cols="10"></textarea>
|
||||
</div>
|
||||
</div>
|
||||
<script>
|
||||
window.addEventListener("DOMContentLoaded", ready);
|
||||
</script>
|
||||
<template id="index-data">__INDEX_DATA__</template>
|
||||
<script src="__ASSETS_PREFIX__index.js"></script>
|
||||
</body>
|
||||
|
||||
</html>
|
||||
984
assets/index.js
984
assets/index.js
File diff suppressed because it is too large
Load Diff
792
src/args.rs
792
src/args.rs
@@ -1,192 +1,457 @@
|
||||
use clap::crate_description;
|
||||
use clap::{Arg, ArgMatches};
|
||||
use rustls::{Certificate, PrivateKey};
|
||||
use std::net::{IpAddr, SocketAddr};
|
||||
use anyhow::{bail, Context, Result};
|
||||
use async_zip::Compression;
|
||||
use clap::builder::{PossibleValue, PossibleValuesParser};
|
||||
use clap::{value_parser, Arg, ArgAction, ArgMatches, Command, ValueEnum};
|
||||
use clap_complete::{generate, Generator, Shell};
|
||||
use serde::{Deserialize, Deserializer};
|
||||
use smart_default::SmartDefault;
|
||||
use std::env;
|
||||
use std::net::IpAddr;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::{env, fs, io};
|
||||
|
||||
use crate::auth::parse_auth;
|
||||
use crate::BoxResult;
|
||||
use crate::auth::AccessControl;
|
||||
use crate::http_logger::HttpLogger;
|
||||
use crate::utils::encode_uri;
|
||||
|
||||
const ABOUT: &str = concat!("\n", crate_description!()); // Add extra newline.
|
||||
|
||||
fn app() -> clap::Command<'static> {
|
||||
clap::command!()
|
||||
.about(ABOUT)
|
||||
pub fn build_cli() -> Command {
|
||||
let app = Command::new(env!("CARGO_CRATE_NAME"))
|
||||
.version(env!("CARGO_PKG_VERSION"))
|
||||
.author(env!("CARGO_PKG_AUTHORS"))
|
||||
.about(concat!(
|
||||
env!("CARGO_PKG_DESCRIPTION"),
|
||||
" - ",
|
||||
env!("CARGO_PKG_REPOSITORY")
|
||||
))
|
||||
.arg(
|
||||
Arg::new("address")
|
||||
Arg::new("serve-path")
|
||||
.env("DUFS_SERVE_PATH")
|
||||
.hide_env(true)
|
||||
.value_parser(value_parser!(PathBuf))
|
||||
.help("Specific path to serve [default: .]"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("config")
|
||||
.env("DUFS_CONFIG")
|
||||
.hide_env(true)
|
||||
.short('c')
|
||||
.long("config")
|
||||
.value_parser(value_parser!(PathBuf))
|
||||
.help("Specify configuration file")
|
||||
.value_name("file"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("bind")
|
||||
.env("DUFS_BIND")
|
||||
.hide_env(true)
|
||||
.short('b')
|
||||
.long("bind")
|
||||
.default_value("0.0.0.0")
|
||||
.help("Specify bind address")
|
||||
.value_name("address"),
|
||||
.help("Specify bind address or unix socket")
|
||||
.action(ArgAction::Append)
|
||||
.value_delimiter(',')
|
||||
.value_name("addrs"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("port")
|
||||
.env("DUFS_PORT")
|
||||
.hide_env(true)
|
||||
.short('p')
|
||||
.long("port")
|
||||
.default_value("5000")
|
||||
.help("Specify port to listen on")
|
||||
.value_parser(value_parser!(u16))
|
||||
.help("Specify port to listen on [default: 5000]")
|
||||
.value_name("port"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("path")
|
||||
.default_value(".")
|
||||
.allow_invalid_utf8(true)
|
||||
.help("Path to a root directory for serving files"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("path-prefix")
|
||||
.env("DUFS_PATH_PREFIX")
|
||||
.hide_env(true)
|
||||
.long("path-prefix")
|
||||
.value_name("path")
|
||||
.help("Specify an url path prefix"),
|
||||
.help("Specify a path prefix"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("hidden")
|
||||
.env("DUFS_HIDDEN")
|
||||
.hide_env(true)
|
||||
.long("hidden")
|
||||
.action(ArgAction::Append)
|
||||
.value_delimiter(',')
|
||||
.help("Hide paths from directory listings, e.g. tmp,*.log,*.lock")
|
||||
.value_name("value"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("auth")
|
||||
.env("DUFS_AUTH")
|
||||
.hide_env(true)
|
||||
.short('a')
|
||||
.long("auth")
|
||||
.help("Add auth roles, e.g. user:pass@/dir1:rw,/dir2")
|
||||
.action(ArgAction::Append)
|
||||
.value_name("rules"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("auth-method")
|
||||
.hide(true)
|
||||
.env("DUFS_AUTH_METHOD")
|
||||
.hide_env(true)
|
||||
.long("auth-method")
|
||||
.help("Select auth method")
|
||||
.value_parser(PossibleValuesParser::new(["basic", "digest"]))
|
||||
.default_value("digest")
|
||||
.value_name("value"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("allow-all")
|
||||
.env("DUFS_ALLOW_ALL")
|
||||
.hide_env(true)
|
||||
.short('A')
|
||||
.long("allow-all")
|
||||
.action(ArgAction::SetTrue)
|
||||
.help("Allow all operations"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("allow-upload")
|
||||
.env("DUFS_ALLOW_UPLOAD")
|
||||
.hide_env(true)
|
||||
.long("allow-upload")
|
||||
.action(ArgAction::SetTrue)
|
||||
.help("Allow upload files/folders"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("allow-delete")
|
||||
.env("DUFS_ALLOW_DELETE")
|
||||
.hide_env(true)
|
||||
.long("allow-delete")
|
||||
.action(ArgAction::SetTrue)
|
||||
.help("Allow delete files/folders"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("allow-search")
|
||||
.env("DUFS_ALLOW_SEARCH")
|
||||
.hide_env(true)
|
||||
.long("allow-search")
|
||||
.action(ArgAction::SetTrue)
|
||||
.help("Allow search files/folders"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("allow-symlink")
|
||||
.env("DUFS_ALLOW_SYMLINK")
|
||||
.hide_env(true)
|
||||
.long("allow-symlink")
|
||||
.action(ArgAction::SetTrue)
|
||||
.help("Allow symlink to files/folders outside root directory"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("render-index")
|
||||
.long("render-index")
|
||||
.help("Render index.html when requesting a directory"),
|
||||
Arg::new("allow-archive")
|
||||
.env("DUFS_ALLOW_ARCHIVE")
|
||||
.hide_env(true)
|
||||
.long("allow-archive")
|
||||
.action(ArgAction::SetTrue)
|
||||
.help("Allow download folders as archive file"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("render-spa")
|
||||
.long("render-spa")
|
||||
.help("Render for single-page application"),
|
||||
Arg::new("allow-hash")
|
||||
.env("DUFS_ALLOW_HASH")
|
||||
.hide_env(true)
|
||||
.long("allow-hash")
|
||||
.action(ArgAction::SetTrue)
|
||||
.help("Allow ?hash query to get file sha256 hash"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("auth")
|
||||
.short('a')
|
||||
.display_order(1)
|
||||
.long("auth")
|
||||
.help("Use HTTP authentication")
|
||||
.value_name("user:pass"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("no-auth-access")
|
||||
.display_order(1)
|
||||
.long("no-auth-access")
|
||||
.help("Not required auth when access static files"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("cors")
|
||||
.long("cors")
|
||||
Arg::new("enable-cors")
|
||||
.env("DUFS_ENABLE_CORS")
|
||||
.hide_env(true)
|
||||
.long("enable-cors")
|
||||
.action(ArgAction::SetTrue)
|
||||
.help("Enable CORS, sets `Access-Control-Allow-Origin: *`"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("render-index")
|
||||
.env("DUFS_RENDER_INDEX")
|
||||
.hide_env(true)
|
||||
.long("render-index")
|
||||
.action(ArgAction::SetTrue)
|
||||
.help("Serve index.html when requesting a directory, returns 404 if not found index.html"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("render-try-index")
|
||||
.env("DUFS_RENDER_TRY_INDEX")
|
||||
.hide_env(true)
|
||||
.long("render-try-index")
|
||||
.action(ArgAction::SetTrue)
|
||||
.help("Serve index.html when requesting a directory, returns directory listing if not found index.html"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("render-spa")
|
||||
.env("DUFS_RENDER_SPA")
|
||||
.hide_env(true)
|
||||
.long("render-spa")
|
||||
.action(ArgAction::SetTrue)
|
||||
.help("Serve SPA(Single Page Application)"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("assets")
|
||||
.env("DUFS_ASSETS")
|
||||
.hide_env(true)
|
||||
.long("assets")
|
||||
.help("Set the path to the assets directory for overriding the built-in assets")
|
||||
.value_parser(value_parser!(PathBuf))
|
||||
.value_name("path")
|
||||
)
|
||||
.arg(
|
||||
Arg::new("log-format")
|
||||
.env("DUFS_LOG_FORMAT")
|
||||
.hide_env(true)
|
||||
.long("log-format")
|
||||
.value_name("format")
|
||||
.help("Customize http log format"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("log-file")
|
||||
.env("DUFS_LOG_FILE")
|
||||
.hide_env(true)
|
||||
.long("log-file")
|
||||
.value_name("file")
|
||||
.value_parser(value_parser!(PathBuf))
|
||||
.help("Specify the file to save logs to, other than stdout/stderr"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("compress")
|
||||
.env("DUFS_COMPRESS")
|
||||
.hide_env(true)
|
||||
.value_parser(clap::builder::EnumValueParser::<Compress>::new())
|
||||
.long("compress")
|
||||
.value_name("level")
|
||||
.help("Set zip compress level [default: low]")
|
||||
)
|
||||
.arg(
|
||||
Arg::new("completions")
|
||||
.long("completions")
|
||||
.value_name("shell")
|
||||
.value_parser(value_parser!(Shell))
|
||||
.help("Print shell completion script for <shell>"),
|
||||
);
|
||||
|
||||
#[cfg(feature = "tls")]
|
||||
let app = app
|
||||
.arg(
|
||||
Arg::new("tls-cert")
|
||||
.env("DUFS_TLS_CERT")
|
||||
.hide_env(true)
|
||||
.long("tls-cert")
|
||||
.value_name("path")
|
||||
.value_parser(value_parser!(PathBuf))
|
||||
.help("Path to an SSL/TLS certificate to serve with HTTPS"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("tls-key")
|
||||
.env("DUFS_TLS_KEY")
|
||||
.hide_env(true)
|
||||
.long("tls-key")
|
||||
.value_name("path")
|
||||
.value_parser(value_parser!(PathBuf))
|
||||
.help("Path to the SSL/TLS certificate's private key"),
|
||||
)
|
||||
);
|
||||
|
||||
app
|
||||
}
|
||||
|
||||
pub fn matches() -> ArgMatches {
|
||||
app().get_matches()
|
||||
pub fn print_completions<G: Generator>(gen: G, cmd: &mut Command) {
|
||||
generate(gen, cmd, cmd.get_name().to_string(), &mut std::io::stdout());
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
#[derive(Debug, Deserialize, SmartDefault, PartialEq)]
|
||||
#[serde(default)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub struct Args {
|
||||
pub addr: SocketAddr,
|
||||
pub path: PathBuf,
|
||||
#[serde(default = "default_serve_path")]
|
||||
#[default(default_serve_path())]
|
||||
pub serve_path: PathBuf,
|
||||
#[serde(deserialize_with = "deserialize_bind_addrs")]
|
||||
#[serde(rename = "bind")]
|
||||
#[serde(default = "default_addrs")]
|
||||
#[default(default_addrs())]
|
||||
pub addrs: Vec<BindAddr>,
|
||||
#[serde(default = "default_port")]
|
||||
#[default(default_port())]
|
||||
pub port: u16,
|
||||
#[serde(skip)]
|
||||
pub path_is_file: bool,
|
||||
pub path_prefix: String,
|
||||
#[serde(skip)]
|
||||
pub uri_prefix: String,
|
||||
pub auth: Option<(String, String)>,
|
||||
pub no_auth_access: bool,
|
||||
#[serde(deserialize_with = "deserialize_string_or_vec")]
|
||||
pub hidden: Vec<String>,
|
||||
#[serde(deserialize_with = "deserialize_access_control")]
|
||||
pub auth: AccessControl,
|
||||
pub allow_all: bool,
|
||||
pub allow_upload: bool,
|
||||
pub allow_delete: bool,
|
||||
pub allow_search: bool,
|
||||
pub allow_symlink: bool,
|
||||
pub allow_archive: bool,
|
||||
pub allow_hash: bool,
|
||||
pub render_index: bool,
|
||||
pub render_spa: bool,
|
||||
pub cors: bool,
|
||||
pub tls: Option<(Vec<Certificate>, PrivateKey)>,
|
||||
pub render_try_index: bool,
|
||||
pub enable_cors: bool,
|
||||
pub assets: Option<PathBuf>,
|
||||
#[serde(deserialize_with = "deserialize_log_http")]
|
||||
#[serde(rename = "log-format")]
|
||||
pub http_logger: HttpLogger,
|
||||
pub log_file: Option<PathBuf>,
|
||||
pub compress: Compress,
|
||||
pub tls_cert: Option<PathBuf>,
|
||||
pub tls_key: Option<PathBuf>,
|
||||
}
|
||||
|
||||
impl Args {
|
||||
/// Parse command-line arguments.
|
||||
///
|
||||
/// If a parsing error ocurred, exit the process and print out informative
|
||||
/// If a parsing error occurred, exit the process and print out informative
|
||||
/// error message to user.
|
||||
pub fn parse(matches: ArgMatches) -> BoxResult<Args> {
|
||||
let ip = matches.value_of("address").unwrap_or_default();
|
||||
let port = matches.value_of_t::<u16>("port")?;
|
||||
let addr = to_addr(ip, port)?;
|
||||
let path = Args::parse_path(matches.value_of_os("path").unwrap_or_default())?;
|
||||
let path_prefix = matches
|
||||
.value_of("path-prefix")
|
||||
.map(|v| v.trim_matches('/').to_owned())
|
||||
.unwrap_or_default();
|
||||
let uri_prefix = if path_prefix.is_empty() {
|
||||
pub fn parse(matches: ArgMatches) -> Result<Args> {
|
||||
let mut args = Self::default();
|
||||
|
||||
if let Some(config_path) = matches.get_one::<PathBuf>("config") {
|
||||
let contents = std::fs::read_to_string(config_path)
|
||||
.with_context(|| format!("Failed to read config at {}", config_path.display()))?;
|
||||
args = serde_yaml::from_str(&contents)
|
||||
.with_context(|| format!("Failed to load config at {}", config_path.display()))?;
|
||||
}
|
||||
|
||||
if let Some(path) = matches.get_one::<PathBuf>("serve-path") {
|
||||
args.serve_path.clone_from(path)
|
||||
}
|
||||
|
||||
args.serve_path = Self::sanitize_path(args.serve_path)?;
|
||||
|
||||
if let Some(port) = matches.get_one::<u16>("port") {
|
||||
args.port = *port
|
||||
}
|
||||
|
||||
if let Some(addrs) = matches.get_many::<String>("bind") {
|
||||
let addrs: Vec<_> = addrs.map(|v| v.as_str()).collect();
|
||||
args.addrs = BindAddr::parse_addrs(&addrs)?;
|
||||
}
|
||||
|
||||
args.path_is_file = args.serve_path.metadata()?.is_file();
|
||||
if let Some(path_prefix) = matches.get_one::<String>("path-prefix") {
|
||||
args.path_prefix.clone_from(path_prefix)
|
||||
}
|
||||
args.path_prefix = args.path_prefix.trim_matches('/').to_string();
|
||||
|
||||
args.uri_prefix = if args.path_prefix.is_empty() {
|
||||
"/".to_owned()
|
||||
} else {
|
||||
format!("/{}/", &path_prefix)
|
||||
};
|
||||
let cors = matches.is_present("cors");
|
||||
let auth = match matches.value_of("auth") {
|
||||
Some(auth) => Some(parse_auth(auth)?),
|
||||
None => None,
|
||||
};
|
||||
let no_auth_access = matches.is_present("no-auth-access");
|
||||
let allow_upload = matches.is_present("allow-all") || matches.is_present("allow-upload");
|
||||
let allow_delete = matches.is_present("allow-all") || matches.is_present("allow-delete");
|
||||
let allow_symlink = matches.is_present("allow-all") || matches.is_present("allow-symlink");
|
||||
let render_index = matches.is_present("render-index");
|
||||
let render_spa = matches.is_present("render-spa");
|
||||
let tls = match (matches.value_of("tls-cert"), matches.value_of("tls-key")) {
|
||||
(Some(certs_file), Some(key_file)) => {
|
||||
let certs = load_certs(certs_file)?;
|
||||
let key = load_private_key(key_file)?;
|
||||
Some((certs, key))
|
||||
}
|
||||
_ => None,
|
||||
format!("/{}/", &encode_uri(&args.path_prefix))
|
||||
};
|
||||
|
||||
Ok(Args {
|
||||
addr,
|
||||
path,
|
||||
path_prefix,
|
||||
uri_prefix,
|
||||
auth,
|
||||
no_auth_access,
|
||||
cors,
|
||||
allow_delete,
|
||||
allow_upload,
|
||||
allow_symlink,
|
||||
render_index,
|
||||
render_spa,
|
||||
tls,
|
||||
})
|
||||
if let Some(hidden) = matches.get_many::<String>("hidden") {
|
||||
args.hidden = hidden.cloned().collect();
|
||||
} else {
|
||||
let mut hidden = vec![];
|
||||
std::mem::swap(&mut args.hidden, &mut hidden);
|
||||
args.hidden = hidden
|
||||
.into_iter()
|
||||
.flat_map(|v| v.split(',').map(|v| v.to_string()).collect::<Vec<String>>())
|
||||
.collect();
|
||||
}
|
||||
|
||||
if !args.enable_cors {
|
||||
args.enable_cors = matches.get_flag("enable-cors");
|
||||
}
|
||||
|
||||
if let Some(rules) = matches.get_many::<String>("auth") {
|
||||
let rules: Vec<_> = rules.map(|v| v.as_str()).collect();
|
||||
args.auth = AccessControl::new(&rules)?;
|
||||
}
|
||||
|
||||
if !args.allow_all {
|
||||
args.allow_all = matches.get_flag("allow-all");
|
||||
}
|
||||
|
||||
let allow_all = args.allow_all;
|
||||
|
||||
if !args.allow_upload {
|
||||
args.allow_upload = allow_all || matches.get_flag("allow-upload");
|
||||
}
|
||||
if !args.allow_delete {
|
||||
args.allow_delete = allow_all || matches.get_flag("allow-delete");
|
||||
}
|
||||
if !args.allow_search {
|
||||
args.allow_search = allow_all || matches.get_flag("allow-search");
|
||||
}
|
||||
if !args.allow_symlink {
|
||||
args.allow_symlink = allow_all || matches.get_flag("allow-symlink");
|
||||
}
|
||||
if !args.allow_hash {
|
||||
args.allow_hash = allow_all || matches.get_flag("allow-hash");
|
||||
}
|
||||
if !args.allow_archive {
|
||||
args.allow_archive = allow_all || matches.get_flag("allow-archive");
|
||||
}
|
||||
if !args.render_index {
|
||||
args.render_index = matches.get_flag("render-index");
|
||||
}
|
||||
|
||||
if !args.render_try_index {
|
||||
args.render_try_index = matches.get_flag("render-try-index");
|
||||
}
|
||||
|
||||
if !args.render_spa {
|
||||
args.render_spa = matches.get_flag("render-spa");
|
||||
}
|
||||
|
||||
if let Some(assets_path) = matches.get_one::<PathBuf>("assets") {
|
||||
args.assets = Some(assets_path.clone());
|
||||
}
|
||||
|
||||
if let Some(assets_path) = &args.assets {
|
||||
args.assets = Some(Args::sanitize_assets_path(assets_path)?);
|
||||
}
|
||||
|
||||
if let Some(log_format) = matches.get_one::<String>("log-format") {
|
||||
args.http_logger = log_format.parse()?;
|
||||
}
|
||||
|
||||
if let Some(log_file) = matches.get_one::<PathBuf>("log-file") {
|
||||
args.log_file = Some(log_file.clone());
|
||||
}
|
||||
|
||||
if let Some(compress) = matches.get_one::<Compress>("compress") {
|
||||
args.compress = *compress;
|
||||
}
|
||||
|
||||
#[cfg(feature = "tls")]
|
||||
{
|
||||
if let Some(tls_cert) = matches.get_one::<PathBuf>("tls-cert") {
|
||||
args.tls_cert = Some(tls_cert.clone())
|
||||
}
|
||||
|
||||
if let Some(tls_key) = matches.get_one::<PathBuf>("tls-key") {
|
||||
args.tls_key = Some(tls_key.clone())
|
||||
}
|
||||
|
||||
match (&args.tls_cert, &args.tls_key) {
|
||||
(Some(_), Some(_)) => {}
|
||||
(Some(_), _) => bail!("No tls-key set"),
|
||||
(_, Some(_)) => bail!("No tls-cert set"),
|
||||
(None, None) => {}
|
||||
}
|
||||
}
|
||||
#[cfg(not(feature = "tls"))]
|
||||
{
|
||||
args.tls_cert = None;
|
||||
args.tls_key = None;
|
||||
}
|
||||
|
||||
Ok(args)
|
||||
}
|
||||
|
||||
/// Parse path.
|
||||
fn parse_path<P: AsRef<Path>>(path: P) -> BoxResult<PathBuf> {
|
||||
fn sanitize_path<P: AsRef<Path>>(path: P) -> Result<PathBuf> {
|
||||
let path = path.as_ref();
|
||||
if !path.exists() {
|
||||
return Err(format!("Path `{}` doesn't exist", path.display()).into());
|
||||
bail!("Path `{}` doesn't exist", path.display());
|
||||
}
|
||||
|
||||
env::current_dir()
|
||||
@@ -194,43 +459,300 @@ impl Args {
|
||||
p.push(path); // If path is absolute, it replaces the current path.
|
||||
std::fs::canonicalize(p)
|
||||
})
|
||||
.map_err(|err| format!("Failed to access path `{}`: {}", path.display(), err,).into())
|
||||
.with_context(|| format!("Failed to access path `{}`", path.display()))
|
||||
}
|
||||
|
||||
fn sanitize_assets_path<P: AsRef<Path>>(path: P) -> Result<PathBuf> {
|
||||
let path = Self::sanitize_path(path)?;
|
||||
if !path.join("index.html").exists() {
|
||||
bail!("Path `{}` doesn't contains index.html", path.display());
|
||||
}
|
||||
Ok(path)
|
||||
}
|
||||
}
|
||||
|
||||
fn to_addr(ip: &str, port: u16) -> BoxResult<SocketAddr> {
|
||||
let ip: IpAddr = ip.parse()?;
|
||||
Ok(SocketAddr::new(ip, port))
|
||||
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub enum BindAddr {
|
||||
IpAddr(IpAddr),
|
||||
#[cfg(unix)]
|
||||
SocketPath(String),
|
||||
}
|
||||
|
||||
// Load public certificate from file.
|
||||
fn load_certs(filename: &str) -> BoxResult<Vec<Certificate>> {
|
||||
// Open certificate file.
|
||||
let certfile =
|
||||
fs::File::open(&filename).map_err(|e| format!("Failed to open {}: {}", &filename, e))?;
|
||||
let mut reader = io::BufReader::new(certfile);
|
||||
|
||||
// Load and return certificate.
|
||||
let certs = rustls_pemfile::certs(&mut reader).map_err(|_| "Failed to load certificate")?;
|
||||
if certs.is_empty() {
|
||||
return Err("Expected at least one certificate".into());
|
||||
impl BindAddr {
|
||||
fn parse_addrs(addrs: &[&str]) -> Result<Vec<Self>> {
|
||||
let mut bind_addrs = vec![];
|
||||
#[cfg(not(unix))]
|
||||
let mut invalid_addrs = vec![];
|
||||
for addr in addrs {
|
||||
match addr.parse::<IpAddr>() {
|
||||
Ok(v) => {
|
||||
bind_addrs.push(BindAddr::IpAddr(v));
|
||||
}
|
||||
Err(_) => {
|
||||
#[cfg(unix)]
|
||||
bind_addrs.push(BindAddr::SocketPath(addr.to_string()));
|
||||
#[cfg(not(unix))]
|
||||
invalid_addrs.push(*addr);
|
||||
}
|
||||
}
|
||||
}
|
||||
#[cfg(not(unix))]
|
||||
if !invalid_addrs.is_empty() {
|
||||
bail!("Invalid bind address `{}`", invalid_addrs.join(","));
|
||||
}
|
||||
Ok(bind_addrs)
|
||||
}
|
||||
Ok(certs.into_iter().map(Certificate).collect())
|
||||
}
|
||||
|
||||
// Load private key from file.
|
||||
fn load_private_key(filename: &str) -> BoxResult<PrivateKey> {
|
||||
// Open keyfile.
|
||||
let keyfile =
|
||||
fs::File::open(&filename).map_err(|e| format!("Failed to open {}: {}", &filename, e))?;
|
||||
let mut reader = io::BufReader::new(keyfile);
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Deserialize, Default)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum Compress {
|
||||
None,
|
||||
#[default]
|
||||
Low,
|
||||
Medium,
|
||||
High,
|
||||
}
|
||||
|
||||
// Load and return a single private key.
|
||||
let keys = rustls_pemfile::rsa_private_keys(&mut reader)
|
||||
.map_err(|e| format!("There was a problem with reading private key: {:?}", e))?;
|
||||
|
||||
if keys.len() != 1 {
|
||||
return Err("Expected a single private key".into());
|
||||
impl ValueEnum for Compress {
|
||||
fn value_variants<'a>() -> &'a [Self] {
|
||||
&[Self::None, Self::Low, Self::Medium, Self::High]
|
||||
}
|
||||
|
||||
fn to_possible_value(&self) -> Option<clap::builder::PossibleValue> {
|
||||
Some(match self {
|
||||
Compress::None => PossibleValue::new("none"),
|
||||
Compress::Low => PossibleValue::new("low"),
|
||||
Compress::Medium => PossibleValue::new("medium"),
|
||||
Compress::High => PossibleValue::new("high"),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl Compress {
|
||||
pub fn to_compression(self) -> Compression {
|
||||
match self {
|
||||
Compress::None => Compression::Stored,
|
||||
Compress::Low => Compression::Deflate,
|
||||
Compress::Medium => Compression::Bz,
|
||||
Compress::High => Compression::Xz,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn deserialize_bind_addrs<'de, D>(deserializer: D) -> Result<Vec<BindAddr>, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
struct StringOrVec;
|
||||
|
||||
impl<'de> serde::de::Visitor<'de> for StringOrVec {
|
||||
type Value = Vec<BindAddr>;
|
||||
|
||||
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
formatter.write_str("string or list of strings")
|
||||
}
|
||||
|
||||
fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
|
||||
where
|
||||
E: serde::de::Error,
|
||||
{
|
||||
BindAddr::parse_addrs(&[s]).map_err(serde::de::Error::custom)
|
||||
}
|
||||
|
||||
fn visit_seq<S>(self, seq: S) -> Result<Self::Value, S::Error>
|
||||
where
|
||||
S: serde::de::SeqAccess<'de>,
|
||||
{
|
||||
let addrs: Vec<&'de str> =
|
||||
Deserialize::deserialize(serde::de::value::SeqAccessDeserializer::new(seq))?;
|
||||
BindAddr::parse_addrs(&addrs).map_err(serde::de::Error::custom)
|
||||
}
|
||||
}
|
||||
|
||||
deserializer.deserialize_any(StringOrVec)
|
||||
}
|
||||
|
||||
fn deserialize_string_or_vec<'de, D>(deserializer: D) -> Result<Vec<String>, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
struct StringOrVec;
|
||||
|
||||
impl<'de> serde::de::Visitor<'de> for StringOrVec {
|
||||
type Value = Vec<String>;
|
||||
|
||||
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
formatter.write_str("string or list of strings")
|
||||
}
|
||||
|
||||
fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
|
||||
where
|
||||
E: serde::de::Error,
|
||||
{
|
||||
Ok(vec![s.to_owned()])
|
||||
}
|
||||
|
||||
fn visit_seq<S>(self, seq: S) -> Result<Self::Value, S::Error>
|
||||
where
|
||||
S: serde::de::SeqAccess<'de>,
|
||||
{
|
||||
Deserialize::deserialize(serde::de::value::SeqAccessDeserializer::new(seq))
|
||||
}
|
||||
}
|
||||
|
||||
deserializer.deserialize_any(StringOrVec)
|
||||
}
|
||||
|
||||
fn deserialize_access_control<'de, D>(deserializer: D) -> Result<AccessControl, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
let rules: Vec<&str> = Vec::deserialize(deserializer)?;
|
||||
AccessControl::new(&rules).map_err(serde::de::Error::custom)
|
||||
}
|
||||
|
||||
fn deserialize_log_http<'de, D>(deserializer: D) -> Result<HttpLogger, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
let value: String = Deserialize::deserialize(deserializer)?;
|
||||
value.parse().map_err(serde::de::Error::custom)
|
||||
}
|
||||
|
||||
fn default_serve_path() -> PathBuf {
|
||||
PathBuf::from(".")
|
||||
}
|
||||
|
||||
fn default_addrs() -> Vec<BindAddr> {
|
||||
BindAddr::parse_addrs(&["0.0.0.0", "::"]).unwrap()
|
||||
}
|
||||
|
||||
fn default_port() -> u16 {
|
||||
5000
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
use assert_fs::prelude::*;
|
||||
|
||||
#[test]
|
||||
fn test_default() {
|
||||
let cli = build_cli();
|
||||
let matches = cli.try_get_matches_from(vec![""]).unwrap();
|
||||
let args = Args::parse(matches).unwrap();
|
||||
let cwd = Args::sanitize_path(std::env::current_dir().unwrap()).unwrap();
|
||||
assert_eq!(args.serve_path, cwd);
|
||||
assert_eq!(args.port, default_port());
|
||||
assert_eq!(args.addrs, default_addrs());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_args_from_cli1() {
|
||||
let tmpdir = assert_fs::TempDir::new().unwrap();
|
||||
let cli = build_cli();
|
||||
let matches = cli
|
||||
.try_get_matches_from(vec![
|
||||
"",
|
||||
"--hidden",
|
||||
"tmp,*.log,*.lock",
|
||||
&tmpdir.to_string_lossy(),
|
||||
])
|
||||
.unwrap();
|
||||
let args = Args::parse(matches).unwrap();
|
||||
assert_eq!(args.serve_path, Args::sanitize_path(&tmpdir).unwrap());
|
||||
assert_eq!(args.hidden, ["tmp", "*.log", "*.lock"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_args_from_cli2() {
|
||||
let cli = build_cli();
|
||||
let matches = cli
|
||||
.try_get_matches_from(vec![
|
||||
"", "--hidden", "tmp", "--hidden", "*.log", "--hidden", "*.lock",
|
||||
])
|
||||
.unwrap();
|
||||
let args = Args::parse(matches).unwrap();
|
||||
assert_eq!(args.hidden, ["tmp", "*.log", "*.lock"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_args_from_empty_config_file() {
|
||||
let tmpdir = assert_fs::TempDir::new().unwrap();
|
||||
let config_file = tmpdir.child("config.yaml");
|
||||
config_file.write_str("").unwrap();
|
||||
|
||||
let cli = build_cli();
|
||||
let matches = cli
|
||||
.try_get_matches_from(vec!["", "-c", &config_file.to_string_lossy()])
|
||||
.unwrap();
|
||||
let args = Args::parse(matches).unwrap();
|
||||
let cwd = Args::sanitize_path(std::env::current_dir().unwrap()).unwrap();
|
||||
assert_eq!(args.serve_path, cwd);
|
||||
assert_eq!(args.port, default_port());
|
||||
assert_eq!(args.addrs, default_addrs());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_args_from_config_file1() {
|
||||
let tmpdir = assert_fs::TempDir::new().unwrap();
|
||||
let config_file = tmpdir.child("config.yaml");
|
||||
let contents = format!(
|
||||
r#"
|
||||
serve-path: {}
|
||||
bind: 0.0.0.0
|
||||
port: 3000
|
||||
allow-upload: true
|
||||
hidden: tmp,*.log,*.lock
|
||||
"#,
|
||||
tmpdir.display()
|
||||
);
|
||||
config_file.write_str(&contents).unwrap();
|
||||
|
||||
let cli = build_cli();
|
||||
let matches = cli
|
||||
.try_get_matches_from(vec!["", "-c", &config_file.to_string_lossy()])
|
||||
.unwrap();
|
||||
let args = Args::parse(matches).unwrap();
|
||||
assert_eq!(args.serve_path, Args::sanitize_path(&tmpdir).unwrap());
|
||||
assert_eq!(
|
||||
args.addrs,
|
||||
vec![BindAddr::IpAddr("0.0.0.0".parse().unwrap())]
|
||||
);
|
||||
assert_eq!(args.hidden, ["tmp", "*.log", "*.lock"]);
|
||||
assert_eq!(args.port, 3000);
|
||||
assert!(args.allow_upload);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_args_from_config_file2() {
|
||||
let tmpdir = assert_fs::TempDir::new().unwrap();
|
||||
let config_file = tmpdir.child("config.yaml");
|
||||
let contents = r#"
|
||||
bind:
|
||||
- 127.0.0.1
|
||||
- 192.168.8.10
|
||||
hidden:
|
||||
- tmp
|
||||
- '*.log'
|
||||
- '*.lock'
|
||||
"#;
|
||||
config_file.write_str(contents).unwrap();
|
||||
|
||||
let cli = build_cli();
|
||||
let matches = cli
|
||||
.try_get_matches_from(vec!["", "-c", &config_file.to_string_lossy()])
|
||||
.unwrap();
|
||||
let args = Args::parse(matches).unwrap();
|
||||
assert_eq!(
|
||||
args.addrs,
|
||||
vec![
|
||||
BindAddr::IpAddr("127.0.0.1".parse().unwrap()),
|
||||
BindAddr::IpAddr("192.168.8.10".parse().unwrap())
|
||||
]
|
||||
);
|
||||
assert_eq!(args.hidden, ["tmp", "*.log", "*.lock"]);
|
||||
}
|
||||
Ok(PrivateKey(keys[0].to_owned()))
|
||||
}
|
||||
|
||||
702
src/auth.rs
702
src/auth.rs
@@ -1,15 +1,23 @@
|
||||
use crate::{args::Args, server::Response, utils::unix_now};
|
||||
|
||||
use anyhow::{anyhow, bail, Result};
|
||||
use base64::{engine::general_purpose::STANDARD, Engine as _};
|
||||
use ed25519_dalek::{ed25519::signature::SignerMut, Signature, SigningKey};
|
||||
use headers::HeaderValue;
|
||||
use hyper::{header::WWW_AUTHENTICATE, Method};
|
||||
use indexmap::IndexMap;
|
||||
use lazy_static::lazy_static;
|
||||
use md5::Context;
|
||||
use sha2::{Digest, Sha256};
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
time::{SystemTime, UNIX_EPOCH},
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::BoxResult;
|
||||
|
||||
const REALM: &str = "DUF";
|
||||
const REALM: &str = "DUFS";
|
||||
const DIGEST_AUTH_TIMEOUT: u32 = 60 * 60 * 24 * 7; // 7 days
|
||||
const TOKEN_EXPIRATION: u64 = 1000 * 60 * 60 * 24 * 3; // 3 days
|
||||
|
||||
lazy_static! {
|
||||
static ref NONCESTARTHASH: Context = {
|
||||
@@ -20,129 +28,505 @@ lazy_static! {
|
||||
};
|
||||
}
|
||||
|
||||
pub fn generate_www_auth(stale: bool) -> String {
|
||||
let str_stale = if stale { "stale=true," } else { "" };
|
||||
format!(
|
||||
"Digest realm=\"{}\",nonce=\"{}\",{}qop=\"auth\",algorithm=\"MD5\"",
|
||||
REALM,
|
||||
create_nonce(),
|
||||
str_stale
|
||||
)
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct AccessControl {
|
||||
empty: bool,
|
||||
use_hashed_password: bool,
|
||||
users: IndexMap<String, (String, AccessPaths)>,
|
||||
anonymous: Option<AccessPaths>,
|
||||
}
|
||||
|
||||
pub fn parse_auth(auth: &str) -> BoxResult<(String, String)> {
|
||||
let p: Vec<&str> = auth.trim().split(':').collect();
|
||||
let err = "Invalid auth value";
|
||||
if p.len() != 2 {
|
||||
return Err(err.into());
|
||||
impl Default for AccessControl {
|
||||
fn default() -> Self {
|
||||
AccessControl {
|
||||
empty: true,
|
||||
use_hashed_password: false,
|
||||
users: IndexMap::new(),
|
||||
anonymous: Some(AccessPaths::new(AccessPerm::ReadWrite)),
|
||||
}
|
||||
}
|
||||
let user = p[0];
|
||||
let pass = p[1];
|
||||
let mut h = Context::new();
|
||||
h.consume(format!("{}:{}:{}", user, REALM, pass).as_bytes());
|
||||
Ok((user.to_owned(), format!("{:x}", h.compute())))
|
||||
}
|
||||
|
||||
pub fn valid_digest(
|
||||
header_value: &HeaderValue,
|
||||
impl AccessControl {
|
||||
pub fn new(raw_rules: &[&str]) -> Result<Self> {
|
||||
if raw_rules.is_empty() {
|
||||
return Ok(Self::default());
|
||||
}
|
||||
let new_raw_rules = split_rules(raw_rules);
|
||||
let mut use_hashed_password = false;
|
||||
let mut annoy_paths = None;
|
||||
let mut account_paths_pairs = vec![];
|
||||
for rule in &new_raw_rules {
|
||||
let (account, paths) =
|
||||
split_account_paths(rule).ok_or_else(|| anyhow!("Invalid auth `{rule}`"))?;
|
||||
if account.is_empty() {
|
||||
if annoy_paths.is_some() {
|
||||
bail!("Invalid auth, no duplicate anonymous rules");
|
||||
}
|
||||
annoy_paths = Some(paths)
|
||||
} else if let Some((user, pass)) = account.split_once(':') {
|
||||
if user.is_empty() || pass.is_empty() {
|
||||
bail!("Invalid auth `{rule}`");
|
||||
}
|
||||
account_paths_pairs.push((user, pass, paths));
|
||||
}
|
||||
}
|
||||
let mut anonymous = None;
|
||||
if let Some(paths) = annoy_paths {
|
||||
let mut access_paths = AccessPaths::default();
|
||||
access_paths
|
||||
.merge(paths)
|
||||
.ok_or_else(|| anyhow!("Invalid auth value `@{paths}"))?;
|
||||
anonymous = Some(access_paths);
|
||||
}
|
||||
let mut users = IndexMap::new();
|
||||
for (user, pass, paths) in account_paths_pairs.into_iter() {
|
||||
let mut access_paths = AccessPaths::default();
|
||||
access_paths
|
||||
.merge(paths)
|
||||
.ok_or_else(|| anyhow!("Invalid auth value `{user}:{pass}@{paths}"))?;
|
||||
if let Some(paths) = annoy_paths {
|
||||
access_paths.merge(paths);
|
||||
}
|
||||
if pass.starts_with("$6$") {
|
||||
use_hashed_password = true;
|
||||
}
|
||||
users.insert(user.to_string(), (pass.to_string(), access_paths));
|
||||
}
|
||||
|
||||
Ok(Self {
|
||||
empty: false,
|
||||
use_hashed_password,
|
||||
users,
|
||||
anonymous,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn has_users(&self) -> bool {
|
||||
!self.users.is_empty()
|
||||
}
|
||||
|
||||
pub fn guard(
|
||||
&self,
|
||||
path: &str,
|
||||
method: &Method,
|
||||
authorization: Option<&HeaderValue>,
|
||||
token: Option<&String>,
|
||||
guard_options: bool,
|
||||
) -> (Option<String>, Option<AccessPaths>) {
|
||||
if self.empty {
|
||||
return (None, Some(AccessPaths::new(AccessPerm::ReadWrite)));
|
||||
}
|
||||
|
||||
if method == Method::GET {
|
||||
if let Some(token) = token {
|
||||
if let Ok((user, ap)) = self.verify_token(token, path) {
|
||||
return (Some(user), ap.guard(path, method));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(authorization) = authorization {
|
||||
if let Some(user) = get_auth_user(authorization) {
|
||||
if let Some((pass, ap)) = self.users.get(&user) {
|
||||
if method == Method::OPTIONS {
|
||||
return (Some(user), Some(AccessPaths::new(AccessPerm::ReadOnly)));
|
||||
}
|
||||
if check_auth(authorization, method.as_str(), &user, pass).is_some() {
|
||||
return (Some(user), ap.guard(path, method));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return (None, None);
|
||||
}
|
||||
|
||||
if !guard_options && method == Method::OPTIONS {
|
||||
return (None, Some(AccessPaths::new(AccessPerm::ReadOnly)));
|
||||
}
|
||||
|
||||
if let Some(ap) = self.anonymous.as_ref() {
|
||||
return (None, ap.guard(path, method));
|
||||
}
|
||||
|
||||
(None, None)
|
||||
}
|
||||
|
||||
pub fn generate_token(&self, path: &str, user: &str) -> Result<String> {
|
||||
let (pass, _) = self
|
||||
.users
|
||||
.get(user)
|
||||
.ok_or_else(|| anyhow!("Not found user '{user}'"))?;
|
||||
let exp = unix_now().as_millis() as u64 + TOKEN_EXPIRATION;
|
||||
let message = format!("{path}:{exp}");
|
||||
let mut signing_key = derive_secret_key(user, pass);
|
||||
let sig = signing_key.sign(message.as_bytes()).to_bytes();
|
||||
|
||||
let mut raw = Vec::with_capacity(64 + 8 + user.len());
|
||||
raw.extend_from_slice(&sig);
|
||||
raw.extend_from_slice(&exp.to_be_bytes());
|
||||
raw.extend_from_slice(user.as_bytes());
|
||||
|
||||
Ok(hex::encode(raw))
|
||||
}
|
||||
|
||||
fn verify_token<'a>(&'a self, token: &str, path: &str) -> Result<(String, &'a AccessPaths)> {
|
||||
let raw = hex::decode(token)?;
|
||||
|
||||
if raw.len() < 72 {
|
||||
bail!("Invalid token");
|
||||
}
|
||||
|
||||
let sig_bytes = &raw[..64];
|
||||
let exp_bytes = &raw[64..72];
|
||||
let user_bytes = &raw[72..];
|
||||
|
||||
let exp = u64::from_be_bytes(exp_bytes.try_into()?);
|
||||
if unix_now().as_millis() as u64 > exp {
|
||||
bail!("Token expired");
|
||||
}
|
||||
|
||||
let user = std::str::from_utf8(user_bytes)?;
|
||||
let (pass, ap) = self
|
||||
.users
|
||||
.get(user)
|
||||
.ok_or_else(|| anyhow!("Not found user '{user}'"))?;
|
||||
|
||||
let sig = Signature::from_bytes(&<[u8; 64]>::try_from(sig_bytes)?);
|
||||
|
||||
let message = format!("{path}:{exp}");
|
||||
derive_secret_key(user, pass).verify(message.as_bytes(), &sig)?;
|
||||
Ok((user.to_string(), ap))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Clone, PartialEq, Eq)]
|
||||
pub struct AccessPaths {
|
||||
perm: AccessPerm,
|
||||
children: IndexMap<String, AccessPaths>,
|
||||
}
|
||||
|
||||
impl AccessPaths {
|
||||
pub fn new(perm: AccessPerm) -> Self {
|
||||
Self {
|
||||
perm,
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn perm(&self) -> AccessPerm {
|
||||
self.perm
|
||||
}
|
||||
|
||||
pub fn set_perm(&mut self, perm: AccessPerm) {
|
||||
if self.perm < perm {
|
||||
self.perm = perm;
|
||||
self.recursively_purge_children(perm);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn merge(&mut self, paths: &str) -> Option<()> {
|
||||
for item in paths.trim_matches(',').split(',') {
|
||||
let (path, perm) = match item.split_once(':') {
|
||||
None => (item, AccessPerm::ReadOnly),
|
||||
Some((path, "ro")) => (path, AccessPerm::ReadOnly),
|
||||
Some((path, "rw")) => (path, AccessPerm::ReadWrite),
|
||||
_ => return None,
|
||||
};
|
||||
self.add(path, perm);
|
||||
}
|
||||
Some(())
|
||||
}
|
||||
|
||||
pub fn guard(&self, path: &str, method: &Method) -> Option<Self> {
|
||||
let target = self.find(path)?;
|
||||
if !is_readonly_method(method) && !target.perm().readwrite() {
|
||||
return None;
|
||||
}
|
||||
Some(target)
|
||||
}
|
||||
|
||||
fn recursively_purge_children(&mut self, perm: AccessPerm) {
|
||||
self.children.retain(|_, child| {
|
||||
if child.perm <= perm {
|
||||
false
|
||||
} else {
|
||||
child.recursively_purge_children(perm);
|
||||
true
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
fn add(&mut self, path: &str, perm: AccessPerm) {
|
||||
let path = path.trim_matches('/');
|
||||
if path.is_empty() {
|
||||
self.set_perm(perm);
|
||||
} else {
|
||||
let parts: Vec<&str> = path.split('/').collect();
|
||||
self.add_impl(&parts, perm);
|
||||
}
|
||||
}
|
||||
|
||||
fn add_impl(&mut self, parts: &[&str], perm: AccessPerm) {
|
||||
let parts_len = parts.len();
|
||||
if parts_len == 0 {
|
||||
self.set_perm(perm);
|
||||
return;
|
||||
}
|
||||
if self.perm >= perm {
|
||||
return;
|
||||
}
|
||||
let child = self.children.entry(parts[0].to_string()).or_default();
|
||||
child.add_impl(&parts[1..], perm)
|
||||
}
|
||||
|
||||
pub fn find(&self, path: &str) -> Option<AccessPaths> {
|
||||
let parts: Vec<&str> = path
|
||||
.trim_matches('/')
|
||||
.split('/')
|
||||
.filter(|v| !v.is_empty())
|
||||
.collect();
|
||||
self.find_impl(&parts, self.perm)
|
||||
}
|
||||
|
||||
fn find_impl(&self, parts: &[&str], perm: AccessPerm) -> Option<AccessPaths> {
|
||||
let perm = if !self.perm.indexonly() {
|
||||
self.perm
|
||||
} else {
|
||||
perm
|
||||
};
|
||||
if parts.is_empty() {
|
||||
if perm.indexonly() {
|
||||
return Some(self.clone());
|
||||
} else {
|
||||
return Some(AccessPaths::new(perm));
|
||||
}
|
||||
}
|
||||
let child = match self.children.get(parts[0]) {
|
||||
Some(v) => v,
|
||||
None => {
|
||||
if perm.indexonly() {
|
||||
return None;
|
||||
} else {
|
||||
return Some(AccessPaths::new(perm));
|
||||
}
|
||||
}
|
||||
};
|
||||
child.find_impl(&parts[1..], perm)
|
||||
}
|
||||
|
||||
pub fn child_names(&self) -> Vec<&String> {
|
||||
self.children.keys().collect()
|
||||
}
|
||||
|
||||
pub fn entry_paths(&self, base: &Path) -> Vec<PathBuf> {
|
||||
if !self.perm().indexonly() {
|
||||
return vec![base.to_path_buf()];
|
||||
}
|
||||
let mut output = vec![];
|
||||
self.entry_paths_impl(&mut output, base);
|
||||
output
|
||||
}
|
||||
|
||||
fn entry_paths_impl(&self, output: &mut Vec<PathBuf>, base: &Path) {
|
||||
for (name, child) in self.children.iter() {
|
||||
let base = base.join(name);
|
||||
if child.perm().indexonly() {
|
||||
child.entry_paths_impl(output, &base);
|
||||
} else {
|
||||
output.push(base)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default)]
|
||||
pub enum AccessPerm {
|
||||
#[default]
|
||||
IndexOnly,
|
||||
ReadOnly,
|
||||
ReadWrite,
|
||||
}
|
||||
|
||||
impl AccessPerm {
|
||||
pub fn indexonly(&self) -> bool {
|
||||
self == &AccessPerm::IndexOnly
|
||||
}
|
||||
|
||||
pub fn readwrite(&self) -> bool {
|
||||
self == &AccessPerm::ReadWrite
|
||||
}
|
||||
}
|
||||
|
||||
pub fn www_authenticate(res: &mut Response, args: &Args) -> Result<()> {
|
||||
if args.auth.use_hashed_password {
|
||||
let basic = HeaderValue::from_str(&format!("Basic realm=\"{REALM}\""))?;
|
||||
res.headers_mut().insert(WWW_AUTHENTICATE, basic);
|
||||
} else {
|
||||
let nonce = create_nonce()?;
|
||||
let digest = HeaderValue::from_str(&format!(
|
||||
"Digest realm=\"{REALM}\", nonce=\"{nonce}\", qop=\"auth\""
|
||||
))?;
|
||||
let basic = HeaderValue::from_str(&format!("Basic realm=\"{REALM}\""))?;
|
||||
res.headers_mut().append(WWW_AUTHENTICATE, digest);
|
||||
res.headers_mut().append(WWW_AUTHENTICATE, basic);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn get_auth_user(authorization: &HeaderValue) -> Option<String> {
|
||||
if let Some(value) = strip_prefix(authorization.as_bytes(), b"Basic ") {
|
||||
let value: Vec<u8> = STANDARD.decode(value).ok()?;
|
||||
let parts: Vec<&str> = std::str::from_utf8(&value).ok()?.split(':').collect();
|
||||
Some(parts[0].to_string())
|
||||
} else if let Some(value) = strip_prefix(authorization.as_bytes(), b"Digest ") {
|
||||
let digest_map = to_headermap(value).ok()?;
|
||||
let username = digest_map.get(b"username".as_ref())?;
|
||||
std::str::from_utf8(username).map(|v| v.to_string()).ok()
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn check_auth(
|
||||
authorization: &HeaderValue,
|
||||
method: &str,
|
||||
auth_user: &str,
|
||||
auth_pass: &str,
|
||||
) -> Option<()> {
|
||||
let digest_value = strip_prefix(header_value.as_bytes(), b"Digest ")?;
|
||||
let user_vals = to_headermap(digest_value).ok()?;
|
||||
if let (Some(username), Some(nonce), Some(user_response)) = (
|
||||
user_vals
|
||||
.get(b"username".as_ref())
|
||||
.and_then(|b| std::str::from_utf8(*b).ok()),
|
||||
user_vals.get(b"nonce".as_ref()),
|
||||
user_vals.get(b"response".as_ref()),
|
||||
) {
|
||||
match validate_nonce(nonce) {
|
||||
Ok(true) => {}
|
||||
_ => return None,
|
||||
}
|
||||
if auth_user != username {
|
||||
if let Some(value) = strip_prefix(authorization.as_bytes(), b"Basic ") {
|
||||
let value: Vec<u8> = STANDARD.decode(value).ok()?;
|
||||
let (user, pass) = std::str::from_utf8(&value).ok()?.split_once(':')?;
|
||||
|
||||
if user != auth_user {
|
||||
return None;
|
||||
}
|
||||
let mut ha = Context::new();
|
||||
ha.consume(method);
|
||||
ha.consume(b":");
|
||||
if let Some(uri) = user_vals.get(b"uri".as_ref()) {
|
||||
ha.consume(uri);
|
||||
|
||||
if auth_pass.starts_with("$6$") {
|
||||
if let Ok(()) = sha_crypt::sha512_check(pass, auth_pass) {
|
||||
return Some(());
|
||||
}
|
||||
} else if pass == auth_pass {
|
||||
return Some(());
|
||||
}
|
||||
let ha = format!("{:x}", ha.compute());
|
||||
let mut correct_response = None;
|
||||
if let Some(qop) = user_vals.get(b"qop".as_ref()) {
|
||||
if qop == &b"auth".as_ref() || qop == &b"auth-int".as_ref() {
|
||||
correct_response = Some({
|
||||
|
||||
None
|
||||
} else if let Some(value) = strip_prefix(authorization.as_bytes(), b"Digest ") {
|
||||
let digest_map = to_headermap(value).ok()?;
|
||||
if let (Some(username), Some(nonce), Some(user_response)) = (
|
||||
digest_map
|
||||
.get(b"username".as_ref())
|
||||
.and_then(|b| std::str::from_utf8(b).ok()),
|
||||
digest_map.get(b"nonce".as_ref()),
|
||||
digest_map.get(b"response".as_ref()),
|
||||
) {
|
||||
match validate_nonce(nonce) {
|
||||
Ok(true) => {}
|
||||
_ => return None,
|
||||
}
|
||||
if auth_user != username {
|
||||
return None;
|
||||
}
|
||||
|
||||
let mut h = Context::new();
|
||||
h.consume(format!("{auth_user}:{REALM}:{auth_pass}").as_bytes());
|
||||
let auth_pass = format!("{:x}", h.finalize());
|
||||
|
||||
let mut ha = Context::new();
|
||||
ha.consume(method);
|
||||
ha.consume(b":");
|
||||
if let Some(uri) = digest_map.get(b"uri".as_ref()) {
|
||||
ha.consume(uri);
|
||||
}
|
||||
let ha = format!("{:x}", ha.finalize());
|
||||
let mut correct_response = None;
|
||||
if let Some(qop) = digest_map.get(b"qop".as_ref()) {
|
||||
if qop == &b"auth".as_ref() || qop == &b"auth-int".as_ref() {
|
||||
correct_response = Some({
|
||||
let mut c = Context::new();
|
||||
c.consume(&auth_pass);
|
||||
c.consume(b":");
|
||||
c.consume(nonce);
|
||||
c.consume(b":");
|
||||
if let Some(nc) = digest_map.get(b"nc".as_ref()) {
|
||||
c.consume(nc);
|
||||
}
|
||||
c.consume(b":");
|
||||
if let Some(cnonce) = digest_map.get(b"cnonce".as_ref()) {
|
||||
c.consume(cnonce);
|
||||
}
|
||||
c.consume(b":");
|
||||
c.consume(qop);
|
||||
c.consume(b":");
|
||||
c.consume(&*ha);
|
||||
format!("{:x}", c.finalize())
|
||||
});
|
||||
}
|
||||
}
|
||||
let correct_response = match correct_response {
|
||||
Some(r) => r,
|
||||
None => {
|
||||
let mut c = Context::new();
|
||||
c.consume(&auth_pass);
|
||||
c.consume(b":");
|
||||
c.consume(nonce);
|
||||
c.consume(b":");
|
||||
if let Some(nc) = user_vals.get(b"nc".as_ref()) {
|
||||
c.consume(nc);
|
||||
}
|
||||
c.consume(b":");
|
||||
if let Some(cnonce) = user_vals.get(b"cnonce".as_ref()) {
|
||||
c.consume(cnonce);
|
||||
}
|
||||
c.consume(b":");
|
||||
c.consume(qop);
|
||||
c.consume(b":");
|
||||
c.consume(&*ha);
|
||||
format!("{:x}", c.compute())
|
||||
});
|
||||
format!("{:x}", c.finalize())
|
||||
}
|
||||
};
|
||||
if correct_response.as_bytes() == *user_response {
|
||||
return Some(());
|
||||
}
|
||||
}
|
||||
let correct_response = match correct_response {
|
||||
Some(r) => r,
|
||||
None => {
|
||||
let mut c = Context::new();
|
||||
c.consume(&auth_pass);
|
||||
c.consume(b":");
|
||||
c.consume(nonce);
|
||||
c.consume(b":");
|
||||
c.consume(&*ha);
|
||||
format!("{:x}", c.compute())
|
||||
}
|
||||
};
|
||||
if correct_response.as_bytes() == *user_response {
|
||||
// grant access
|
||||
return Some(());
|
||||
}
|
||||
None
|
||||
} else {
|
||||
None
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
fn derive_secret_key(user: &str, pass: &str) -> SigningKey {
|
||||
let mut hasher = Sha256::new();
|
||||
hasher.update(format!("{user}:{pass}").as_bytes());
|
||||
let hash = hasher.finalize();
|
||||
SigningKey::from_bytes(&hash.into())
|
||||
}
|
||||
|
||||
/// Check if a nonce is still valid.
|
||||
/// Return an error if it was never valid
|
||||
fn validate_nonce(nonce: &[u8]) -> Result<bool, ()> {
|
||||
fn validate_nonce(nonce: &[u8]) -> Result<bool> {
|
||||
if nonce.len() != 34 {
|
||||
return Err(());
|
||||
bail!("invalid nonce");
|
||||
}
|
||||
//parse hex
|
||||
if let Ok(n) = std::str::from_utf8(nonce) {
|
||||
//get time
|
||||
if let Ok(secs_nonce) = u32::from_str_radix(&n[..8], 16) {
|
||||
//check time
|
||||
let now = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();
|
||||
let now = unix_now();
|
||||
let secs_now = now.as_secs() as u32;
|
||||
|
||||
if let Some(dur) = secs_now.checked_sub(secs_nonce) {
|
||||
//check hash
|
||||
let mut h = NONCESTARTHASH.clone();
|
||||
h.consume(secs_nonce.to_be_bytes());
|
||||
let h = format!("{:x}", h.compute());
|
||||
let h = format!("{:x}", h.finalize());
|
||||
if h[..26] == n[8..34] {
|
||||
return Ok(dur < 300); // from the last 5min
|
||||
//Authentication-Info ?
|
||||
return Ok(dur < DIGEST_AUTH_TIMEOUT);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(())
|
||||
bail!("invalid nonce");
|
||||
}
|
||||
|
||||
fn is_readonly_method(method: &Method) -> bool {
|
||||
method == Method::GET
|
||||
|| method == Method::OPTIONS
|
||||
|| method == Method::HEAD
|
||||
|| method.as_str() == "PROPFIND"
|
||||
|| method.as_str() == "CHECKAUTH"
|
||||
|| method.as_str() == "LOGOUT"
|
||||
}
|
||||
|
||||
fn strip_prefix<'a>(search: &'a [u8], prefix: &[u8]) -> Option<&'a [u8]> {
|
||||
@@ -159,12 +543,12 @@ fn strip_prefix<'a>(search: &'a [u8], prefix: &[u8]) -> Option<&'a [u8]> {
|
||||
|
||||
fn to_headermap(header: &[u8]) -> Result<HashMap<&[u8], &[u8]>, ()> {
|
||||
let mut sep = Vec::new();
|
||||
let mut asign = Vec::new();
|
||||
let mut assign = Vec::new();
|
||||
let mut i: usize = 0;
|
||||
let mut esc = false;
|
||||
for c in header {
|
||||
match (c, esc) {
|
||||
(b'=', false) => asign.push(i),
|
||||
(b'=', false) => assign.push(i),
|
||||
(b',', false) => sep.push(i),
|
||||
(b'"', false) => esc = true,
|
||||
(b'"', true) => esc = false,
|
||||
@@ -172,16 +556,16 @@ fn to_headermap(header: &[u8]) -> Result<HashMap<&[u8], &[u8]>, ()> {
|
||||
}
|
||||
i += 1;
|
||||
}
|
||||
sep.push(i); // same len for both Vecs
|
||||
sep.push(i);
|
||||
|
||||
i = 0;
|
||||
let mut ret = HashMap::new();
|
||||
for (&k, &a) in sep.iter().zip(asign.iter()) {
|
||||
for (&k, &a) in sep.iter().zip(assign.iter()) {
|
||||
while header[i] == b' ' {
|
||||
i += 1;
|
||||
}
|
||||
if a <= i || k <= 1 + a {
|
||||
//keys and vals must contain one char
|
||||
//keys and values must contain one char
|
||||
return Err(());
|
||||
}
|
||||
let key = &header[i..a];
|
||||
@@ -198,12 +582,144 @@ fn to_headermap(header: &[u8]) -> Result<HashMap<&[u8], &[u8]>, ()> {
|
||||
Ok(ret)
|
||||
}
|
||||
|
||||
fn create_nonce() -> String {
|
||||
let now = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();
|
||||
fn create_nonce() -> Result<String> {
|
||||
let now = unix_now();
|
||||
let secs = now.as_secs() as u32;
|
||||
let mut h = NONCESTARTHASH.clone();
|
||||
h.consume(secs.to_be_bytes());
|
||||
|
||||
let n = format!("{:08x}{:032x}", secs, h.compute());
|
||||
n[..34].to_string()
|
||||
let n = format!("{:08x}{:032x}", secs, h.finalize());
|
||||
Ok(n[..34].to_string())
|
||||
}
|
||||
|
||||
fn split_account_paths(s: &str) -> Option<(&str, &str)> {
|
||||
let i = s.find("@/")?;
|
||||
Some((&s[0..i], &s[i + 1..]))
|
||||
}
|
||||
|
||||
fn split_rules(rules: &[&str]) -> Vec<String> {
|
||||
let mut output = vec![];
|
||||
for rule in rules {
|
||||
let parts: Vec<&str> = rule.split('|').collect();
|
||||
let mut rules_list = vec![];
|
||||
let mut concated_part = String::new();
|
||||
for (i, part) in parts.iter().enumerate() {
|
||||
if part.contains("@/") {
|
||||
concated_part.push_str(part);
|
||||
let mut concated_part_tmp = String::new();
|
||||
std::mem::swap(&mut concated_part_tmp, &mut concated_part);
|
||||
rules_list.push(concated_part_tmp);
|
||||
continue;
|
||||
}
|
||||
concated_part.push_str(part);
|
||||
if i < parts.len() - 1 {
|
||||
concated_part.push('|');
|
||||
}
|
||||
}
|
||||
if !concated_part.is_empty() {
|
||||
rules_list.push(concated_part)
|
||||
}
|
||||
output.extend(rules_list);
|
||||
}
|
||||
output
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_split_account_paths() {
|
||||
assert_eq!(
|
||||
split_account_paths("user:pass@/:rw"),
|
||||
Some(("user:pass", "/:rw"))
|
||||
);
|
||||
assert_eq!(
|
||||
split_account_paths("user:pass@@/:rw"),
|
||||
Some(("user:pass@", "/:rw"))
|
||||
);
|
||||
assert_eq!(
|
||||
split_account_paths("user:pass@1@/:rw"),
|
||||
Some(("user:pass@1", "/:rw"))
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_compact_split_rules() {
|
||||
assert_eq!(
|
||||
split_rules(&["user1:pass1@/:rw|user2:pass2@/:rw"]),
|
||||
["user1:pass1@/:rw", "user2:pass2@/:rw"]
|
||||
);
|
||||
assert_eq!(
|
||||
split_rules(&["user1:pa|ss1@/:rw|user2:pa|ss2@/:rw"]),
|
||||
["user1:pa|ss1@/:rw", "user2:pa|ss2@/:rw"]
|
||||
);
|
||||
assert_eq!(
|
||||
split_rules(&["user1:pa|ss1@/:rw|@/"]),
|
||||
["user1:pa|ss1@/:rw", "@/"]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_access_paths() {
|
||||
let mut paths = AccessPaths::default();
|
||||
paths.add("/dir1", AccessPerm::ReadWrite);
|
||||
paths.add("/dir2/dir21", AccessPerm::ReadWrite);
|
||||
paths.add("/dir2/dir21/dir211", AccessPerm::ReadOnly);
|
||||
paths.add("/dir2/dir22", AccessPerm::ReadOnly);
|
||||
paths.add("/dir2/dir22/dir221", AccessPerm::ReadWrite);
|
||||
paths.add("/dir2/dir23/dir231", AccessPerm::ReadWrite);
|
||||
assert_eq!(
|
||||
paths.entry_paths(Path::new("/tmp")),
|
||||
[
|
||||
"/tmp/dir1",
|
||||
"/tmp/dir2/dir21",
|
||||
"/tmp/dir2/dir22",
|
||||
"/tmp/dir2/dir23/dir231",
|
||||
]
|
||||
.iter()
|
||||
.map(PathBuf::from)
|
||||
.collect::<Vec<_>>()
|
||||
);
|
||||
assert_eq!(
|
||||
paths
|
||||
.find("dir2")
|
||||
.map(|v| v.entry_paths(Path::new("/tmp/dir2"))),
|
||||
Some(
|
||||
[
|
||||
"/tmp/dir2/dir21",
|
||||
"/tmp/dir2/dir22",
|
||||
"/tmp/dir2/dir23/dir231"
|
||||
]
|
||||
.iter()
|
||||
.map(PathBuf::from)
|
||||
.collect::<Vec<_>>()
|
||||
)
|
||||
);
|
||||
assert_eq!(
|
||||
paths.find("dir1/file"),
|
||||
Some(AccessPaths::new(AccessPerm::ReadWrite))
|
||||
);
|
||||
assert_eq!(
|
||||
paths.find("dir2/dir21/file"),
|
||||
Some(AccessPaths::new(AccessPerm::ReadWrite))
|
||||
);
|
||||
assert_eq!(
|
||||
paths.find("dir2/dir21/dir211/file"),
|
||||
Some(AccessPaths::new(AccessPerm::ReadWrite))
|
||||
);
|
||||
assert_eq!(
|
||||
paths.find("dir2/dir22/file"),
|
||||
Some(AccessPaths::new(AccessPerm::ReadOnly))
|
||||
);
|
||||
assert_eq!(
|
||||
paths.find("dir2/dir22/dir221/file"),
|
||||
Some(AccessPaths::new(AccessPerm::ReadWrite))
|
||||
);
|
||||
assert_eq!(paths.find("dir2/dir23/file"), None);
|
||||
assert_eq!(
|
||||
paths.find("dir2/dir23//dir231/file"),
|
||||
Some(AccessPaths::new(AccessPerm::ReadWrite))
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
106
src/http_logger.rs
Normal file
106
src/http_logger.rs
Normal file
@@ -0,0 +1,106 @@
|
||||
use std::{collections::HashMap, str::FromStr};
|
||||
|
||||
use crate::{auth::get_auth_user, server::Request, utils::decode_uri};
|
||||
|
||||
pub const DEFAULT_LOG_FORMAT: &str = r#"$remote_addr "$request" $status"#;
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct HttpLogger {
|
||||
elements: Vec<LogElement>,
|
||||
}
|
||||
|
||||
impl Default for HttpLogger {
|
||||
fn default() -> Self {
|
||||
DEFAULT_LOG_FORMAT.parse().unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
enum LogElement {
|
||||
Variable(String),
|
||||
Header(String),
|
||||
Literal(String),
|
||||
}
|
||||
|
||||
impl HttpLogger {
|
||||
pub fn data(&self, req: &Request) -> HashMap<String, String> {
|
||||
let mut data = HashMap::default();
|
||||
for element in self.elements.iter() {
|
||||
match element {
|
||||
LogElement::Variable(name) => match name.as_str() {
|
||||
"request" => {
|
||||
let uri = req.uri().to_string();
|
||||
let uri = decode_uri(&uri).map(|s| s.to_string()).unwrap_or(uri);
|
||||
data.insert(name.to_string(), format!("{} {uri}", req.method()));
|
||||
}
|
||||
"remote_user" => {
|
||||
if let Some(user) =
|
||||
req.headers().get("authorization").and_then(get_auth_user)
|
||||
{
|
||||
data.insert(name.to_string(), user);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
},
|
||||
LogElement::Header(name) => {
|
||||
if let Some(value) = req.headers().get(name).and_then(|v| v.to_str().ok()) {
|
||||
data.insert(name.to_string(), value.to_string());
|
||||
}
|
||||
}
|
||||
LogElement::Literal(_) => {}
|
||||
}
|
||||
}
|
||||
data
|
||||
}
|
||||
|
||||
pub fn log(&self, data: &HashMap<String, String>, err: Option<String>) {
|
||||
if self.elements.is_empty() {
|
||||
return;
|
||||
}
|
||||
let mut output = String::new();
|
||||
for element in self.elements.iter() {
|
||||
match element {
|
||||
LogElement::Literal(value) => output.push_str(value.as_str()),
|
||||
LogElement::Header(name) | LogElement::Variable(name) => {
|
||||
output.push_str(data.get(name).map(|v| v.as_str()).unwrap_or("-"))
|
||||
}
|
||||
}
|
||||
}
|
||||
match err {
|
||||
Some(err) => error!("{output} {err}"),
|
||||
None => info!("{output}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for HttpLogger {
|
||||
type Err = anyhow::Error;
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
let mut elements = vec![];
|
||||
let mut is_var = false;
|
||||
let mut cache = String::new();
|
||||
for c in format!("{s} ").chars() {
|
||||
if c == '$' {
|
||||
if !cache.is_empty() {
|
||||
elements.push(LogElement::Literal(cache.to_string()));
|
||||
}
|
||||
cache.clear();
|
||||
is_var = true;
|
||||
} else if is_var && !(c.is_alphanumeric() || c == '_') {
|
||||
if let Some(value) = cache.strip_prefix("$http_") {
|
||||
elements.push(LogElement::Header(value.replace('_', "-").to_string()));
|
||||
} else if let Some(value) = cache.strip_prefix('$') {
|
||||
elements.push(LogElement::Variable(value.to_string()));
|
||||
}
|
||||
cache.clear();
|
||||
is_var = false;
|
||||
}
|
||||
cache.push(c);
|
||||
}
|
||||
let cache = cache.trim();
|
||||
if !cache.is_empty() {
|
||||
elements.push(LogElement::Literal(cache.to_string()));
|
||||
}
|
||||
Ok(Self { elements })
|
||||
}
|
||||
}
|
||||
105
src/http_utils.rs
Normal file
105
src/http_utils.rs
Normal file
@@ -0,0 +1,105 @@
|
||||
use bytes::{Bytes, BytesMut};
|
||||
use futures_util::Stream;
|
||||
use http_body_util::{combinators::BoxBody, BodyExt, Full};
|
||||
use hyper::body::{Body, Incoming};
|
||||
use std::{
|
||||
pin::Pin,
|
||||
task::{Context, Poll},
|
||||
};
|
||||
use tokio::io::AsyncRead;
|
||||
use tokio_util::io::poll_read_buf;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct IncomingStream {
|
||||
inner: Incoming,
|
||||
}
|
||||
|
||||
impl IncomingStream {
|
||||
pub fn new(inner: Incoming) -> Self {
|
||||
Self { inner }
|
||||
}
|
||||
}
|
||||
|
||||
impl Stream for IncomingStream {
|
||||
type Item = Result<Bytes, anyhow::Error>;
|
||||
|
||||
#[inline]
|
||||
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
|
||||
loop {
|
||||
match futures_util::ready!(Pin::new(&mut self.inner).poll_frame(cx)?) {
|
||||
Some(frame) => match frame.into_data() {
|
||||
Ok(data) => return Poll::Ready(Some(Ok(data))),
|
||||
Err(_frame) => {}
|
||||
},
|
||||
None => return Poll::Ready(None),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pin_project_lite::pin_project! {
|
||||
pub struct LengthLimitedStream<R> {
|
||||
#[pin]
|
||||
reader: Option<R>,
|
||||
remaining: usize,
|
||||
buf: BytesMut,
|
||||
capacity: usize,
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> LengthLimitedStream<R> {
|
||||
pub fn new(reader: R, limit: usize) -> Self {
|
||||
Self {
|
||||
reader: Some(reader),
|
||||
remaining: limit,
|
||||
buf: BytesMut::new(),
|
||||
capacity: 4096,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R: AsyncRead> Stream for LengthLimitedStream<R> {
|
||||
type Item = std::io::Result<Bytes>;
|
||||
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
|
||||
let mut this = self.as_mut().project();
|
||||
|
||||
if *this.remaining == 0 {
|
||||
self.project().reader.set(None);
|
||||
return Poll::Ready(None);
|
||||
}
|
||||
|
||||
let reader = match this.reader.as_pin_mut() {
|
||||
Some(r) => r,
|
||||
None => return Poll::Ready(None),
|
||||
};
|
||||
|
||||
if this.buf.capacity() == 0 {
|
||||
this.buf.reserve(*this.capacity);
|
||||
}
|
||||
|
||||
match poll_read_buf(reader, cx, &mut this.buf) {
|
||||
Poll::Pending => Poll::Pending,
|
||||
Poll::Ready(Err(err)) => {
|
||||
self.project().reader.set(None);
|
||||
Poll::Ready(Some(Err(err)))
|
||||
}
|
||||
Poll::Ready(Ok(0)) => {
|
||||
self.project().reader.set(None);
|
||||
Poll::Ready(None)
|
||||
}
|
||||
Poll::Ready(Ok(_)) => {
|
||||
let mut chunk = this.buf.split();
|
||||
let chunk_size = (*this.remaining).min(chunk.len());
|
||||
chunk.truncate(chunk_size);
|
||||
*this.remaining -= chunk_size;
|
||||
Poll::Ready(Some(Ok(chunk.freeze())))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn body_full(content: impl Into<hyper::body::Bytes>) -> BoxBody<Bytes, anyhow::Error> {
|
||||
Full::new(content.into())
|
||||
.map_err(anyhow::Error::new)
|
||||
.boxed()
|
||||
}
|
||||
61
src/logger.rs
Normal file
61
src/logger.rs
Normal file
@@ -0,0 +1,61 @@
|
||||
use anyhow::{Context, Result};
|
||||
use chrono::{Local, SecondsFormat};
|
||||
use log::{Level, LevelFilter, Metadata, Record};
|
||||
use std::fs::{File, OpenOptions};
|
||||
use std::io::Write;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Mutex;
|
||||
|
||||
struct SimpleLogger {
|
||||
file: Option<Mutex<File>>,
|
||||
}
|
||||
|
||||
impl log::Log for SimpleLogger {
|
||||
fn enabled(&self, metadata: &Metadata) -> bool {
|
||||
metadata.level() <= Level::Info
|
||||
}
|
||||
|
||||
fn log(&self, record: &Record) {
|
||||
if self.enabled(record.metadata()) {
|
||||
let timestamp = Local::now().to_rfc3339_opts(SecondsFormat::Secs, true);
|
||||
let text = format!("{} {} - {}", timestamp, record.level(), record.args());
|
||||
match &self.file {
|
||||
Some(file) => {
|
||||
if let Ok(mut file) = file.lock() {
|
||||
let _ = writeln!(file, "{text}");
|
||||
}
|
||||
}
|
||||
None => {
|
||||
if record.level() < Level::Info {
|
||||
eprintln!("{text}");
|
||||
} else {
|
||||
println!("{text}");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn flush(&self) {}
|
||||
}
|
||||
|
||||
pub fn init(log_file: Option<PathBuf>) -> Result<()> {
|
||||
let file = match log_file {
|
||||
None => None,
|
||||
Some(log_file) => {
|
||||
let file = OpenOptions::new()
|
||||
.create(true)
|
||||
.append(true)
|
||||
.open(&log_file)
|
||||
.with_context(|| {
|
||||
format!("Failed to open the log file at '{}'", log_file.display())
|
||||
})?;
|
||||
Some(Mutex::new(file))
|
||||
}
|
||||
};
|
||||
let logger = SimpleLogger { file };
|
||||
log::set_boxed_logger(Box::new(logger))
|
||||
.map(|_| log::set_max_level(LevelFilter::Info))
|
||||
.with_context(|| "Failed to init logger")?;
|
||||
Ok(())
|
||||
}
|
||||
302
src/main.rs
302
src/main.rs
@@ -1,48 +1,302 @@
|
||||
mod args;
|
||||
mod auth;
|
||||
mod http_logger;
|
||||
mod http_utils;
|
||||
mod logger;
|
||||
mod noscript;
|
||||
mod server;
|
||||
mod utils;
|
||||
|
||||
#[macro_use]
|
||||
extern crate log;
|
||||
|
||||
pub type BoxResult<T> = Result<T, Box<dyn std::error::Error>>;
|
||||
use crate::args::{build_cli, print_completions, Args};
|
||||
use crate::server::Server;
|
||||
#[cfg(feature = "tls")]
|
||||
use crate::utils::{load_certs, load_private_key};
|
||||
|
||||
use std::env;
|
||||
use std::io::Write;
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use args::BindAddr;
|
||||
use clap_complete::Shell;
|
||||
use futures_util::future::join_all;
|
||||
|
||||
use crate::args::{matches, Args};
|
||||
use crate::server::serve;
|
||||
use hyper::{body::Incoming, service::service_fn, Request};
|
||||
use hyper_util::{
|
||||
rt::{TokioExecutor, TokioIo},
|
||||
server::conn::auto::Builder,
|
||||
};
|
||||
use std::net::{IpAddr, SocketAddr, TcpListener as StdTcpListener};
|
||||
use std::sync::{
|
||||
atomic::{AtomicBool, Ordering},
|
||||
Arc,
|
||||
};
|
||||
use std::time::Duration;
|
||||
use tokio::time::timeout;
|
||||
use tokio::{net::TcpListener, task::JoinHandle};
|
||||
#[cfg(feature = "tls")]
|
||||
use tokio_rustls::{rustls::ServerConfig, TlsAcceptor};
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() {
|
||||
run().await.unwrap_or_else(handle_err)
|
||||
}
|
||||
|
||||
async fn run() -> BoxResult<()> {
|
||||
if env::var("RUST_LOG").is_err() {
|
||||
env::set_var("RUST_LOG", "info")
|
||||
async fn main() -> Result<()> {
|
||||
let cmd = build_cli();
|
||||
let matches = cmd.get_matches();
|
||||
if let Some(generator) = matches.get_one::<Shell>("completions") {
|
||||
let mut cmd = build_cli();
|
||||
print_completions(*generator, &mut cmd);
|
||||
return Ok(());
|
||||
}
|
||||
env_logger::builder()
|
||||
.format(|buf, record| {
|
||||
let timestamp = buf.timestamp();
|
||||
writeln!(buf, "[{} {}] {}", timestamp, record.level(), record.args())
|
||||
})
|
||||
.init();
|
||||
let mut args = Args::parse(matches)?;
|
||||
logger::init(args.log_file.clone()).map_err(|e| anyhow!("Failed to init logger, {e}"))?;
|
||||
let (new_addrs, print_addrs) = check_addrs(&args)?;
|
||||
args.addrs = new_addrs;
|
||||
let running = Arc::new(AtomicBool::new(true));
|
||||
let listening = print_listening(&args, &print_addrs)?;
|
||||
let handles = serve(args, running.clone())?;
|
||||
println!("{listening}");
|
||||
|
||||
let args = Args::parse(matches())?;
|
||||
tokio::select! {
|
||||
ret = serve(args) => {
|
||||
ret
|
||||
ret = join_all(handles) => {
|
||||
for r in ret {
|
||||
if let Err(e) = r {
|
||||
error!("{e}");
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
},
|
||||
_ = shutdown_signal() => {
|
||||
running.store(false, Ordering::SeqCst);
|
||||
Ok(())
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_err<T>(err: Box<dyn std::error::Error>) -> T {
|
||||
eprintln!("error: {}", err);
|
||||
std::process::exit(1);
|
||||
fn serve(args: Args, running: Arc<AtomicBool>) -> Result<Vec<JoinHandle<()>>> {
|
||||
let addrs = args.addrs.clone();
|
||||
let port = args.port;
|
||||
let tls_config = (args.tls_cert.clone(), args.tls_key.clone());
|
||||
let server_handle = Arc::new(Server::init(args, running)?);
|
||||
let mut handles = vec![];
|
||||
for bind_addr in addrs.iter() {
|
||||
let server_handle = server_handle.clone();
|
||||
match bind_addr {
|
||||
BindAddr::IpAddr(ip) => {
|
||||
let listener = create_listener(SocketAddr::new(*ip, port))
|
||||
.with_context(|| format!("Failed to bind `{ip}:{port}`"))?;
|
||||
|
||||
match &tls_config {
|
||||
#[cfg(feature = "tls")]
|
||||
(Some(cert_file), Some(key_file)) => {
|
||||
let certs = load_certs(cert_file)?;
|
||||
let key = load_private_key(key_file)?;
|
||||
let mut config = ServerConfig::builder()
|
||||
.with_no_client_auth()
|
||||
.with_single_cert(certs, key)?;
|
||||
config.alpn_protocols = vec![b"h2".to_vec(), b"http/1.1".to_vec()];
|
||||
let config = Arc::new(config);
|
||||
let tls_accepter = TlsAcceptor::from(config);
|
||||
let handshake_timeout = Duration::from_secs(10);
|
||||
|
||||
let handle = tokio::spawn(async move {
|
||||
loop {
|
||||
let Ok((stream, addr)) = listener.accept().await else {
|
||||
continue;
|
||||
};
|
||||
let Some(stream) =
|
||||
timeout(handshake_timeout, tls_accepter.accept(stream))
|
||||
.await
|
||||
.ok()
|
||||
.and_then(|v| v.ok())
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
let stream = TokioIo::new(stream);
|
||||
tokio::spawn(handle_stream(
|
||||
server_handle.clone(),
|
||||
stream,
|
||||
Some(addr),
|
||||
));
|
||||
}
|
||||
});
|
||||
|
||||
handles.push(handle);
|
||||
}
|
||||
(None, None) => {
|
||||
let handle = tokio::spawn(async move {
|
||||
loop {
|
||||
let Ok((stream, addr)) = listener.accept().await else {
|
||||
continue;
|
||||
};
|
||||
let stream = TokioIo::new(stream);
|
||||
tokio::spawn(handle_stream(
|
||||
server_handle.clone(),
|
||||
stream,
|
||||
Some(addr),
|
||||
));
|
||||
}
|
||||
});
|
||||
handles.push(handle);
|
||||
}
|
||||
_ => {
|
||||
unreachable!()
|
||||
}
|
||||
};
|
||||
}
|
||||
#[cfg(unix)]
|
||||
BindAddr::SocketPath(path) => {
|
||||
let socket_path = if path.starts_with("@")
|
||||
&& cfg!(any(target_os = "linux", target_os = "android"))
|
||||
{
|
||||
let mut path_buf = path.as_bytes().to_vec();
|
||||
path_buf[0] = b'\0';
|
||||
unsafe { std::ffi::OsStr::from_encoded_bytes_unchecked(&path_buf) }
|
||||
.to_os_string()
|
||||
} else {
|
||||
let _ = std::fs::remove_file(path);
|
||||
path.into()
|
||||
};
|
||||
let listener = tokio::net::UnixListener::bind(socket_path)
|
||||
.with_context(|| format!("Failed to bind `{path}`"))?;
|
||||
let handle = tokio::spawn(async move {
|
||||
loop {
|
||||
let Ok((stream, _addr)) = listener.accept().await else {
|
||||
continue;
|
||||
};
|
||||
let stream = TokioIo::new(stream);
|
||||
tokio::spawn(handle_stream(server_handle.clone(), stream, None));
|
||||
}
|
||||
});
|
||||
|
||||
handles.push(handle);
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(handles)
|
||||
}
|
||||
|
||||
async fn handle_stream<T>(handle: Arc<Server>, stream: TokioIo<T>, addr: Option<SocketAddr>)
|
||||
where
|
||||
T: tokio::io::AsyncRead + tokio::io::AsyncWrite + Unpin + Send + 'static,
|
||||
{
|
||||
let hyper_service =
|
||||
service_fn(move |request: Request<Incoming>| handle.clone().call(request, addr));
|
||||
|
||||
match Builder::new(TokioExecutor::new())
|
||||
.serve_connection_with_upgrades(stream, hyper_service)
|
||||
.await
|
||||
{
|
||||
Ok(()) => {}
|
||||
Err(_err) => {
|
||||
// This error only appears when the client doesn't send a request and terminate the connection.
|
||||
//
|
||||
// If client sends one request then terminate connection whenever, it doesn't appear.
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn create_listener(addr: SocketAddr) -> Result<TcpListener> {
|
||||
use socket2::{Domain, Protocol, Socket, Type};
|
||||
let socket = Socket::new(Domain::for_address(addr), Type::STREAM, Some(Protocol::TCP))?;
|
||||
if addr.is_ipv6() {
|
||||
socket.set_only_v6(true)?;
|
||||
}
|
||||
socket.set_reuse_address(true)?;
|
||||
socket.bind(&addr.into())?;
|
||||
socket.listen(1024 /* Default backlog */)?;
|
||||
let std_listener = StdTcpListener::from(socket);
|
||||
std_listener.set_nonblocking(true)?;
|
||||
let listener = TcpListener::from_std(std_listener)?;
|
||||
Ok(listener)
|
||||
}
|
||||
|
||||
fn check_addrs(args: &Args) -> Result<(Vec<BindAddr>, Vec<BindAddr>)> {
|
||||
let mut new_addrs = vec![];
|
||||
let mut print_addrs = vec![];
|
||||
let (ipv4_addrs, ipv6_addrs) = interface_addrs()?;
|
||||
for bind_addr in args.addrs.iter() {
|
||||
match bind_addr {
|
||||
BindAddr::IpAddr(ip) => match &ip {
|
||||
IpAddr::V4(_) => {
|
||||
if !ipv4_addrs.is_empty() {
|
||||
new_addrs.push(bind_addr.clone());
|
||||
if ip.is_unspecified() {
|
||||
print_addrs.extend(ipv4_addrs.clone());
|
||||
} else {
|
||||
print_addrs.push(bind_addr.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
IpAddr::V6(_) => {
|
||||
if !ipv6_addrs.is_empty() {
|
||||
new_addrs.push(bind_addr.clone());
|
||||
if ip.is_unspecified() {
|
||||
print_addrs.extend(ipv6_addrs.clone());
|
||||
} else {
|
||||
print_addrs.push(bind_addr.clone())
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
#[cfg(unix)]
|
||||
_ => {
|
||||
new_addrs.push(bind_addr.clone());
|
||||
print_addrs.push(bind_addr.clone())
|
||||
}
|
||||
}
|
||||
}
|
||||
print_addrs.sort_unstable();
|
||||
Ok((new_addrs, print_addrs))
|
||||
}
|
||||
|
||||
fn interface_addrs() -> Result<(Vec<BindAddr>, Vec<BindAddr>)> {
|
||||
let (mut ipv4_addrs, mut ipv6_addrs) = (vec![], vec![]);
|
||||
let ifaces =
|
||||
if_addrs::get_if_addrs().with_context(|| "Failed to get local interface addresses")?;
|
||||
for iface in ifaces.into_iter() {
|
||||
let ip = iface.ip();
|
||||
if ip.is_ipv4() {
|
||||
ipv4_addrs.push(BindAddr::IpAddr(ip))
|
||||
}
|
||||
if ip.is_ipv6() {
|
||||
ipv6_addrs.push(BindAddr::IpAddr(ip))
|
||||
}
|
||||
}
|
||||
Ok((ipv4_addrs, ipv6_addrs))
|
||||
}
|
||||
|
||||
fn print_listening(args: &Args, print_addrs: &[BindAddr]) -> Result<String> {
|
||||
let mut output = String::new();
|
||||
let urls = print_addrs
|
||||
.iter()
|
||||
.map(|bind_addr| match bind_addr {
|
||||
BindAddr::IpAddr(addr) => {
|
||||
let addr = match addr {
|
||||
IpAddr::V4(_) => format!("{}:{}", addr, args.port),
|
||||
IpAddr::V6(_) => format!("[{}]:{}", addr, args.port),
|
||||
};
|
||||
let protocol = if args.tls_cert.is_some() {
|
||||
"https"
|
||||
} else {
|
||||
"http"
|
||||
};
|
||||
format!("{}://{}{}", protocol, addr, args.uri_prefix)
|
||||
}
|
||||
#[cfg(unix)]
|
||||
BindAddr::SocketPath(path) => path.to_string(),
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if urls.len() == 1 {
|
||||
output.push_str(&format!("Listening on {}", urls[0]))
|
||||
} else {
|
||||
let info = urls
|
||||
.iter()
|
||||
.map(|v| format!(" {v}"))
|
||||
.collect::<Vec<String>>()
|
||||
.join("\n");
|
||||
output.push_str(&format!("Listening on:\n{info}\n"))
|
||||
}
|
||||
|
||||
Ok(output)
|
||||
}
|
||||
|
||||
async fn shutdown_signal() {
|
||||
|
||||
103
src/noscript.rs
Normal file
103
src/noscript.rs
Normal file
@@ -0,0 +1,103 @@
|
||||
use crate::{
|
||||
server::{IndexData, PathItem, PathType, MAX_SUBPATHS_COUNT},
|
||||
utils::encode_uri,
|
||||
};
|
||||
|
||||
use anyhow::Result;
|
||||
use chrono::{DateTime, Utc};
|
||||
use xml::escape::escape_str_pcdata;
|
||||
|
||||
pub fn detect_noscript(user_agent: &str) -> bool {
|
||||
[
|
||||
"lynx/", "w3m/", "links ", "elinks/", "curl/", "wget/", "httpie/", "aria2/",
|
||||
]
|
||||
.iter()
|
||||
.any(|v| user_agent.starts_with(v))
|
||||
}
|
||||
|
||||
pub fn generate_noscript_html(data: &IndexData) -> Result<String> {
|
||||
let mut html = String::new();
|
||||
|
||||
let title = format!("Index of {}", escape_str_pcdata(&data.href));
|
||||
|
||||
html.push_str("<html>\n");
|
||||
html.push_str("<head>\n");
|
||||
html.push_str(&format!("<title>{title}</title>\n"));
|
||||
html.push_str(
|
||||
r#"<style>
|
||||
td {
|
||||
padding: 0.2rem;
|
||||
text-align: left;
|
||||
}
|
||||
td:nth-child(3) {
|
||||
text-align: right;
|
||||
}
|
||||
</style>
|
||||
"#,
|
||||
);
|
||||
html.push_str("</head>\n");
|
||||
html.push_str("<body>\n");
|
||||
html.push_str(&format!("<h1>{title}</h1>\n"));
|
||||
html.push_str("<table>\n");
|
||||
html.push_str(" <tbody>\n");
|
||||
html.push_str(&format!(" {}\n", render_parent()));
|
||||
|
||||
for path in &data.paths {
|
||||
html.push_str(&format!(" {}\n", render_path_item(path)));
|
||||
}
|
||||
|
||||
html.push_str(" </tbody>\n");
|
||||
html.push_str("</table>\n");
|
||||
html.push_str("</body>\n");
|
||||
|
||||
Ok(html)
|
||||
}
|
||||
|
||||
fn render_parent() -> String {
|
||||
let value = "../";
|
||||
format!("<tr><td><a href=\"{value}?noscript\">{value}</a></td><td></td><td></td></tr>")
|
||||
}
|
||||
|
||||
fn render_path_item(path: &PathItem) -> String {
|
||||
let mut href = encode_uri(&path.name);
|
||||
let mut name = escape_str_pcdata(&path.name).to_string();
|
||||
if path.path_type.is_dir() {
|
||||
href.push_str("/?noscript");
|
||||
name.push('/');
|
||||
};
|
||||
let mtime = format_mtime(path.mtime).unwrap_or_default();
|
||||
let size = format_size(path.size, path.path_type);
|
||||
|
||||
format!("<tr><td><a href=\"{href}\">{name}</a></td><td>{mtime}</td><td>{size}</td></tr>")
|
||||
}
|
||||
|
||||
fn format_mtime(mtime: u64) -> Option<String> {
|
||||
let datetime = DateTime::<Utc>::from_timestamp_millis(mtime as _)?;
|
||||
Some(datetime.format("%Y-%m-%dT%H:%M:%S.%3fZ").to_string())
|
||||
}
|
||||
|
||||
fn format_size(size: u64, path_type: PathType) -> String {
|
||||
if path_type.is_dir() {
|
||||
let unit = if size == 1 { "item" } else { "items" };
|
||||
let num = match size >= MAX_SUBPATHS_COUNT {
|
||||
true => format!(">{}", MAX_SUBPATHS_COUNT - 1),
|
||||
false => size.to_string(),
|
||||
};
|
||||
format!("{num} {unit}")
|
||||
} else {
|
||||
if size == 0 {
|
||||
return "0 B".to_string();
|
||||
}
|
||||
const UNITS: [&str; 5] = ["B", "KB", "MB", "GB", "TB"];
|
||||
let i = (size as f64).log2() / 10.0;
|
||||
let i = i.floor() as usize;
|
||||
|
||||
if i >= UNITS.len() {
|
||||
// Handle extremely large numbers beyond Terabytes
|
||||
return format!("{:.2} PB", size as f64 / 1024.0f64.powi(5));
|
||||
}
|
||||
|
||||
let size = size as f64 / 1024.0f64.powi(i as i32);
|
||||
format!("{:.2} {}", size, UNITS[i])
|
||||
}
|
||||
}
|
||||
2010
src/server.rs
2010
src/server.rs
File diff suppressed because it is too large
Load Diff
184
src/utils.rs
Normal file
184
src/utils.rs
Normal file
@@ -0,0 +1,184 @@
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use chrono::{DateTime, Utc};
|
||||
#[cfg(feature = "tls")]
|
||||
use rustls_pki_types::{pem::PemObject, CertificateDer, PrivateKeyDer};
|
||||
use std::{
|
||||
borrow::Cow,
|
||||
path::Path,
|
||||
time::{Duration, SystemTime, UNIX_EPOCH},
|
||||
};
|
||||
|
||||
pub fn unix_now() -> Duration {
|
||||
SystemTime::now()
|
||||
.duration_since(UNIX_EPOCH)
|
||||
.expect("Unable to get unix epoch time")
|
||||
}
|
||||
|
||||
pub fn encode_uri(v: &str) -> String {
|
||||
let parts: Vec<_> = v.split('/').map(urlencoding::encode).collect();
|
||||
parts.join("/")
|
||||
}
|
||||
|
||||
pub fn decode_uri(v: &str) -> Option<Cow<'_, str>> {
|
||||
percent_encoding::percent_decode(v.as_bytes())
|
||||
.decode_utf8()
|
||||
.ok()
|
||||
}
|
||||
|
||||
pub fn get_file_name(path: &Path) -> &str {
|
||||
path.file_name()
|
||||
.and_then(|v| v.to_str())
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
pub async fn get_file_mtime_and_mode(path: &Path) -> Result<(DateTime<Utc>, u16)> {
|
||||
use std::os::unix::prelude::MetadataExt;
|
||||
let meta = tokio::fs::metadata(path).await?;
|
||||
let datetime: DateTime<Utc> = meta.modified()?.into();
|
||||
Ok((datetime, meta.mode() as u16))
|
||||
}
|
||||
|
||||
#[cfg(not(unix))]
|
||||
pub async fn get_file_mtime_and_mode(path: &Path) -> Result<(DateTime<Utc>, u16)> {
|
||||
let meta = tokio::fs::metadata(&path).await?;
|
||||
let datetime: DateTime<Utc> = meta.modified()?.into();
|
||||
Ok((datetime, 0o644))
|
||||
}
|
||||
|
||||
pub fn try_get_file_name(path: &Path) -> Result<&str> {
|
||||
path.file_name()
|
||||
.and_then(|v| v.to_str())
|
||||
.ok_or_else(|| anyhow!("Failed to get file name of `{}`", path.display()))
|
||||
}
|
||||
|
||||
pub fn glob(pattern: &str, target: &str) -> bool {
|
||||
let pat = match ::glob::Pattern::new(pattern) {
|
||||
Ok(pat) => pat,
|
||||
Err(_) => return false,
|
||||
};
|
||||
pat.matches(target)
|
||||
}
|
||||
|
||||
// Load public certificate from file.
|
||||
#[cfg(feature = "tls")]
|
||||
pub fn load_certs<T: AsRef<Path>>(file_name: T) -> Result<Vec<CertificateDer<'static>>> {
|
||||
let mut certs = vec![];
|
||||
for cert in CertificateDer::pem_file_iter(file_name.as_ref()).with_context(|| {
|
||||
format!(
|
||||
"Failed to load cert file at `{}`",
|
||||
file_name.as_ref().display()
|
||||
)
|
||||
})? {
|
||||
let cert = cert.with_context(|| {
|
||||
format!(
|
||||
"Invalid certificate data in file `{}`",
|
||||
file_name.as_ref().display()
|
||||
)
|
||||
})?;
|
||||
certs.push(cert)
|
||||
}
|
||||
if certs.is_empty() {
|
||||
anyhow::bail!(
|
||||
"No supported certificate in file `{}`",
|
||||
file_name.as_ref().display()
|
||||
);
|
||||
}
|
||||
Ok(certs)
|
||||
}
|
||||
|
||||
// Load private key from file.
|
||||
#[cfg(feature = "tls")]
|
||||
pub fn load_private_key<T: AsRef<Path>>(file_name: T) -> Result<PrivateKeyDer<'static>> {
|
||||
PrivateKeyDer::from_pem_file(file_name.as_ref()).with_context(|| {
|
||||
format!(
|
||||
"Failed to load key file at `{}`",
|
||||
file_name.as_ref().display()
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
/// Parses an HTTP `Range` header value (e.g. `bytes=0-499,-200`) against a
/// resource of `size` bytes.
///
/// Returns the list of inclusive `(start, end)` byte ranges, or `None` when
/// the unit is not `bytes`, the header is malformed, or any single range is
/// unsatisfiable.
pub fn parse_range(range: &str, size: u64) -> Option<Vec<(u64, u64)>> {
    let (unit, ranges) = range.split_once('=')?;
    if unit != "bytes" {
        return None;
    }

    let mut result = Vec::new();
    for range in ranges.split(',') {
        let (start, end) = range.trim().split_once('-')?;
        if start.is_empty() {
            // Suffix range `-N`: the last N bytes of the resource.
            let offset = end.parse::<u64>().ok()?;
            // A zero-length suffix is unsatisfiable, and an oversized one
            // would underflow `size - offset` (panic in debug builds when
            // `size == 0`).
            if offset == 0 || offset > size {
                return None;
            }
            result.push((size - offset, size - 1));
        } else {
            let start = start.parse::<u64>().ok()?;
            if start >= size {
                return None;
            }
            if end.is_empty() {
                // Open-ended range `N-`: from N through the last byte.
                result.push((start, size - 1));
            } else {
                let end = end.parse::<u64>().ok()?;
                // Reject out-of-bounds ends and descending (start > end)
                // ranges, which would otherwise yield an invalid pair.
                if end >= size || end < start {
                    return None;
                }
                result.push((start, end));
            }
        }
    }

    Some(result)
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_glob_key() {
        // (pattern, target) pairs that must match.
        let matched = [
            ("", ""),
            (".*", ".git"),
            ("abc", "abc"),
            ("a*c", "abc"),
            ("a?c", "abc"),
            ("a*c", "abbc"),
            ("*c", "abc"),
            ("a*", "abc"),
            ("?c", "bc"),
            ("a?", "ab"),
            ("*.abc-cba", "xyz.abc-cba"),
            ("*.abc-cba", "123.xyz.abc-cba"),
            ("*.log", ".log"),
            ("*.log", "a.log"),
            ("*/", "abc/"),
        ];
        for (pattern, target) in matched {
            assert!(glob(pattern, target), "`{pattern}` should match `{target}`");
        }
        // (pattern, target) pairs that must NOT match.
        let unmatched = [
            ("abc", "adc"),
            ("abc", "abcd"),
            ("a?c", "abbc"),
            ("*.log", "log"),
            ("*/", "abc"),
        ];
        for (pattern, target) in unmatched {
            assert!(!glob(pattern, target), "`{pattern}` should not match `{target}`");
        }
    }

    #[test]
    fn test_parse_range() {
        // Single ranges in the three syntactic forms.
        assert_eq!(parse_range("bytes=0-499", 500), Some(vec![(0, 499)]));
        assert_eq!(parse_range("bytes=0-", 500), Some(vec![(0, 499)]));
        assert_eq!(parse_range("bytes=299-", 500), Some(vec![(299, 499)]));
        assert_eq!(parse_range("bytes=-500", 500), Some(vec![(0, 499)]));
        assert_eq!(parse_range("bytes=-300", 500), Some(vec![(200, 499)]));
        // Multiple comma-separated ranges.
        assert_eq!(
            parse_range("bytes=0-199, 100-399, 400-, -200", 500),
            Some(vec![(0, 199), (100, 399), (400, 499), (300, 499)])
        );
        // Out-of-bounds or malformed input rejects the whole header.
        assert_eq!(parse_range("bytes=500-", 500), None);
        assert_eq!(parse_range("bytes=-501", 500), None);
        assert_eq!(parse_range("bytes=0-500", 500), None);
        assert_eq!(parse_range("bytes=0-199,", 500), None);
        assert_eq!(parse_range("bytes=0-199, 500-", 500), None);
    }
}
|
||||
92
tests/allow.rs
Normal file
92
tests/allow.rs
Normal file
@@ -0,0 +1,92 @@
|
||||
mod fixtures;
|
||||
mod utils;
|
||||
|
||||
use fixtures::{server, Error, TestServer};
|
||||
use rstest::rstest;
|
||||
|
||||
#[rstest]
|
||||
fn default_not_allow_upload(server: TestServer) -> Result<(), Error> {
|
||||
let url = format!("{}file1", server.url());
|
||||
let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
|
||||
assert_eq!(resp.status(), 403);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn default_not_allow_delete(server: TestServer) -> Result<(), Error> {
|
||||
let url = format!("{}test.html", server.url());
|
||||
let resp = fetch!(b"DELETE", &url).send()?;
|
||||
assert_eq!(resp.status(), 403);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn default_not_allow_archive(server: TestServer) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(format!("{}?zip", server.url()))?;
|
||||
assert_eq!(resp.status(), 404);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn default_not_exist_dir(server: TestServer) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(format!("{}404/", server.url()))?;
|
||||
assert_eq!(resp.status(), 404);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn allow_upload_not_exist_dir(
|
||||
#[with(&["--allow-upload"])] server: TestServer,
|
||||
) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(format!("{}404/", server.url()))?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn allow_upload_no_override(#[with(&["--allow-upload"])] server: TestServer) -> Result<(), Error> {
|
||||
let url = format!("{}index.html", server.url());
|
||||
let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
|
||||
assert_eq!(resp.status(), 403);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn allow_delete_no_override(#[with(&["--allow-delete"])] server: TestServer) -> Result<(), Error> {
|
||||
let url = format!("{}index.html", server.url());
|
||||
let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
|
||||
assert_eq!(resp.status(), 403);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn allow_upload_delete_can_override(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
|
||||
let url = format!("{}index.html", server.url());
|
||||
let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
|
||||
assert_eq!(resp.status(), 201);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn allow_search(#[with(&["--allow-search"])] server: TestServer) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(format!("{}?q={}", server.url(), "test.html"))?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
let paths = utils::retrieve_index_paths(&resp.text()?);
|
||||
assert!(!paths.is_empty());
|
||||
for p in paths {
|
||||
assert!(p.contains("test.html"));
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn allow_archive(#[with(&["--allow-archive"])] server: TestServer) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(format!("{}?zip", server.url()))?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
assert_eq!(
|
||||
resp.headers().get("content-type").unwrap(),
|
||||
"application/zip"
|
||||
);
|
||||
assert!(resp.headers().contains_key("content-disposition"));
|
||||
Ok(())
|
||||
}
|
||||
32
tests/args.rs
Normal file
32
tests/args.rs
Normal file
@@ -0,0 +1,32 @@
|
||||
//! Run file server with different args
|
||||
|
||||
mod fixtures;
|
||||
mod utils;
|
||||
|
||||
use fixtures::{server, Error, TestServer};
|
||||
use rstest::rstest;
|
||||
|
||||
#[rstest]
|
||||
fn path_prefix_index(#[with(&["--path-prefix", "xyz"])] server: TestServer) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(format!("{}{}", server.url(), "xyz"))?;
|
||||
assert_resp_paths!(resp);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn path_prefix_file(#[with(&["--path-prefix", "xyz"])] server: TestServer) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(format!("{}{}/index.html", server.url(), "xyz"))?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
assert_eq!(resp.text()?, "This is index.html");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn path_prefix_propfind(
|
||||
#[with(&["--path-prefix", "xyz"])] server: TestServer,
|
||||
) -> Result<(), Error> {
|
||||
let resp = fetch!(b"PROPFIND", format!("{}{}", server.url(), "xyz")).send()?;
|
||||
let text = resp.text()?;
|
||||
assert!(text.contains("<D:href>/xyz/</D:href>"));
|
||||
Ok(())
|
||||
}
|
||||
125
tests/assets.rs
Normal file
125
tests/assets.rs
Normal file
@@ -0,0 +1,125 @@
|
||||
mod fixtures;
|
||||
mod utils;
|
||||
|
||||
use assert_fs::fixture::TempDir;
|
||||
use fixtures::{port, server, tmpdir, wait_for_port, Error, TestServer, DIR_ASSETS};
|
||||
use rstest::rstest;
|
||||
use std::process::{Command, Stdio};
|
||||
|
||||
#[rstest]
|
||||
fn assets(server: TestServer) -> Result<(), Error> {
|
||||
let ver = env!("CARGO_PKG_VERSION");
|
||||
let resp = reqwest::blocking::get(server.url())?;
|
||||
let index_js = format!("/__dufs_v{ver}__/index.js");
|
||||
let index_css = format!("/__dufs_v{ver}__/index.css");
|
||||
let favicon_ico = format!("/__dufs_v{ver}__/favicon.ico");
|
||||
let text = resp.text()?;
|
||||
println!("{text}");
|
||||
assert!(text.contains(&format!(r#"href="{index_css}""#)));
|
||||
assert!(text.contains(&format!(r#"href="{favicon_ico}""#)));
|
||||
assert!(text.contains(&format!(r#"src="{index_js}""#)));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn asset_js(server: TestServer) -> Result<(), Error> {
|
||||
let url = format!(
|
||||
"{}__dufs_v{}__/index.js",
|
||||
server.url(),
|
||||
env!("CARGO_PKG_VERSION")
|
||||
);
|
||||
let resp = reqwest::blocking::get(url)?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
assert_eq!(
|
||||
resp.headers().get("content-type").unwrap(),
|
||||
"application/javascript; charset=UTF-8"
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn asset_css(server: TestServer) -> Result<(), Error> {
|
||||
let url = format!(
|
||||
"{}__dufs_v{}__/index.css",
|
||||
server.url(),
|
||||
env!("CARGO_PKG_VERSION")
|
||||
);
|
||||
let resp = reqwest::blocking::get(url)?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
assert_eq!(
|
||||
resp.headers().get("content-type").unwrap(),
|
||||
"text/css; charset=UTF-8"
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn asset_ico(server: TestServer) -> Result<(), Error> {
|
||||
let url = format!(
|
||||
"{}__dufs_v{}__/favicon.ico",
|
||||
server.url(),
|
||||
env!("CARGO_PKG_VERSION")
|
||||
);
|
||||
let resp = reqwest::blocking::get(url)?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
assert_eq!(resp.headers().get("content-type").unwrap(), "image/x-icon");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn assets_with_prefix(#[with(&["--path-prefix", "xyz"])] server: TestServer) -> Result<(), Error> {
|
||||
let ver = env!("CARGO_PKG_VERSION");
|
||||
let resp = reqwest::blocking::get(format!("{}xyz/", server.url()))?;
|
||||
let index_js = format!("/xyz/__dufs_v{ver}__/index.js");
|
||||
let index_css = format!("/xyz/__dufs_v{ver}__/index.css");
|
||||
let favicon_ico = format!("/xyz/__dufs_v{ver}__/favicon.ico");
|
||||
let text = resp.text()?;
|
||||
assert!(text.contains(&format!(r#"href="{index_css}""#)));
|
||||
assert!(text.contains(&format!(r#"href="{favicon_ico}""#)));
|
||||
assert!(text.contains(&format!(r#"src="{index_js}""#)));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn asset_js_with_prefix(
|
||||
#[with(&["--path-prefix", "xyz"])] server: TestServer,
|
||||
) -> Result<(), Error> {
|
||||
let url = format!(
|
||||
"{}xyz/__dufs_v{}__/index.js",
|
||||
server.url(),
|
||||
env!("CARGO_PKG_VERSION")
|
||||
);
|
||||
let resp = reqwest::blocking::get(url)?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
assert_eq!(
|
||||
resp.headers().get("content-type").unwrap(),
|
||||
"application/javascript; charset=UTF-8"
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn assets_override(tmpdir: TempDir, port: u16) -> Result<(), Error> {
|
||||
let mut child = Command::new(assert_cmd::cargo::cargo_bin!())
|
||||
.arg(tmpdir.path())
|
||||
.arg("-p")
|
||||
.arg(port.to_string())
|
||||
.arg("--assets")
|
||||
.arg(tmpdir.join(DIR_ASSETS))
|
||||
.stdout(Stdio::piped())
|
||||
.spawn()?;
|
||||
|
||||
wait_for_port(port);
|
||||
|
||||
let url = format!("http://localhost:{port}");
|
||||
let resp = reqwest::blocking::get(&url)?;
|
||||
assert!(resp.text()?.starts_with(&format!(
|
||||
"/__dufs_v{}__/index.js;<template id=\"index-data\">",
|
||||
env!("CARGO_PKG_VERSION")
|
||||
)));
|
||||
let resp = reqwest::blocking::get(&url)?;
|
||||
assert_resp_paths!(resp);
|
||||
|
||||
child.kill()?;
|
||||
Ok(())
|
||||
}
|
||||
396
tests/auth.rs
Normal file
396
tests/auth.rs
Normal file
@@ -0,0 +1,396 @@
|
||||
mod digest_auth_util;
|
||||
mod fixtures;
|
||||
mod utils;
|
||||
|
||||
use digest_auth_util::send_with_digest_auth;
|
||||
use fixtures::{server, Error, TestServer};
|
||||
use indexmap::IndexSet;
|
||||
use rstest::rstest;
|
||||
|
||||
#[rstest]
|
||||
fn no_auth(#[with(&["--auth", "user:pass@/:rw", "-A"])] server: TestServer) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(server.url())?;
|
||||
assert_eq!(resp.status(), 401);
|
||||
let values: Vec<&str> = resp
|
||||
.headers()
|
||||
.get_all("www-authenticate")
|
||||
.iter()
|
||||
.map(|v| v.to_str().unwrap())
|
||||
.collect();
|
||||
assert!(values[0].starts_with("Digest"));
|
||||
assert!(values[1].starts_with("Basic"));
|
||||
|
||||
let url = format!("{}file1", server.url());
|
||||
let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
|
||||
assert_eq!(resp.status(), 401);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[case(server(&["--auth", "user:pass@/:rw", "-A"]), "user", "pass")]
|
||||
#[case(server(&["--auth", "user:pa:ss@1@/:rw", "-A"]), "user", "pa:ss@1")]
|
||||
fn auth(#[case] server: TestServer, #[case] user: &str, #[case] pass: &str) -> Result<(), Error> {
|
||||
let url = format!("{}file1", server.url());
|
||||
let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
|
||||
assert_eq!(resp.status(), 401);
|
||||
let resp = send_with_digest_auth(fetch!(b"PUT", &url).body(b"abc".to_vec()), user, pass)?;
|
||||
assert_eq!(resp.status(), 201);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn invalid_auth(
|
||||
#[with(&["-a", "user:pass@/:rw", "-a", "@/", "-A"])] server: TestServer,
|
||||
) -> Result<(), Error> {
|
||||
let resp = fetch!(b"GET", server.url())
|
||||
.basic_auth("user", Some("-"))
|
||||
.send()?;
|
||||
assert_eq!(resp.status(), 401);
|
||||
let resp = fetch!(b"GET", server.url())
|
||||
.basic_auth("-", Some("pass"))
|
||||
.send()?;
|
||||
assert_eq!(resp.status(), 401);
|
||||
let resp = fetch!(b"GET", server.url())
|
||||
.header("Authorization", "Basic Og==")
|
||||
.send()?;
|
||||
assert_eq!(resp.status(), 401);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[case(server(&["--auth", "user:$6$gQxZwKyWn/ZmWEA2$4uV7KKMnSUnET2BtWTj/9T5.Jq3h/MdkOlnIl5hdlTxDZ4MZKmJ.kl6C.NL9xnNPqC4lVHC1vuI0E5cLpTJX81@/:rw", "-A"]), "user", "pass")]
|
||||
#[case(server(&["--auth", "user:$6$YV1J6OHZAAgbzCbS$V55ZEgvJ6JFdz1nLO4AD696PRHAJYhfQf.Gy2HafrCz5itnbgNTtTgfUSqZrt4BJ7FcpRfSt/QZzAan68pido0@/:rw", "-A"]), "user", "pa:ss@1")]
|
||||
fn auth_hashed_password(
|
||||
#[case] server: TestServer,
|
||||
#[case] user: &str,
|
||||
#[case] pass: &str,
|
||||
) -> Result<(), Error> {
|
||||
let url = format!("{}file1", server.url());
|
||||
let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
|
||||
assert_eq!(resp.status(), 401);
|
||||
if let Err(err) = send_with_digest_auth(fetch!(b"PUT", &url).body(b"abc".to_vec()), user, pass)
|
||||
{
|
||||
assert_eq!(
|
||||
err.to_string(),
|
||||
r#"Missing "realm" in header: Basic realm="DUFS""#
|
||||
);
|
||||
}
|
||||
let resp = fetch!(b"PUT", &url)
|
||||
.body(b"abc".to_vec())
|
||||
.basic_auth(user, Some(pass))
|
||||
.send()?;
|
||||
assert_eq!(resp.status(), 201);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn auth_and_public(
|
||||
#[with(&["-a", "user:pass@/:rw", "-a", "@/", "-A"])] server: TestServer,
|
||||
) -> Result<(), Error> {
|
||||
let url = format!("{}file1", server.url());
|
||||
let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
|
||||
assert_eq!(resp.status(), 401);
|
||||
let resp = send_with_digest_auth(fetch!(b"PUT", &url).body(b"abc".to_vec()), "user", "pass")?;
|
||||
assert_eq!(resp.status(), 201);
|
||||
let resp = fetch!(b"GET", &url).send()?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
assert_eq!(resp.text()?, "abc");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn auth_skip(#[with(&["--auth", "@/"])] server: TestServer) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(server.url())?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn auth_skip_on_options_method(
|
||||
#[with(&["--auth", "user:pass@/:rw"])] server: TestServer,
|
||||
) -> Result<(), Error> {
|
||||
let url = format!("{}index.html", server.url());
|
||||
let resp = fetch!(b"OPTIONS", &url).send()?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn auth_skip_if_no_auth_user(server: TestServer) -> Result<(), Error> {
|
||||
let url = format!("{}index.html", server.url());
|
||||
let resp = fetch!(b"GET", &url)
|
||||
.basic_auth("user", Some("pass"))
|
||||
.send()?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn auth_no_skip_if_anonymous(
|
||||
#[with(&["--auth", "@/:ro"])] server: TestServer,
|
||||
) -> Result<(), Error> {
|
||||
let url = format!("{}index.html", server.url());
|
||||
let resp = fetch!(b"GET", &url)
|
||||
.basic_auth("user", Some("pass"))
|
||||
.send()?;
|
||||
assert_eq!(resp.status(), 401);
|
||||
let resp = fetch!(b"GET", &url).send()?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
let resp = fetch!(b"DELETE", &url)
|
||||
.basic_auth("user", Some("pass"))
|
||||
.send()?;
|
||||
assert_eq!(resp.status(), 401);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn auth_check(
|
||||
#[with(&["--auth", "user:pass@/:rw", "--auth", "user2:pass2@/", "-A"])] server: TestServer,
|
||||
) -> Result<(), Error> {
|
||||
let url = format!("{}", server.url());
|
||||
let resp = fetch!(b"CHECKAUTH", &url).send()?;
|
||||
assert_eq!(resp.status(), 401);
|
||||
let resp = send_with_digest_auth(fetch!(b"CHECKAUTH", &url), "user", "pass")?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
let resp = send_with_digest_auth(fetch!(b"CHECKAUTH", &url), "user2", "pass2")?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn auth_check2(
|
||||
#[with(&["--auth", "user:pass@/:rw|user2:pass2@/", "-A"])] server: TestServer,
|
||||
) -> Result<(), Error> {
|
||||
let url = format!("{}", server.url());
|
||||
let resp = fetch!(b"CHECKAUTH", &url).send()?;
|
||||
assert_eq!(resp.status(), 401);
|
||||
let resp = send_with_digest_auth(fetch!(b"CHECKAUTH", &url), "user", "pass")?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
let resp = send_with_digest_auth(fetch!(b"CHECKAUTH", &url), "user2", "pass2")?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn auth_check3(
|
||||
#[with(&["--auth", "user:pass@/:rw", "--auth", "@/dir1:rw", "-A"])] server: TestServer,
|
||||
) -> Result<(), Error> {
|
||||
let url = format!("{}dir1/", server.url());
|
||||
let resp = fetch!(b"CHECKAUTH", &url).send()?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
let resp = fetch!(b"CHECKAUTH", format!("{url}?login")).send()?;
|
||||
assert_eq!(resp.status(), 401);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn auth_logout(
|
||||
#[with(&["--auth", "user:pass@/:rw", "-A"])] server: TestServer,
|
||||
) -> Result<(), Error> {
|
||||
let url = format!("{}index.html", server.url());
|
||||
let resp = fetch!(b"LOGOUT", &url).send()?;
|
||||
assert_eq!(resp.status(), 401);
|
||||
let resp = send_with_digest_auth(fetch!(b"LOGOUT", &url), "user", "pass")?;
|
||||
assert_eq!(resp.status(), 401);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn auth_readonly(
|
||||
#[with(&["--auth", "user:pass@/:rw", "--auth", "user2:pass2@/", "-A"])] server: TestServer,
|
||||
) -> Result<(), Error> {
|
||||
let url = format!("{}index.html", server.url());
|
||||
let resp = fetch!(b"GET", &url).send()?;
|
||||
assert_eq!(resp.status(), 401);
|
||||
let resp = send_with_digest_auth(fetch!(b"GET", &url), "user2", "pass2")?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
let url = format!("{}file1", server.url());
|
||||
let resp = send_with_digest_auth(fetch!(b"PUT", &url).body(b"abc".to_vec()), "user2", "pass2")?;
|
||||
assert_eq!(resp.status(), 403);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn auth_nest(
|
||||
#[with(&["--auth", "user:pass@/:rw", "--auth", "user2:pass2@/", "--auth", "user3:pass3@/dir1:rw", "-A"])]
|
||||
server: TestServer,
|
||||
) -> Result<(), Error> {
|
||||
let url = format!("{}dir1/file1", server.url());
|
||||
let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
|
||||
assert_eq!(resp.status(), 401);
|
||||
let resp = send_with_digest_auth(fetch!(b"PUT", &url).body(b"abc".to_vec()), "user3", "pass3")?;
|
||||
assert_eq!(resp.status(), 201);
|
||||
let resp = send_with_digest_auth(fetch!(b"PUT", &url).body(b"abc".to_vec()), "user", "pass")?;
|
||||
assert_eq!(resp.status(), 201);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn auth_nest_share(
|
||||
#[with(&["--auth", "@/", "--auth", "user:pass@/:rw", "--auth", "user3:pass3@/dir1:rw", "-A"])]
|
||||
server: TestServer,
|
||||
) -> Result<(), Error> {
|
||||
let url = format!("{}index.html", server.url());
|
||||
let resp = fetch!(b"GET", &url).send()?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[case(server(&["--auth", "user:pass@/:rw", "-A"]), "user", "pass")]
|
||||
#[case(server(&["--auth", "u1:p1@/:rw", "-A"]), "u1", "p1")]
|
||||
fn auth_basic(
|
||||
#[case] server: TestServer,
|
||||
#[case] user: &str,
|
||||
#[case] pass: &str,
|
||||
) -> Result<(), Error> {
|
||||
let url = format!("{}file1", server.url());
|
||||
let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
|
||||
assert_eq!(resp.status(), 401);
|
||||
let resp = fetch!(b"PUT", &url)
|
||||
.body(b"abc".to_vec())
|
||||
.basic_auth(user, Some(pass))
|
||||
.send()?;
|
||||
assert_eq!(resp.status(), 201);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn auth_webdav_move(
|
||||
#[with(&["--auth", "user:pass@/:rw", "--auth", "user3:pass3@/dir1:rw", "-A"])]
|
||||
server: TestServer,
|
||||
) -> Result<(), Error> {
|
||||
let origin_url = format!("{}dir1/test.html", server.url());
|
||||
let new_url = format!("{}test2.html", server.url());
|
||||
let resp = send_with_digest_auth(
|
||||
fetch!(b"MOVE", &origin_url).header("Destination", &new_url),
|
||||
"user3",
|
||||
"pass3",
|
||||
)?;
|
||||
assert_eq!(resp.status(), 403);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn auth_webdav_copy(
|
||||
#[with(&["--auth", "user:pass@/:rw", "--auth", "user3:pass3@/dir1:rw", "-A"])]
|
||||
server: TestServer,
|
||||
) -> Result<(), Error> {
|
||||
let origin_url = format!("{}dir1/test.html", server.url());
|
||||
let new_url = format!("{}test2.html", server.url());
|
||||
let resp = send_with_digest_auth(
|
||||
fetch!(b"COPY", &origin_url).header("Destination", &new_url),
|
||||
"user3",
|
||||
"pass3",
|
||||
)?;
|
||||
assert_eq!(resp.status(), 403);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn auth_path_prefix(
|
||||
#[with(&["--auth", "user:pass@/:rw", "--path-prefix", "xyz", "-A"])] server: TestServer,
|
||||
) -> Result<(), Error> {
|
||||
let url = format!("{}xyz/index.html", server.url());
|
||||
let resp = fetch!(b"GET", &url).send()?;
|
||||
assert_eq!(resp.status(), 401);
|
||||
let resp = send_with_digest_auth(fetch!(b"GET", &url), "user", "pass")?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn auth_partial_index(
|
||||
#[with(&["--auth", "user:pass@/dir1:rw,/dir2:rw", "-A"])] server: TestServer,
|
||||
) -> Result<(), Error> {
|
||||
let resp = send_with_digest_auth(fetch!(b"GET", server.url()), "user", "pass")?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
let paths = utils::retrieve_index_paths(&resp.text()?);
|
||||
assert_eq!(paths, IndexSet::from(["dir1/".into(), "dir2/".into()]));
|
||||
let resp = send_with_digest_auth(
|
||||
fetch!(b"GET", format!("{}?q={}", server.url(), "test.html")),
|
||||
"user",
|
||||
"pass",
|
||||
)?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
let paths = utils::retrieve_index_paths(&resp.text()?);
|
||||
assert_eq!(
|
||||
paths,
|
||||
IndexSet::from(["dir1/test.html".into(), "dir2/test.html".into()])
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn no_auth_propfind_dir(
|
||||
#[with(&["--auth", "admin:admin@/:rw", "--auth", "@/dir-assets", "-A"])] server: TestServer,
|
||||
) -> Result<(), Error> {
|
||||
let resp = fetch!(b"PROPFIND", server.url()).send()?;
|
||||
assert_eq!(resp.status(), 207);
|
||||
let body = resp.text()?;
|
||||
assert!(body.contains("<D:href>/dir-assets/</D:href>"));
|
||||
assert!(body.contains("<D:href>/dir1/</D:href>"));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn auth_propfind_dir(
|
||||
#[with(&["--auth", "admin:admin@/:rw", "--auth", "user:pass@/dir-assets", "-A"])]
|
||||
server: TestServer,
|
||||
) -> Result<(), Error> {
|
||||
let resp = send_with_digest_auth(fetch!(b"PROPFIND", server.url()), "user", "pass")?;
|
||||
assert_eq!(resp.status(), 207);
|
||||
let body = resp.text()?;
|
||||
assert!(body.contains("<D:href>/dir-assets/</D:href>"));
|
||||
assert!(!body.contains("<D:href>/dir1/</D:href>"));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn auth_data(
|
||||
#[with(&["-a", "user:pass@/:rw", "-a", "@/", "-A"])] server: TestServer,
|
||||
) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(server.url())?;
|
||||
let content = resp.text()?;
|
||||
let json = utils::retrieve_json(&content).unwrap();
|
||||
assert_eq!(json["allow_delete"], serde_json::Value::Bool(false));
|
||||
assert_eq!(json["allow_upload"], serde_json::Value::Bool(false));
|
||||
let resp = fetch!(b"GET", server.url())
|
||||
.basic_auth("user", Some("pass"))
|
||||
.send()?;
|
||||
let content = resp.text()?;
|
||||
let json = utils::retrieve_json(&content).unwrap();
|
||||
assert_eq!(json["allow_delete"], serde_json::Value::Bool(true));
|
||||
assert_eq!(json["allow_upload"], serde_json::Value::Bool(true));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn auth_shadow(
|
||||
#[with(&["--auth", "user:pass@/:rw", "-a", "@/dir1", "-A"])] server: TestServer,
|
||||
) -> Result<(), Error> {
|
||||
let url = format!("{}dir1/test.txt", server.url());
|
||||
let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
|
||||
assert_eq!(resp.status(), 401);
|
||||
|
||||
let resp = send_with_digest_auth(fetch!(b"PUT", &url).body(b"abc".to_vec()), "user", "pass")?;
|
||||
assert_eq!(resp.status(), 201);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn token_auth(#[with(&["-a", "user:pass@/"])] server: TestServer) -> Result<(), Error> {
|
||||
let url = format!("{}index.html", server.url());
|
||||
let resp = fetch!(b"GET", &url).send()?;
|
||||
assert_eq!(resp.status(), 401);
|
||||
let url = format!("{}index.html?tokengen", server.url());
|
||||
let resp = fetch!(b"GET", &url)
|
||||
.basic_auth("user", Some("pass"))
|
||||
.send()?;
|
||||
let token = resp.text()?;
|
||||
let url = format!("{}index.html?token={token}", server.url());
|
||||
let resp = fetch!(b"GET", &url).send()?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
Ok(())
|
||||
}
|
||||
84
tests/bind.rs
Normal file
84
tests/bind.rs
Normal file
@@ -0,0 +1,84 @@
|
||||
mod fixtures;
|
||||
|
||||
use fixtures::{port, server, tmpdir, wait_for_port, Error, TestServer};
|
||||
|
||||
use assert_cmd::prelude::*;
|
||||
use assert_fs::fixture::TempDir;
|
||||
use regex::Regex;
|
||||
use rstest::rstest;
|
||||
use std::io::Read;
|
||||
use std::process::{Command, Stdio};
|
||||
|
||||
#[rstest]
|
||||
#[case(&["-b", "20.205.243.166"])]
|
||||
fn bind_fails(tmpdir: TempDir, port: u16, #[case] args: &[&str]) -> Result<(), Error> {
|
||||
Command::new(assert_cmd::cargo::cargo_bin!())
|
||||
.arg(tmpdir.path())
|
||||
.arg("-p")
|
||||
.arg(port.to_string())
|
||||
.args(args)
|
||||
.assert()
|
||||
.stderr(predicates::str::contains("Failed to bind"))
|
||||
.failure();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[case(server(&[] as &[&str]), true, true)]
|
||||
#[case(server(&["-b", "0.0.0.0"]), true, false)]
|
||||
#[case(server(&["-b", "127.0.0.1", "-b", "::1"]), true, true)]
|
||||
fn bind_ipv4_ipv6(
|
||||
#[case] server: TestServer,
|
||||
#[case] bind_ipv4: bool,
|
||||
#[case] bind_ipv6: bool,
|
||||
) -> Result<(), Error> {
|
||||
assert_eq!(
|
||||
reqwest::blocking::get(format!("http://127.0.0.1:{}", server.port()).as_str()).is_ok(),
|
||||
bind_ipv4
|
||||
);
|
||||
assert_eq!(
|
||||
reqwest::blocking::get(format!("http://[::1]:{}", server.port()).as_str()).is_ok(),
|
||||
bind_ipv6
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[case(&[] as &[&str])]
|
||||
#[case(&["--path-prefix", "/prefix"])]
|
||||
fn validate_printed_urls(tmpdir: TempDir, port: u16, #[case] args: &[&str]) -> Result<(), Error> {
|
||||
let mut child = Command::new(assert_cmd::cargo::cargo_bin!())
|
||||
.arg(tmpdir.path())
|
||||
.arg("-p")
|
||||
.arg(port.to_string())
|
||||
.args(args)
|
||||
.stdout(Stdio::piped())
|
||||
.spawn()?;
|
||||
|
||||
wait_for_port(port);
|
||||
|
||||
let stdout = child.stdout.as_mut().expect("Failed to get stdout");
|
||||
let mut buf = [0; 1000];
|
||||
let buf_len = stdout.read(&mut buf)?;
|
||||
let output = std::str::from_utf8(&buf[0..buf_len])?;
|
||||
let url_lines = output
|
||||
.lines()
|
||||
.take_while(|line| !line.is_empty()) /* non-empty lines */
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n");
|
||||
|
||||
let urls = Regex::new(r"http://[a-zA-Z0-9\.\[\]:/]+")
|
||||
.unwrap()
|
||||
.captures_iter(url_lines.as_str())
|
||||
.filter_map(|caps| caps.get(0).map(|v| v.as_str()))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
assert!(!urls.is_empty());
|
||||
reqwest::blocking::get(urls[0])?.error_for_status()?;
|
||||
|
||||
child.kill()?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
80
tests/cache.rs
Normal file
80
tests/cache.rs
Normal file
@@ -0,0 +1,80 @@
|
||||
mod fixtures;
|
||||
mod utils;
|
||||
|
||||
use chrono::{DateTime, Duration};
|
||||
use fixtures::{server, Error, TestServer};
|
||||
use reqwest::header::{
|
||||
HeaderName, ETAG, IF_MATCH, IF_MODIFIED_SINCE, IF_NONE_MATCH, IF_UNMODIFIED_SINCE,
|
||||
LAST_MODIFIED,
|
||||
};
|
||||
use reqwest::StatusCode;
|
||||
use rstest::rstest;
|
||||
|
||||
#[rstest]
|
||||
#[case(IF_UNMODIFIED_SINCE, Duration::days(1), StatusCode::OK)]
|
||||
#[case(IF_UNMODIFIED_SINCE, Duration::days(0), StatusCode::OK)]
|
||||
#[case(IF_UNMODIFIED_SINCE, Duration::days(-1), StatusCode::PRECONDITION_FAILED)]
|
||||
#[case(IF_MODIFIED_SINCE, Duration::days(1), StatusCode::NOT_MODIFIED)]
|
||||
#[case(IF_MODIFIED_SINCE, Duration::days(0), StatusCode::NOT_MODIFIED)]
|
||||
#[case(IF_MODIFIED_SINCE, Duration::days(-1), StatusCode::OK)]
|
||||
fn get_file_with_if_modified_since_condition(
|
||||
#[case] header_condition: HeaderName,
|
||||
#[case] duration_after_file_modified: Duration,
|
||||
#[case] expected_code: StatusCode,
|
||||
server: TestServer,
|
||||
) -> Result<(), Error> {
|
||||
let resp = fetch!(b"HEAD", format!("{}index.html", server.url())).send()?;
|
||||
|
||||
let last_modified = resp
|
||||
.headers()
|
||||
.get(LAST_MODIFIED)
|
||||
.and_then(|h| h.to_str().ok())
|
||||
.and_then(|s| DateTime::parse_from_rfc2822(s).ok())
|
||||
.expect("Received no valid last modified header");
|
||||
|
||||
let req_modified_time = (last_modified + duration_after_file_modified)
|
||||
.format("%a, %d %b %Y %T GMT")
|
||||
.to_string();
|
||||
|
||||
let resp = fetch!(b"GET", format!("{}index.html", server.url()))
|
||||
.header(header_condition, req_modified_time)
|
||||
.send()?;
|
||||
|
||||
assert_eq!(resp.status(), expected_code);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn same_etag(etag: &str) -> String {
|
||||
etag.to_owned()
|
||||
}
|
||||
|
||||
fn different_etag(etag: &str) -> String {
|
||||
format!("{etag}1234")
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[case(IF_MATCH, same_etag, StatusCode::OK)]
|
||||
#[case(IF_MATCH, different_etag, StatusCode::PRECONDITION_FAILED)]
|
||||
#[case(IF_NONE_MATCH, same_etag, StatusCode::NOT_MODIFIED)]
|
||||
#[case(IF_NONE_MATCH, different_etag, StatusCode::OK)]
|
||||
fn get_file_with_etag_match(
|
||||
#[case] header_condition: HeaderName,
|
||||
#[case] etag_modifier: fn(&str) -> String,
|
||||
#[case] expected_code: StatusCode,
|
||||
server: TestServer,
|
||||
) -> Result<(), Error> {
|
||||
let resp = fetch!(b"HEAD", format!("{}index.html", server.url())).send()?;
|
||||
|
||||
let etag = resp
|
||||
.headers()
|
||||
.get(ETAG)
|
||||
.and_then(|h| h.to_str().ok())
|
||||
.expect("Received no valid etag header");
|
||||
|
||||
let resp = fetch!(b"GET", format!("{}index.html", server.url()))
|
||||
.header(header_condition, etag_modifier(etag))
|
||||
.send()?;
|
||||
|
||||
assert_eq!(resp.status(), expected_code);
|
||||
Ok(())
|
||||
}
|
||||
35
tests/cli.rs
Normal file
35
tests/cli.rs
Normal file
@@ -0,0 +1,35 @@
|
||||
//! Run cli with different args, not starting a server
|
||||
|
||||
mod fixtures;
|
||||
|
||||
use assert_cmd::prelude::*;
|
||||
use clap::ValueEnum;
|
||||
use clap_complete::Shell;
|
||||
use fixtures::Error;
|
||||
use std::process::Command;
|
||||
|
||||
#[test]
|
||||
/// Show help and exit.
|
||||
fn help_shows() -> Result<(), Error> {
|
||||
Command::new(assert_cmd::cargo::cargo_bin!())
|
||||
.arg("-h")
|
||||
.assert()
|
||||
.success();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
/// Print completions and exit.
|
||||
fn print_completions() -> Result<(), Error> {
|
||||
// let shell_enums = EnumValueParser::<Shell>::new();
|
||||
for shell in Shell::value_variants() {
|
||||
Command::new(assert_cmd::cargo::cargo_bin!())
|
||||
.arg("--completions")
|
||||
.arg(shell.to_string())
|
||||
.assert()
|
||||
.success();
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
54
tests/config.rs
Normal file
54
tests/config.rs
Normal file
@@ -0,0 +1,54 @@
|
||||
mod digest_auth_util;
|
||||
mod fixtures;
|
||||
mod utils;
|
||||
|
||||
use assert_fs::TempDir;
|
||||
use digest_auth_util::send_with_digest_auth;
|
||||
use fixtures::{port, tmpdir, wait_for_port, Error};
|
||||
use rstest::rstest;
|
||||
use std::path::PathBuf;
|
||||
use std::process::{Command, Stdio};
|
||||
|
||||
#[rstest]
|
||||
fn use_config_file(tmpdir: TempDir, port: u16) -> Result<(), Error> {
|
||||
let config_path = get_config_path().display().to_string();
|
||||
let mut child = Command::new(assert_cmd::cargo::cargo_bin!())
|
||||
.arg(tmpdir.path())
|
||||
.arg("-p")
|
||||
.arg(port.to_string())
|
||||
.args(["--config", &config_path])
|
||||
.stdout(Stdio::piped())
|
||||
.spawn()?;
|
||||
|
||||
wait_for_port(port);
|
||||
|
||||
let url = format!("http://localhost:{port}/dufs/index.html");
|
||||
let resp = fetch!(b"GET", &url).send()?;
|
||||
assert_eq!(resp.status(), 401);
|
||||
|
||||
let url = format!("http://localhost:{port}/dufs/index.html");
|
||||
let resp = send_with_digest_auth(fetch!(b"GET", &url), "user", "pass")?;
|
||||
assert_eq!(resp.text()?, "This is index.html");
|
||||
|
||||
let url = format!("http://localhost:{port}/dufs?simple");
|
||||
let resp = send_with_digest_auth(fetch!(b"GET", &url), "user", "pass")?;
|
||||
let text: String = resp.text().unwrap();
|
||||
assert!(text.split('\n').any(|c| c == "dir1/"));
|
||||
assert!(!text.split('\n').any(|c| c == "dir3/"));
|
||||
assert!(!text.split('\n').any(|c| c == "test.txt"));
|
||||
|
||||
let url = format!("http://localhost:{port}/dufs/dir1/upload.txt");
|
||||
let resp = send_with_digest_auth(fetch!(b"PUT", &url).body("Hello"), "user", "pass")?;
|
||||
assert_eq!(resp.status(), 201);
|
||||
|
||||
child.kill()?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get_config_path() -> PathBuf {
|
||||
let mut path = std::env::current_dir().expect("Failed to get current directory");
|
||||
path.push("tests");
|
||||
path.push("data");
|
||||
path.push("config.yaml");
|
||||
path
|
||||
}
|
||||
33
tests/cors.rs
Normal file
33
tests/cors.rs
Normal file
@@ -0,0 +1,33 @@
|
||||
mod fixtures;
|
||||
mod utils;
|
||||
|
||||
use fixtures::{server, Error, TestServer};
|
||||
use rstest::rstest;
|
||||
|
||||
#[rstest]
|
||||
fn cors(#[with(&["--enable-cors"])] server: TestServer) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(server.url())?;
|
||||
assert_eq!(
|
||||
resp.headers().get("access-control-allow-origin").unwrap(),
|
||||
"*"
|
||||
);
|
||||
assert_eq!(
|
||||
resp.headers()
|
||||
.get("access-control-allow-credentials")
|
||||
.unwrap(),
|
||||
"true"
|
||||
);
|
||||
assert_eq!(
|
||||
resp.headers().get("access-control-allow-methods").unwrap(),
|
||||
"*"
|
||||
);
|
||||
assert_eq!(
|
||||
resp.headers().get("access-control-allow-headers").unwrap(),
|
||||
"Authorization,*"
|
||||
);
|
||||
assert_eq!(
|
||||
resp.headers().get("access-control-expose-headers").unwrap(),
|
||||
"Authorization,*"
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
29
tests/data/cert.pem
Normal file
29
tests/data/cert.pem
Normal file
@@ -0,0 +1,29 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIFCTCCAvGgAwIBAgIUcegjikATvwNSIbN43QybKWIcKSMwDQYJKoZIhvcNAQEL
|
||||
BQAwFDESMBAGA1UEAwwJbG9jYWxob3N0MB4XDTIyMDYxMTA4NTQyMloXDTMyMDYw
|
||||
ODA4NTQyMlowFDESMBAGA1UEAwwJbG9jYWxob3N0MIICIjANBgkqhkiG9w0BAQEF
|
||||
AAOCAg8AMIICCgKCAgEAo2wdMbFPkX7CAF/Y+hVj5bwm4dlxhwW2Z9Ic2RZFC5w2
|
||||
oK2XwyasDBEqDlgv/bN4xObAVlDZ/4/SuTVSDrNB8dtQl7GTWptpbFKJUdNocU88
|
||||
wqd4k/cLZg2aiQqnZKD88w/AxXnYw+F8yU0pFGj9GX0S5at3/V1hrBVxVO8Y99bb
|
||||
gnJA8NMm0Pw2xYZS++ULuzoECk0xbNdtbtPrIuweI5mMvsJvtiw67EIdl3N9Lj5p
|
||||
L4a7X1C0Xk5H4mOcwM0qq3m31HsCW91PMCjU6suo764rx5Jqv0n9HCNxdiSEadCw
|
||||
f+GrmKtFOw3DcGPETg5AJR8H3rG1agKKjI+vRtL/tZ7coFOhZKXdjGvvUFcWcqO+
|
||||
GppHh16pzJDXi2qeD9Cu5b2ayM2uBnfV7Q3FjOeDqD+BCJ0ClaqNmAD9TF2htzdu
|
||||
Inl+G3OJb4cqaYjaF5YmiZISfrimK5eR2I3et5cqnbuDHMKvDfUd9Jgj/2IqPOHJ
|
||||
EguuXSO7WNKfQmlTv7EN/xrD6jiB/M8ADaSxjCqTbtKNyCbJlu2Wy9WlDXwPkNW8
|
||||
g70T4Br4U4Iy3N/0w2lAAhiizdC2jkehSKmWE2nmixGSXxkSOMgXQXDJ9RBtDQfd
|
||||
8ym/ADfyVndUSnHvf9jCH1NPHlFbB7RVSvUHX22Qq63NUvhV32ct+/IyD/qPpl0C
|
||||
AwEAAaNTMFEwHQYDVR0OBBYEFKwSSbPXBIkmzja3/cNJyqhWy96WMB8GA1UdIwQY
|
||||
MBaAFKwSSbPXBIkmzja3/cNJyqhWy96WMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZI
|
||||
hvcNAQELBQADggIBAHcrdu1nGDN5YvcHXzbBx73AC921fmn5xxzeFRO7af157g5h
|
||||
4zornLMk4Obp+UGkMbWK4K0NAQXKKm5WjcmoOHNRg7TgTE7b1gcVuS4phdwlIqA6
|
||||
eZGg+NWZyeaIJNjdHgWgGoe+S+5Ne1I7sDKiEXrOzITJrDcQgBKFF08kqT6UNY2W
|
||||
q90m+olPtrewAMgWllpxJ90u4qifPcwP+neDZJim9MhVYtHHeFsmyzlS185iasj8
|
||||
sxvp5HDTopmz0tDuiLHvOMKmyf7vapsnbqEGngQi2qV9rBmldyRLnWSe8u/FN31f
|
||||
zhSk1ikSm1cQ/iyL898XexSmTafyaF8ELswdIMHkGZkVQurWeKn3/CEDXokXkpMI
|
||||
4dlCSgM7SU+XtcjtXbR8/pHpcW2ZnBR0la/qIv81aNKkJeUkTcPC8BUv4jI/oT6z
|
||||
LRrvRjMnHJjnADACuutlNRU4/e7h1XuvlXgFHsp63k7GJXouoIwdHjfkErZXsoEX
|
||||
WeS+pPatkT7wbhfgYVwglMRIpgCu++htSRCV/lbSuYzCG6mKtxJyy4eslSjpHNPG
|
||||
wELDKgzsgLtuTyNfP458O9i8x6wf9J6eVaHe3nqgqkOnnmQxEYnsPaFUMWG1/DYi
|
||||
U+mA/VdQrPe3J4Z082sCe4MVmTzWlWCDpNFFQpv51NbWzc/kuIZuJCAwoZD0
|
||||
-----END CERTIFICATE-----
|
||||
11
tests/data/cert_ecdsa.pem
Normal file
11
tests/data/cert_ecdsa.pem
Normal file
@@ -0,0 +1,11 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIBfTCCASOgAwIBAgIUfrAUHXIfeM54OLnTIUD9xT6FIwkwCgYIKoZIzj0EAwIw
|
||||
FDESMBAGA1UEAwwJbG9jYWxob3N0MB4XDTIyMDgwMjAxMjQ1NFoXDTMyMDczMDAx
|
||||
MjQ1NFowFDESMBAGA1UEAwwJbG9jYWxob3N0MFkwEwYHKoZIzj0CAQYIKoZIzj0D
|
||||
AQcDQgAEW4tBe0jF2wYSLCvdreb0izR/8sgKNKkbe4xPyA9uNEbtTk58eoO3944R
|
||||
JPT6S5wRTHFpF0BJhQRfiuW4K2EUcaNTMFEwHQYDVR0OBBYEFEebUDkiMJoV2d5W
|
||||
8o+6p4DauHFFMB8GA1UdIwQYMBaAFEebUDkiMJoV2d5W8o+6p4DauHFFMA8GA1Ud
|
||||
EwEB/wQFMAMBAf8wCgYIKoZIzj0EAwIDSAAwRQIhAPJvmzqaq/S5yYxeB4se8k2z
|
||||
6pnVNxrTT2CqdPD8Z+7rAiBZAyU+5+KbQq3aZsmuNUx+YOqTDMkaUR/nd/tjnnOX
|
||||
gA==
|
||||
-----END CERTIFICATE-----
|
||||
9
tests/data/config.yaml
Normal file
9
tests/data/config.yaml
Normal file
@@ -0,0 +1,9 @@
|
||||
bind:
|
||||
- 0.0.0.0
|
||||
path-prefix: dufs
|
||||
hidden:
|
||||
- dir3
|
||||
- test.txt
|
||||
auth:
|
||||
- user:pass@/:rw
|
||||
allow-upload: true
|
||||
5
tests/data/generate_tls_certs.sh
Executable file
5
tests/data/generate_tls_certs.sh
Executable file
@@ -0,0 +1,5 @@
|
||||
#!/usr/bin/env bash
|
||||
openssl req -subj '/CN=localhost' -x509 -newkey rsa:4096 -keyout key_pkcs8.pem -out cert.pem -nodes -days 3650
|
||||
openssl rsa -in key_pkcs8.pem -out key_pkcs1.pem
|
||||
openssl ecparam -name prime256v1 -genkey -noout -out key_ecdsa.pem
|
||||
openssl req -subj '/CN=localhost' -x509 -key key_ecdsa.pem -out cert_ecdsa.pem -nodes -days 3650
|
||||
5
tests/data/key_ecdsa.pem
Normal file
5
tests/data/key_ecdsa.pem
Normal file
@@ -0,0 +1,5 @@
|
||||
-----BEGIN EC PRIVATE KEY-----
|
||||
MHcCAQEEILOQ44lHqD4w12HJKlZJ+Y3u91eUKjabu3UKPSahhC89oAoGCCqGSM49
|
||||
AwEHoUQDQgAEW4tBe0jF2wYSLCvdreb0izR/8sgKNKkbe4xPyA9uNEbtTk58eoO3
|
||||
944RJPT6S5wRTHFpF0BJhQRfiuW4K2EUcQ==
|
||||
-----END EC PRIVATE KEY-----
|
||||
51
tests/data/key_pkcs1.pem
Normal file
51
tests/data/key_pkcs1.pem
Normal file
@@ -0,0 +1,51 @@
|
||||
-----BEGIN RSA PRIVATE KEY-----
|
||||
MIIJKAIBAAKCAgEAo2wdMbFPkX7CAF/Y+hVj5bwm4dlxhwW2Z9Ic2RZFC5w2oK2X
|
||||
wyasDBEqDlgv/bN4xObAVlDZ/4/SuTVSDrNB8dtQl7GTWptpbFKJUdNocU88wqd4
|
||||
k/cLZg2aiQqnZKD88w/AxXnYw+F8yU0pFGj9GX0S5at3/V1hrBVxVO8Y99bbgnJA
|
||||
8NMm0Pw2xYZS++ULuzoECk0xbNdtbtPrIuweI5mMvsJvtiw67EIdl3N9Lj5pL4a7
|
||||
X1C0Xk5H4mOcwM0qq3m31HsCW91PMCjU6suo764rx5Jqv0n9HCNxdiSEadCwf+Gr
|
||||
mKtFOw3DcGPETg5AJR8H3rG1agKKjI+vRtL/tZ7coFOhZKXdjGvvUFcWcqO+GppH
|
||||
h16pzJDXi2qeD9Cu5b2ayM2uBnfV7Q3FjOeDqD+BCJ0ClaqNmAD9TF2htzduInl+
|
||||
G3OJb4cqaYjaF5YmiZISfrimK5eR2I3et5cqnbuDHMKvDfUd9Jgj/2IqPOHJEguu
|
||||
XSO7WNKfQmlTv7EN/xrD6jiB/M8ADaSxjCqTbtKNyCbJlu2Wy9WlDXwPkNW8g70T
|
||||
4Br4U4Iy3N/0w2lAAhiizdC2jkehSKmWE2nmixGSXxkSOMgXQXDJ9RBtDQfd8ym/
|
||||
ADfyVndUSnHvf9jCH1NPHlFbB7RVSvUHX22Qq63NUvhV32ct+/IyD/qPpl0CAwEA
|
||||
AQKCAgAPM29DwAp2riO9hSzZlkPEisvTFjbJKG7fGVw1lSy298DdEUicjmxScwZG
|
||||
b02He7owFoatgLfGXcpsD9miJGpt5MiKU6oxM2OK/+JmChQc9hHgyVMd8EzPIVTO
|
||||
in8njRH6SezUcZEIJ2FEGDlJ/LoONOQdGOYAWz9KknQIQnVAGGwypg4EWJ+zsMIn
|
||||
fWcapyOANtVJYATI6wDy3iNxDCWBijbdR5i8iUCx2TSHceai9osyMIYdR5R/cSie
|
||||
lkVuaacebCP9T7PYd611/VZQwMDmCn1oAuaLBIbWpzVWl+75KMBCJOuhN80owQ78
|
||||
1UrdN9YfndNNk5ocUkAw8uyK2fWO+TcdFddHrx0tnEIsnkzy+Jtp/j5Eq/JGVlSY
|
||||
03dck4FIjDSM/M+6HP5R2jfGCsitono03XGjzNsJou0UnordY+VL4qolItoovWkf
|
||||
N5hudmbste4gS3/dSvtoByto5SAqUGUS0VNjhsU5w+IyMFK+kImlJthb3+GNF/7h
|
||||
NPn4MwuxIFXEy1cVPu+wwoFoL5+7stp68mlYnrxmEIFOJNcjF1urfqCMAXWXxad+
|
||||
71TtBiRit5tAZVHjTz9NBkyvCcXOEq3RMEjAzCtTGlduUwNQpmmdCyHk2SnrWieV
|
||||
LqyTt55r1FhzEZ0AqHiWmHCNRnqz/PJFBIKfX9YKnkK2xVAgAQKCAQEA0jcvZ0cf
|
||||
GGIo8WG/r5mitpnVeQy9XZ+Ic7Js9T73ZLcG+qo/2XDhEXcR4OKZoSMIJIotMIJ1
|
||||
TZKdNN9QgFp7IuUWnYpnp2h+Hyfv8h7DHZwohHw4Ys9AJY9j4WVGP/NKVcPrTY/F
|
||||
kJ3VHKiVd10FXoNn0qEw5y3oa4zRtRYFrp7gvOoRMwoWADLN/hwuQ2QRrBPt0zth
|
||||
qfbeTtQE4g950tkqMy6V6uahkZEvQmSd1UpD35aGKMwxOpK9ew9CAKduftDVOu9x
|
||||
3vKAOh0uXs9DxMUfJFKf8ISI2JB3vFmrAJ2l6qSGEdoVdiXkwHdRsaEBJbDrR3uq
|
||||
R5ovM0qVk2s23QKCAQEAxwPqqv5SuPPMksBCBSds692cEsXA1xbvw1IsOugqG22f
|
||||
CPDSIr0w9c5xU3QSv2BFmaCLJQEVAPoI/jqPMqIdOWC9lSXEuKw297i0r/GAMcNc
|
||||
e1N+Xz1ahyVE3Ak65Jwi/vgr0D38thtQJlF//BB0hPFvvt4GQ2E4O5ELwTXIPr46
|
||||
wQFGf0IfqvufpHoKiszJ5F5liyTtB50J4Is2CKUMUuXq6XlWMrCNLyaGW42cttci
|
||||
gbNAPagnQANHFUIO9M06dAU9WVnUJG9eNDd/tDw0XDLjRqTRXlNoqWRwWMl38ZXi
|
||||
HI9oHpOqHjeAXevdu5nkqsmtSQ50LiHOlK9/cO51gQKCAQBHlj9wXkn6lcL3oKAU
|
||||
fq9om66U0H/UWDWxoLt2MQEyrRmVV1DzDXu35OKTwNcshq+JMfz9ng+wYRNkJABY
|
||||
FXgFhBpVgAKYgf8hQQp3W356oOkzZNIW5BkmMVSEN2ba9FEGL/f7q9BN1VHztn1f
|
||||
7q+bZgh/NCFhOMMDjSsFDgDVXImQC+3bgb3IR4Ta2mHu1S8neInu+zPhG47NLWqU
|
||||
SUzlPsseLuki23N+DQEZDQaq0eWXSL1bO14wYjRgqeuCKYJ5cUiMD2qpz89W+wUF
|
||||
iHO9mJtoVTLeR2QKy/fajnareQQ9idWWUrwoRfNGj9ukL/4iBcO5ziVIyPr17ppN
|
||||
X5+JAoIBAClkoCeGlDARzUfsow6tX5NDWZXx+aUDCUVnzvlFlpRz3XMfm6VMEmXd
|
||||
1WZVKx0Q6gkFAkvlCLhWSQ6PoX8XhtqLS4M9AsiiUSB/E13Q7ifriU3BVPR8L1sS
|
||||
nlrhtJUeAI1lkr9SVUCPN8FwjB0iUwnfqa1aQpU7IFYLWhWKmSarrE6+dCo915ZZ
|
||||
lZ/BHnY2F/vewmIJgR9nQ0mnyspLgd+wIIcFDK+oVwUqjyF1t9Wzs2KkpMTuN5Ox
|
||||
2tQKFFBIa1L8UAFIlL4rR722mWIkb4OJtgnYeA+Va5xn3pIo/UCLOydTkIVjkyuL
|
||||
wbBHQawmWxBGuDsMvY9myq/UPL6BaoECggEBAJeY5OgVbJHB6YageBtUBPe0tLIb
|
||||
nrYPYXIPsLycZ+PXo73ASbpbHh6av7CdP288Ouu+zE0P6iAdrIrU41kc+2Tx7K8b
|
||||
Qb0pDrX0pQZQAIzoBWKouwra8kSeS1dkiLOLiOhnYDn+OYE4tN5ePe7AlBk7b1/x
|
||||
ybNuCyTYdaH1uPaI56RaPB8aHJXnxtPHUvYm0oMfm3EPjgF/FjGdpE7rPcdYWqKU
|
||||
Ek5UPmcGVVs+yHRSsEDna5zXBqQoDaLn+7KfgcO8UxhhL2cdcQ2vsC1C7QIPu043
|
||||
lAIXge5d+1hNwrZjHw/9SkV3UItnEGnxyaZ2NMmRKjdT3g2ilTgkAB2w/Kk=
|
||||
-----END RSA PRIVATE KEY-----
|
||||
52
tests/data/key_pkcs8.pem
Normal file
52
tests/data/key_pkcs8.pem
Normal file
@@ -0,0 +1,52 @@
|
||||
-----BEGIN PRIVATE KEY-----
|
||||
MIIJQgIBADANBgkqhkiG9w0BAQEFAASCCSwwggkoAgEAAoICAQCjbB0xsU+RfsIA
|
||||
X9j6FWPlvCbh2XGHBbZn0hzZFkULnDagrZfDJqwMESoOWC/9s3jE5sBWUNn/j9K5
|
||||
NVIOs0Hx21CXsZNam2lsUolR02hxTzzCp3iT9wtmDZqJCqdkoPzzD8DFedjD4XzJ
|
||||
TSkUaP0ZfRLlq3f9XWGsFXFU7xj31tuCckDw0ybQ/DbFhlL75Qu7OgQKTTFs121u
|
||||
0+si7B4jmYy+wm+2LDrsQh2Xc30uPmkvhrtfULReTkfiY5zAzSqrebfUewJb3U8w
|
||||
KNTqy6jvrivHkmq/Sf0cI3F2JIRp0LB/4auYq0U7DcNwY8RODkAlHwfesbVqAoqM
|
||||
j69G0v+1ntygU6Fkpd2Ma+9QVxZyo74amkeHXqnMkNeLap4P0K7lvZrIza4Gd9Xt
|
||||
DcWM54OoP4EInQKVqo2YAP1MXaG3N24ieX4bc4lvhyppiNoXliaJkhJ+uKYrl5HY
|
||||
jd63lyqdu4Mcwq8N9R30mCP/Yio84ckSC65dI7tY0p9CaVO/sQ3/GsPqOIH8zwAN
|
||||
pLGMKpNu0o3IJsmW7ZbL1aUNfA+Q1byDvRPgGvhTgjLc3/TDaUACGKLN0LaOR6FI
|
||||
qZYTaeaLEZJfGRI4yBdBcMn1EG0NB93zKb8AN/JWd1RKce9/2MIfU08eUVsHtFVK
|
||||
9QdfbZCrrc1S+FXfZy378jIP+o+mXQIDAQABAoICAA8zb0PACnauI72FLNmWQ8SK
|
||||
y9MWNskobt8ZXDWVLLb3wN0RSJyObFJzBkZvTYd7ujAWhq2At8ZdymwP2aIkam3k
|
||||
yIpTqjEzY4r/4mYKFBz2EeDJUx3wTM8hVM6KfyeNEfpJ7NRxkQgnYUQYOUn8ug40
|
||||
5B0Y5gBbP0qSdAhCdUAYbDKmDgRYn7Owwid9ZxqnI4A21UlgBMjrAPLeI3EMJYGK
|
||||
Nt1HmLyJQLHZNIdx5qL2izIwhh1HlH9xKJ6WRW5ppx5sI/1Ps9h3rXX9VlDAwOYK
|
||||
fWgC5osEhtanNVaX7vkowEIk66E3zSjBDvzVSt031h+d002TmhxSQDDy7IrZ9Y75
|
||||
Nx0V10evHS2cQiyeTPL4m2n+PkSr8kZWVJjTd1yTgUiMNIz8z7oc/lHaN8YKyK2i
|
||||
ejTdcaPM2wmi7RSeit1j5UviqiUi2ii9aR83mG52Zuy17iBLf91K+2gHK2jlICpQ
|
||||
ZRLRU2OGxTnD4jIwUr6QiaUm2Fvf4Y0X/uE0+fgzC7EgVcTLVxU+77DCgWgvn7uy
|
||||
2nryaVievGYQgU4k1yMXW6t+oIwBdZfFp37vVO0GJGK3m0BlUeNPP00GTK8Jxc4S
|
||||
rdEwSMDMK1MaV25TA1CmaZ0LIeTZKetaJ5UurJO3nmvUWHMRnQCoeJaYcI1GerP8
|
||||
8kUEgp9f1gqeQrbFUCABAoIBAQDSNy9nRx8YYijxYb+vmaK2mdV5DL1dn4hzsmz1
|
||||
Pvdktwb6qj/ZcOERdxHg4pmhIwgkii0wgnVNkp0031CAWnsi5RadimenaH4fJ+/y
|
||||
HsMdnCiEfDhiz0Alj2PhZUY/80pVw+tNj8WQndUcqJV3XQVeg2fSoTDnLehrjNG1
|
||||
FgWunuC86hEzChYAMs3+HC5DZBGsE+3TO2Gp9t5O1ATiD3nS2SozLpXq5qGRkS9C
|
||||
ZJ3VSkPfloYozDE6kr17D0IAp25+0NU673He8oA6HS5ez0PExR8kUp/whIjYkHe8
|
||||
WasAnaXqpIYR2hV2JeTAd1GxoQElsOtHe6pHmi8zSpWTazbdAoIBAQDHA+qq/lK4
|
||||
88ySwEIFJ2zr3ZwSxcDXFu/DUiw66CobbZ8I8NIivTD1znFTdBK/YEWZoIslARUA
|
||||
+gj+Oo8yoh05YL2VJcS4rDb3uLSv8YAxw1x7U35fPVqHJUTcCTrknCL++CvQPfy2
|
||||
G1AmUX/8EHSE8W++3gZDYTg7kQvBNcg+vjrBAUZ/Qh+q+5+kegqKzMnkXmWLJO0H
|
||||
nQngizYIpQxS5erpeVYysI0vJoZbjZy21yKBs0A9qCdAA0cVQg70zTp0BT1ZWdQk
|
||||
b140N3+0PDRcMuNGpNFeU2ipZHBYyXfxleIcj2gek6oeN4Bd6927meSqya1JDnQu
|
||||
Ic6Ur39w7nWBAoIBAEeWP3BeSfqVwvegoBR+r2ibrpTQf9RYNbGgu3YxATKtGZVX
|
||||
UPMNe7fk4pPA1yyGr4kx/P2eD7BhE2QkAFgVeAWEGlWAApiB/yFBCndbfnqg6TNk
|
||||
0hbkGSYxVIQ3Ztr0UQYv9/ur0E3VUfO2fV/ur5tmCH80IWE4wwONKwUOANVciZAL
|
||||
7duBvchHhNraYe7VLyd4ie77M+Ebjs0tapRJTOU+yx4u6SLbc34NARkNBqrR5ZdI
|
||||
vVs7XjBiNGCp64IpgnlxSIwPaqnPz1b7BQWIc72Ym2hVMt5HZArL99qOdqt5BD2J
|
||||
1ZZSvChF80aP26Qv/iIFw7nOJUjI+vXumk1fn4kCggEAKWSgJ4aUMBHNR+yjDq1f
|
||||
k0NZlfH5pQMJRWfO+UWWlHPdcx+bpUwSZd3VZlUrHRDqCQUCS+UIuFZJDo+hfxeG
|
||||
2otLgz0CyKJRIH8TXdDuJ+uJTcFU9HwvWxKeWuG0lR4AjWWSv1JVQI83wXCMHSJT
|
||||
Cd+prVpClTsgVgtaFYqZJqusTr50Kj3XllmVn8EedjYX+97CYgmBH2dDSafKykuB
|
||||
37AghwUMr6hXBSqPIXW31bOzYqSkxO43k7Ha1AoUUEhrUvxQAUiUvitHvbaZYiRv
|
||||
g4m2Cdh4D5VrnGfekij9QIs7J1OQhWOTK4vBsEdBrCZbEEa4Owy9j2bKr9Q8voFq
|
||||
gQKCAQEAl5jk6BVskcHphqB4G1QE97S0shuetg9hcg+wvJxn49ejvcBJulseHpq/
|
||||
sJ0/bzw6677MTQ/qIB2sitTjWRz7ZPHsrxtBvSkOtfSlBlAAjOgFYqi7CtryRJ5L
|
||||
V2SIs4uI6GdgOf45gTi03l497sCUGTtvX/HJs24LJNh1ofW49ojnpFo8HxoclefG
|
||||
08dS9ibSgx+bcQ+OAX8WMZ2kTus9x1haopQSTlQ+ZwZVWz7IdFKwQOdrnNcGpCgN
|
||||
ouf7sp+Bw7xTGGEvZx1xDa+wLULtAg+7TjeUAheB7l37WE3CtmMfD/1KRXdQi2cQ
|
||||
afHJpnY0yZEqN1PeDaKVOCQAHbD8qQ==
|
||||
-----END PRIVATE KEY-----
|
||||
91
tests/digest_auth_util.rs
Normal file
91
tests/digest_auth_util.rs
Normal file
@@ -0,0 +1,91 @@
|
||||
/// Refs https://github.dev/maoertel/diqwest/blob/main/src/blocking.rs
|
||||
use anyhow::{anyhow, Result};
|
||||
use digest_auth::{AuthContext, AuthorizationHeader, HttpMethod};
|
||||
use hyper::{header::AUTHORIZATION, HeaderMap, StatusCode};
|
||||
use reqwest::blocking::{RequestBuilder, Response};
|
||||
use url::Position;
|
||||
|
||||
pub fn send_with_digest_auth(
|
||||
request_builder: RequestBuilder,
|
||||
username: &str,
|
||||
password: &str,
|
||||
) -> Result<Response> {
|
||||
let first_response = try_clone_request_builder(&request_builder)?.send()?;
|
||||
match first_response.status() {
|
||||
StatusCode::UNAUTHORIZED => {
|
||||
try_digest_auth(request_builder, first_response, username, password)
|
||||
}
|
||||
_ => Ok(first_response),
|
||||
}
|
||||
}
|
||||
|
||||
fn try_digest_auth(
|
||||
request_builder: RequestBuilder,
|
||||
first_response: Response,
|
||||
username: &str,
|
||||
password: &str,
|
||||
) -> Result<Response> {
|
||||
if let Some(answer) = get_answer(
|
||||
&request_builder,
|
||||
first_response.headers(),
|
||||
username,
|
||||
password,
|
||||
)? {
|
||||
return Ok(request_builder
|
||||
.header(AUTHORIZATION, answer.to_header_string())
|
||||
.send()?);
|
||||
};
|
||||
|
||||
Ok(first_response)
|
||||
}
|
||||
|
||||
fn try_clone_request_builder(request_builder: &RequestBuilder) -> Result<RequestBuilder> {
|
||||
request_builder
|
||||
.try_clone()
|
||||
.ok_or_else(|| anyhow!("Request body must not be a stream"))
|
||||
}
|
||||
|
||||
fn get_answer(
|
||||
request_builder: &RequestBuilder,
|
||||
first_response: &HeaderMap,
|
||||
username: &str,
|
||||
password: &str,
|
||||
) -> Result<Option<AuthorizationHeader>> {
|
||||
let answer = calculate_answer(request_builder, first_response, username, password);
|
||||
match answer {
|
||||
Ok(answer) => Ok(Some(answer)),
|
||||
Err(error) => Err(error),
|
||||
}
|
||||
}
|
||||
|
||||
fn calculate_answer(
|
||||
request_builder: &RequestBuilder,
|
||||
headers: &HeaderMap,
|
||||
username: &str,
|
||||
password: &str,
|
||||
) -> Result<AuthorizationHeader> {
|
||||
let request = try_clone_request_builder(request_builder)?.build()?;
|
||||
let path = &request.url()[Position::AfterPort..];
|
||||
let method = HttpMethod::from(request.method().as_str());
|
||||
let body = request.body().and_then(|b| b.as_bytes());
|
||||
|
||||
parse_digest_auth_header(headers, path, method, body, username, password)
|
||||
}
|
||||
|
||||
fn parse_digest_auth_header(
|
||||
header: &HeaderMap,
|
||||
path: &str,
|
||||
method: HttpMethod,
|
||||
body: Option<&[u8]>,
|
||||
username: &str,
|
||||
password: &str,
|
||||
) -> Result<AuthorizationHeader> {
|
||||
let www_auth = header
|
||||
.get("www-authenticate")
|
||||
.ok_or_else(|| anyhow!("The header 'www-authenticate' is missing."))?
|
||||
.to_str()?;
|
||||
let context = AuthContext::new_with_method(username, password, path, body, method);
|
||||
let mut prompt = digest_auth::parse(www_auth)?;
|
||||
|
||||
Ok(prompt.respond(&context)?)
|
||||
}
|
||||
198
tests/fixtures.rs
Normal file
198
tests/fixtures.rs
Normal file
@@ -0,0 +1,198 @@
|
||||
use assert_fs::fixture::TempDir;
|
||||
use assert_fs::prelude::*;
|
||||
use port_check::free_local_port;
|
||||
use reqwest::Url;
|
||||
use rstest::fixture;
|
||||
use std::process::{Child, Command, Stdio};
|
||||
use std::thread::sleep;
|
||||
use std::time::{Duration, Instant};
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub type Error = Box<dyn std::error::Error>;
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub const BIN_FILE: &str = "😀.bin";
|
||||
|
||||
/// File names for testing purpose
|
||||
#[allow(dead_code)]
|
||||
pub static FILES: &[&str] = &[
|
||||
"test.txt",
|
||||
"test.html",
|
||||
"index.html",
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
"file\n1.txt",
|
||||
BIN_FILE,
|
||||
];
|
||||
|
||||
/// Directory names for testing directory don't exist
|
||||
#[allow(dead_code)]
|
||||
pub static DIR_NO_FOUND: &str = "dir-no-found/";
|
||||
|
||||
/// Directory names for testing directory don't have index.html
|
||||
#[allow(dead_code)]
|
||||
pub static DIR_NO_INDEX: &str = "dir-no-index/";
|
||||
|
||||
/// Directory names for testing hidden
|
||||
#[allow(dead_code)]
|
||||
pub static DIR_GIT: &str = ".git/";
|
||||
|
||||
/// Directory names for testings assets override
|
||||
#[allow(dead_code)]
|
||||
pub static DIR_ASSETS: &str = "dir-assets/";
|
||||
|
||||
/// Directory names for testing purpose
|
||||
#[allow(dead_code)]
|
||||
pub static DIRECTORIES: &[&str] = &["dir1/", "dir2/", "dir3/", DIR_NO_INDEX, DIR_GIT, DIR_ASSETS];
|
||||
|
||||
/// Test fixture which creates a temporary directory with a few files and directories inside.
|
||||
/// The directories also contain files.
|
||||
#[fixture]
|
||||
#[allow(dead_code)]
|
||||
pub fn tmpdir() -> TempDir {
|
||||
let tmpdir = assert_fs::TempDir::new().expect("Couldn't create a temp dir for tests");
|
||||
for file in FILES {
|
||||
if *file == BIN_FILE {
|
||||
tmpdir.child(file).write_binary(b"bin\0\x00123").unwrap();
|
||||
} else {
|
||||
tmpdir
|
||||
.child(file)
|
||||
.write_str(&format!("This is {file}"))
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
for directory in DIRECTORIES {
|
||||
if *directory == DIR_ASSETS {
|
||||
tmpdir
|
||||
.child(format!("{}{}", directory, "index.html"))
|
||||
.write_str("__ASSETS_PREFIX__index.js;<template id=\"index-data\">__INDEX_DATA__</template>")
|
||||
.unwrap();
|
||||
} else {
|
||||
for file in FILES {
|
||||
if *directory == DIR_NO_INDEX && *file == "index.html" {
|
||||
continue;
|
||||
}
|
||||
if *file == BIN_FILE {
|
||||
tmpdir
|
||||
.child(format!("{directory}{file}"))
|
||||
.write_binary(b"bin\0\x00123")
|
||||
.unwrap();
|
||||
} else {
|
||||
tmpdir
|
||||
.child(format!("{directory}{file}"))
|
||||
.write_str(&format!("This is {directory}{file}"))
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
tmpdir.child("dir4/hidden").touch().unwrap();
|
||||
tmpdir
|
||||
.child("content-types/bin.tar")
|
||||
.write_binary(b"\x7f\x45\x4c\x46\x02\x01\x00\x00")
|
||||
.unwrap();
|
||||
tmpdir
|
||||
.child("content-types/bin")
|
||||
.write_binary(b"\x7f\x45\x4c\x46\x02\x01\x00\x00")
|
||||
.unwrap();
|
||||
tmpdir
|
||||
.child("content-types/file-utf8.txt")
|
||||
.write_str("世界")
|
||||
.unwrap();
|
||||
tmpdir
|
||||
.child("content-types/file-gbk.txt")
|
||||
.write_binary(b"\xca\xc0\xbd\xe7")
|
||||
.unwrap();
|
||||
tmpdir
|
||||
.child("content-types/file")
|
||||
.write_str("世界")
|
||||
.unwrap();
|
||||
|
||||
tmpdir
|
||||
}
|
||||
|
||||
/// Get a free port.
|
||||
#[fixture]
|
||||
#[allow(dead_code)]
|
||||
pub fn port() -> u16 {
|
||||
free_local_port().expect("Couldn't find a free local port")
|
||||
}
|
||||
|
||||
/// Run dufs as a server; Start with a temporary directory, a free port and some
|
||||
/// optional arguments then wait for a while for the server setup to complete.
|
||||
#[fixture]
|
||||
#[allow(dead_code)]
|
||||
pub fn server<I>(#[default(&[] as &[&str])] args: I) -> TestServer
|
||||
where
|
||||
I: IntoIterator + Clone,
|
||||
I::Item: AsRef<std::ffi::OsStr>,
|
||||
{
|
||||
let port = port();
|
||||
let tmpdir = tmpdir();
|
||||
let child = Command::new(assert_cmd::cargo::cargo_bin!())
|
||||
.arg(tmpdir.path())
|
||||
.arg("-p")
|
||||
.arg(port.to_string())
|
||||
.args(args.clone())
|
||||
.stdout(Stdio::null())
|
||||
.spawn()
|
||||
.expect("Couldn't run test binary");
|
||||
let is_tls = args
|
||||
.into_iter()
|
||||
.any(|x| x.as_ref().to_str().unwrap().contains("tls"));
|
||||
|
||||
wait_for_port(port);
|
||||
TestServer::new(port, tmpdir, child, is_tls)
|
||||
}
|
||||
|
||||
/// Wait a max of 2s for the port to become available.
|
||||
pub fn wait_for_port(port: u16) {
|
||||
let start_wait = Instant::now();
|
||||
|
||||
while !port_check::is_port_reachable(format!("localhost:{port}")) {
|
||||
sleep(Duration::from_millis(250));
|
||||
|
||||
if start_wait.elapsed().as_secs() > 2 {
|
||||
panic!("timeout waiting for port {port}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub struct TestServer {
|
||||
port: u16,
|
||||
tmpdir: TempDir,
|
||||
child: Child,
|
||||
is_tls: bool,
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
impl TestServer {
|
||||
pub fn new(port: u16, tmpdir: TempDir, child: Child, is_tls: bool) -> Self {
|
||||
Self {
|
||||
port,
|
||||
tmpdir,
|
||||
child,
|
||||
is_tls,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn url(&self) -> Url {
|
||||
let protocol = if self.is_tls { "https" } else { "http" };
|
||||
Url::parse(&format!("{}://localhost:{}", protocol, self.port)).unwrap()
|
||||
}
|
||||
|
||||
pub fn path(&self) -> &std::path::Path {
|
||||
self.tmpdir.path()
|
||||
}
|
||||
|
||||
pub fn port(&self) -> u16 {
|
||||
self.port
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for TestServer {
|
||||
fn drop(&mut self) {
|
||||
self.child.kill().expect("Couldn't kill test server");
|
||||
self.child.wait().unwrap();
|
||||
}
|
||||
}
|
||||
31
tests/health.rs
Normal file
31
tests/health.rs
Normal file
@@ -0,0 +1,31 @@
|
||||
mod fixtures;
|
||||
mod utils;
|
||||
|
||||
use fixtures::{server, Error, TestServer};
|
||||
use rstest::rstest;
|
||||
|
||||
const HEALTH_CHECK_PATH: &str = "__dufs__/health";
|
||||
const HEALTH_CHECK_RESPONSE: &str = r#"{"status":"OK"}"#;
|
||||
|
||||
#[rstest]
|
||||
fn normal_health(server: TestServer) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(format!("{}{HEALTH_CHECK_PATH}", server.url()))?;
|
||||
assert_eq!(resp.text()?, HEALTH_CHECK_RESPONSE);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn auth_health(
|
||||
#[with(&["--auth", "user:pass@/:rw", "-A"])] server: TestServer,
|
||||
) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(format!("{}{HEALTH_CHECK_PATH}", server.url()))?;
|
||||
assert_eq!(resp.text()?, HEALTH_CHECK_RESPONSE);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn path_prefix_health(#[with(&["--path-prefix", "xyz"])] server: TestServer) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(format!("{}xyz/{HEALTH_CHECK_PATH}", server.url()))?;
|
||||
assert_eq!(resp.text()?, HEALTH_CHECK_RESPONSE);
|
||||
Ok(())
|
||||
}
|
||||
72
tests/hidden.rs
Normal file
72
tests/hidden.rs
Normal file
@@ -0,0 +1,72 @@
|
||||
mod fixtures;
|
||||
mod utils;
|
||||
|
||||
use fixtures::{server, Error, TestServer};
|
||||
use rstest::rstest;
|
||||
|
||||
#[rstest]
|
||||
#[case(server(&[] as &[&str]), true)]
|
||||
#[case(server(&["--hidden", ".git,index.html"]), false)]
|
||||
fn hidden_get_dir(#[case] server: TestServer, #[case] exist: bool) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(server.url())?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
let paths = utils::retrieve_index_paths(&resp.text()?);
|
||||
assert!(paths.contains("dir1/"));
|
||||
assert_eq!(paths.contains(".git/"), exist);
|
||||
assert_eq!(paths.contains("index.html"), exist);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[case(server(&[] as &[&str]), true)]
|
||||
#[case(server(&["--hidden", "*.html"]), false)]
|
||||
fn hidden_get_dir2(#[case] server: TestServer, #[case] exist: bool) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(server.url())?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
let paths = utils::retrieve_index_paths(&resp.text()?);
|
||||
assert!(paths.contains("dir1/"));
|
||||
assert_eq!(paths.contains("index.html"), exist);
|
||||
assert_eq!(paths.contains("test.html"), exist);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[case(server(&[] as &[&str]), true)]
|
||||
#[case(server(&["--hidden", ".git,index.html"]), false)]
|
||||
fn hidden_propfind_dir(#[case] server: TestServer, #[case] exist: bool) -> Result<(), Error> {
|
||||
let resp = fetch!(b"PROPFIND", server.url()).send()?;
|
||||
assert_eq!(resp.status(), 207);
|
||||
let body = resp.text()?;
|
||||
assert!(body.contains("<D:href>/dir1/</D:href>"));
|
||||
assert_eq!(body.contains("<D:href>/.git/</D:href>"), exist);
|
||||
assert_eq!(body.contains("<D:href>/index.html</D:href>"), exist);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[case(server(&["--allow-search"] as &[&str]), true)]
|
||||
#[case(server(&["--allow-search", "--hidden", ".git,test.html"]), false)]
|
||||
fn hidden_search_dir(#[case] server: TestServer, #[case] exist: bool) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(format!("{}?q={}", server.url(), "test.html"))?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
let paths = utils::retrieve_index_paths(&resp.text()?);
|
||||
for p in paths {
|
||||
assert_eq!(p.contains("test.html"), exist);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[case(server(&["--hidden", "hidden/"]), "dir4/", 1)]
|
||||
#[case(server(&["--hidden", "hidden"]), "dir4/", 0)]
|
||||
fn hidden_dir_only(
|
||||
#[case] server: TestServer,
|
||||
#[case] dir: &str,
|
||||
#[case] count: usize,
|
||||
) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(format!("{}{}", server.url(), dir))?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
let paths = utils::retrieve_index_paths(&resp.text()?);
|
||||
assert_eq!(paths.len(), count);
|
||||
Ok(())
|
||||
}
|
||||
383
tests/http.rs
Normal file
383
tests/http.rs
Normal file
@@ -0,0 +1,383 @@
|
||||
mod fixtures;
|
||||
mod utils;
|
||||
|
||||
use fixtures::{server, Error, TestServer, BIN_FILE};
|
||||
use rstest::rstest;
|
||||
use serde_json::Value;
|
||||
use utils::retrieve_edit_file;
|
||||
|
||||
#[rstest]
|
||||
fn get_dir(server: TestServer) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(server.url())?;
|
||||
assert_resp_paths!(resp);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn head_dir(server: TestServer) -> Result<(), Error> {
|
||||
let resp = fetch!(b"HEAD", server.url()).send()?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
assert_eq!(
|
||||
resp.headers().get("content-type").unwrap(),
|
||||
"text/html; charset=utf-8"
|
||||
);
|
||||
assert_eq!(resp.text()?, "");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn get_dir_404(server: TestServer) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(format!("{}404/", server.url()))?;
|
||||
assert_eq!(resp.status(), 404);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn head_dir_404(server: TestServer) -> Result<(), Error> {
|
||||
let resp = fetch!(b"HEAD", format!("{}404/", server.url())).send()?;
|
||||
assert_eq!(resp.status(), 404);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[case(server(&["--allow-archive"] as &[&str]))]
|
||||
#[case(server(&["--allow-archive", "--compress", "none"]))]
|
||||
#[case(server(&["--allow-archive", "--compress", "low"]))]
|
||||
#[case(server(&["--allow-archive", "--compress", "medium"]))]
|
||||
#[case(server(&["--allow-archive", "--compress", "high"]))]
|
||||
fn get_dir_zip(#[case] server: TestServer) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(format!("{}?zip", server.url()))?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
assert_eq!(
|
||||
resp.headers().get("content-type").unwrap(),
|
||||
"application/zip"
|
||||
);
|
||||
assert!(resp.headers().contains_key("content-disposition"));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn get_dir_json(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(format!("{}?json", server.url()))?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
assert_eq!(
|
||||
resp.headers().get("content-type").unwrap(),
|
||||
"application/json"
|
||||
);
|
||||
let json: Value = serde_json::from_str(&resp.text().unwrap()).unwrap();
|
||||
assert!(json["paths"].as_array().is_some());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn get_dir_simple(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(format!("{}?simple", server.url()))?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
assert_eq!(
|
||||
resp.headers().get("content-type").unwrap(),
|
||||
"text/html; charset=utf-8"
|
||||
);
|
||||
let text = resp.text().unwrap();
|
||||
assert!(text.split('\n').any(|v| v == "index.html"));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn get_dir_noscript(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(format!("{}?noscript", server.url()))?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
assert_eq!(
|
||||
resp.headers().get("content-type").unwrap(),
|
||||
"text/html; charset=utf-8"
|
||||
);
|
||||
let text = resp.text().unwrap();
|
||||
assert!(text.contains(r#"<td><a href="index.html">index.html</a></td>"#));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn head_dir_zip(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
|
||||
let resp = fetch!(b"HEAD", format!("{}?zip", server.url())).send()?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
assert_eq!(
|
||||
resp.headers().get("content-type").unwrap(),
|
||||
"application/zip"
|
||||
);
|
||||
assert!(resp.headers().contains_key("content-disposition"));
|
||||
assert_eq!(resp.text()?, "");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn get_dir_search(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(format!("{}?q={}", server.url(), "test.html"))?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
let paths = utils::retrieve_index_paths(&resp.text()?);
|
||||
assert!(!paths.is_empty());
|
||||
for p in paths {
|
||||
assert!(p.contains("test.html"));
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn get_dir_search2(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(format!("{}?q={BIN_FILE}", server.url()))?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
let paths = utils::retrieve_index_paths(&resp.text()?);
|
||||
assert!(!paths.is_empty());
|
||||
for p in paths {
|
||||
assert!(p.contains(BIN_FILE));
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn get_dir_search3(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(format!("{}?q={}&simple", server.url(), "test.html"))?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
let text = resp.text().unwrap();
|
||||
assert!(text.split('\n').any(|v| v == "test.html"));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn get_dir_search4(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(format!("{}dir1?q=dir1&simple", server.url()))?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
let text = resp.text().unwrap();
|
||||
assert!(text.is_empty());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn head_dir_search(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
|
||||
let resp = fetch!(b"HEAD", format!("{}?q={}", server.url(), "test.html")).send()?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
assert_eq!(
|
||||
resp.headers().get("content-type").unwrap(),
|
||||
"text/html; charset=utf-8"
|
||||
);
|
||||
assert_eq!(resp.text()?, "");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// An empty search query (`?q=`) falls back to the normal directory listing:
// the default fixture paths must all be present in the response.
#[rstest]
fn empty_search(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
    let resp = reqwest::blocking::get(format!("{}?q=", server.url()))?;
    assert_resp_paths!(resp);
    Ok(())
}
|
||||
|
||||
// GET on a regular file returns its full content along with range and
// caching metadata.
#[rstest]
fn get_file(server: TestServer) -> Result<(), Error> {
    let resp = reqwest::blocking::get(format!("{}index.html", server.url()))?;
    assert_eq!(resp.status(), 200);
    assert_eq!(
        resp.headers().get("content-type").unwrap(),
        "text/html; charset=UTF-8"
    );
    // Byte-range requests are advertised for plain files.
    assert_eq!(resp.headers().get("accept-ranges").unwrap(), "bytes");
    // Conditional-request / caching headers must be present.
    assert!(resp.headers().contains_key("etag"));
    assert!(resp.headers().contains_key("last-modified"));
    assert!(resp.headers().contains_key("content-length"));
    assert_eq!(resp.text()?, "This is index.html");
    Ok(())
}
|
||||
|
||||
#[rstest]
|
||||
fn head_file(server: TestServer) -> Result<(), Error> {
|
||||
let resp = fetch!(b"HEAD", format!("{}index.html", server.url())).send()?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
assert_eq!(
|
||||
resp.headers().get("content-type").unwrap(),
|
||||
"text/html; charset=UTF-8"
|
||||
);
|
||||
assert_eq!(resp.headers().get("accept-ranges").unwrap(), "bytes");
|
||||
assert!(resp.headers().contains_key("content-disposition"));
|
||||
assert!(resp.headers().contains_key("etag"));
|
||||
assert!(resp.headers().contains_key("last-modified"));
|
||||
assert!(resp.headers().contains_key("content-length"));
|
||||
assert_eq!(resp.text()?, "");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// With `--allow-hash`, `?hash` returns the file's digest as plain text
// (the expected value is the fixed hash of "This is index.html").
#[rstest]
fn hash_file(#[with(&["--allow-hash"])] server: TestServer) -> Result<(), Error> {
    let resp = reqwest::blocking::get(format!("{}index.html?hash", server.url()))?;
    assert_eq!(
        resp.headers().get("content-type").unwrap(),
        "text/html; charset=utf-8"
    );
    assert_eq!(resp.status(), 200);
    assert_eq!(
        resp.text()?,
        "c8dd395e3202674b9512f7b7f956e0d96a8ba8f572e785b0d5413ab83766dbc4"
    );
    Ok(())
}
|
||||
|
||||
#[rstest]
|
||||
fn no_hash_file(server: TestServer) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(format!("{}index.html?hash", server.url()))?;
|
||||
assert_eq!(resp.status(), 403);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn get_file_404(server: TestServer) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(format!("{}404", server.url()))?;
|
||||
assert_eq!(resp.status(), 404);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn get_file_emoji_path(server: TestServer) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(format!("{}{BIN_FILE}", server.url()))?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
assert_eq!(
|
||||
resp.headers().get("content-disposition").unwrap(),
|
||||
"inline; filename=\"😀.bin\"; filename*=UTF-8''%F0%9F%98%80.bin"
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
#[rstest]
|
||||
fn get_file_newline_path(server: TestServer) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(format!("{}file%0A1.txt", server.url()))?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
assert_eq!(
|
||||
resp.headers().get("content-disposition").unwrap(),
|
||||
"inline; filename=\"file 1.txt\""
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn get_file_edit(server: TestServer) -> Result<(), Error> {
|
||||
let resp = fetch!(b"GET", format!("{}index.html?edit", server.url())).send()?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
let editable = retrieve_edit_file(&resp.text().unwrap()).unwrap();
|
||||
assert!(editable);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn get_file_edit_bin(server: TestServer) -> Result<(), Error> {
|
||||
let resp = fetch!(b"GET", format!("{}{BIN_FILE}?edit", server.url())).send()?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
let editable = retrieve_edit_file(&resp.text().unwrap()).unwrap();
|
||||
assert!(!editable);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn head_file_404(server: TestServer) -> Result<(), Error> {
|
||||
let resp = fetch!(b"HEAD", format!("{}404", server.url())).send()?;
|
||||
assert_eq!(resp.status(), 404);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// OPTIONS advertises the full method set and the WebDAV compliance classes.
// NOTE(review): despite the name, this requests a file URL (index.html),
// not a directory — confirm whether the test name or the target is intended.
#[rstest]
fn options_dir(server: TestServer) -> Result<(), Error> {
    let resp = fetch!(b"OPTIONS", format!("{}index.html", server.url())).send()?;
    assert_eq!(resp.status(), 200);
    assert_eq!(
        resp.headers().get("allow").unwrap(),
        "GET,HEAD,PUT,OPTIONS,DELETE,PATCH,PROPFIND,COPY,MOVE,CHECKAUTH,LOGOUT"
    );
    // WebDAV classes 1, 2 and 3 are advertised.
    assert_eq!(resp.headers().get("dav").unwrap(), "1, 2, 3");
    Ok(())
}
|
||||
|
||||
#[rstest]
|
||||
fn put_file(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
|
||||
let url = format!("{}file1", server.url());
|
||||
let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
|
||||
assert_eq!(resp.status(), 201);
|
||||
let resp = reqwest::blocking::get(url)?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn put_file_create_dir(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
|
||||
let url = format!("{}xyz/file1", server.url());
|
||||
let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
|
||||
assert_eq!(resp.status(), 201);
|
||||
let resp = reqwest::blocking::get(url)?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn put_file_conflict_dir(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
|
||||
let url = format!("{}dir1", server.url());
|
||||
let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
|
||||
assert_eq!(resp.status(), 403);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn delete_file(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
|
||||
let url = format!("{}test.html", server.url());
|
||||
let resp = fetch!(b"DELETE", &url).send()?;
|
||||
assert_eq!(resp.status(), 204);
|
||||
let resp = reqwest::blocking::get(url)?;
|
||||
assert_eq!(resp.status(), 404);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn delete_file_404(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
|
||||
let resp = fetch!(b"DELETE", format!("{}file1", server.url())).send()?;
|
||||
assert_eq!(resp.status(), 404);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Content-Type detection across file categories: known extensions, raw
// binary, and text files whose charset is sniffed from the content.
#[rstest]
fn get_file_content_type(server: TestServer) -> Result<(), Error> {
    // Known extension maps to its MIME type.
    let resp = reqwest::blocking::get(format!("{}content-types/bin.tar", server.url()))?;
    assert_eq!(
        resp.headers().get("content-type").unwrap(),
        "application/x-tar"
    );
    // Extensionless binary content falls back to octet-stream.
    let resp = reqwest::blocking::get(format!("{}content-types/bin", server.url()))?;
    assert_eq!(
        resp.headers().get("content-type").unwrap(),
        "application/octet-stream"
    );
    // UTF-8 text file gets its charset reported.
    let resp = reqwest::blocking::get(format!("{}content-types/file-utf8.txt", server.url()))?;
    assert_eq!(
        resp.headers().get("content-type").unwrap(),
        "text/plain; charset=UTF-8"
    );
    // Non-UTF-8 (GBK) text is detected as such.
    let resp = reqwest::blocking::get(format!("{}content-types/file-gbk.txt", server.url()))?;
    assert_eq!(
        resp.headers().get("content-type").unwrap(),
        "text/plain; charset=GBK"
    );
    // Extensionless text content is still served as text/plain.
    let resp = reqwest::blocking::get(format!("{}content-types/file", server.url()))?;
    assert_eq!(
        resp.headers().get("content-type").unwrap(),
        "text/plain; charset=UTF-8"
    );
    Ok(())
}
|
||||
|
||||
// Resumable upload: create a file with PUT, then append to it with PATCH
// and the `X-Update-Range: append` header; the final content is the
// concatenation of both bodies.
#[rstest]
fn resumable_upload(#[with(&["--allow-upload"])] server: TestServer) -> Result<(), Error> {
    let url = format!("{}file1", server.url());
    let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
    assert_eq!(resp.status(), 201);
    // Append rather than overwrite.
    let resp = fetch!(b"PATCH", &url)
        .header("X-Update-Range", "append")
        .body(b"123".to_vec())
        .send()?;
    assert_eq!(resp.status(), 204);
    let resp = reqwest::blocking::get(url)?;
    assert_eq!(resp.status(), 200);
    assert_eq!(resp.text().unwrap(), "abc123");
    Ok(())
}
|
||||
80
tests/http_logger.rs
Normal file
80
tests/http_logger.rs
Normal file
@@ -0,0 +1,80 @@
|
||||
mod digest_auth_util;
|
||||
mod fixtures;
|
||||
mod utils;
|
||||
|
||||
use digest_auth_util::send_with_digest_auth;
|
||||
use fixtures::{port, tmpdir, wait_for_port, Error};
|
||||
|
||||
use assert_fs::fixture::TempDir;
|
||||
use rstest::rstest;
|
||||
use std::io::Read;
|
||||
use std::process::{Command, Stdio};
|
||||
|
||||
// With `--log-format $remote_user`, the access log line ends with the
// authenticated user name, for both basic and digest authentication.
#[rstest]
#[case(&["-a", "user:pass@/:rw", "--log-format", "$remote_user"], false)]
#[case(&["-a", "user:pass@/:rw", "--log-format", "$remote_user"], true)]
fn log_remote_user(
    tmpdir: TempDir,
    port: u16,
    #[case] args: &[&str],
    #[case] is_basic: bool,
) -> Result<(), Error> {
    // Spawn the server binary directly so its stdout (the log stream)
    // can be captured.
    let mut child = Command::new(assert_cmd::cargo::cargo_bin!())
        .arg(tmpdir.path())
        .arg("-p")
        .arg(port.to_string())
        .args(args)
        .stdout(Stdio::piped())
        .spawn()?;

    wait_for_port(port);

    let stdout = child.stdout.as_mut().expect("Failed to get stdout");

    let req_builder = fetch!(b"GET", &format!("http://localhost:{port}"));

    // Authenticate with either basic or digest auth depending on the case.
    let resp = if is_basic {
        req_builder.basic_auth("user", Some("pass")).send()?
    } else {
        send_with_digest_auth(req_builder, "user", "pass")?
    };

    assert_eq!(resp.status(), 200);

    let mut buf = [0; 2048];
    let buf_len = stdout.read(&mut buf)?;
    let output = std::str::from_utf8(&buf[0..buf_len])?;

    // The log format is exactly `$remote_user`, so the last line must end
    // with the user name.
    assert!(output.lines().last().unwrap().ends_with("user"));

    child.kill()?;
    Ok(())
}
|
||||
|
||||
#[rstest]
|
||||
#[case(&["--log-format", ""])]
|
||||
fn no_log(tmpdir: TempDir, port: u16, #[case] args: &[&str]) -> Result<(), Error> {
|
||||
let mut child = Command::new(assert_cmd::cargo::cargo_bin!())
|
||||
.arg(tmpdir.path())
|
||||
.arg("-p")
|
||||
.arg(port.to_string())
|
||||
.args(args)
|
||||
.stdout(Stdio::piped())
|
||||
.spawn()?;
|
||||
|
||||
wait_for_port(port);
|
||||
|
||||
let stdout = child.stdout.as_mut().expect("Failed to get stdout");
|
||||
|
||||
let resp = fetch!(b"GET", &format!("http://localhost:{port}")).send()?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
|
||||
let mut buf = [0; 2048];
|
||||
let buf_len = stdout.read(&mut buf)?;
|
||||
let output = std::str::from_utf8(&buf[0..buf_len])?;
|
||||
|
||||
assert_eq!(output.lines().last().unwrap(), "");
|
||||
|
||||
child.kill()?;
|
||||
Ok(())
|
||||
}
|
||||
106
tests/range.rs
Normal file
106
tests/range.rs
Normal file
@@ -0,0 +1,106 @@
|
||||
mod fixtures;
|
||||
mod utils;
|
||||
|
||||
use fixtures::{server, Error, TestServer};
|
||||
use reqwest::header::{HeaderMap, HeaderName, HeaderValue};
|
||||
use rstest::rstest;
|
||||
|
||||
#[rstest]
|
||||
fn get_file_range(server: TestServer) -> Result<(), Error> {
|
||||
let resp = fetch!(b"GET", format!("{}index.html", server.url()))
|
||||
.header("range", HeaderValue::from_static("bytes=0-6"))
|
||||
.send()?;
|
||||
assert_eq!(resp.status(), 206);
|
||||
assert_eq!(resp.headers().get("content-range").unwrap(), "bytes 0-6/18");
|
||||
assert_eq!(resp.headers().get("accept-ranges").unwrap(), "bytes");
|
||||
assert_eq!(resp.headers().get("content-length").unwrap(), "7");
|
||||
assert_eq!(resp.text()?, "This is");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn get_file_range_beyond(server: TestServer) -> Result<(), Error> {
|
||||
let resp = fetch!(b"GET", format!("{}index.html", server.url()))
|
||||
.header("range", HeaderValue::from_static("bytes=12-20"))
|
||||
.send()?;
|
||||
assert_eq!(resp.status(), 416);
|
||||
assert_eq!(resp.headers().get("content-range").unwrap(), "bytes */18");
|
||||
assert_eq!(resp.headers().get("accept-ranges").unwrap(), "bytes");
|
||||
assert_eq!(resp.headers().get("content-length").unwrap(), "0");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn get_file_range_invalid(server: TestServer) -> Result<(), Error> {
|
||||
let resp = fetch!(b"GET", format!("{}index.html", server.url()))
|
||||
.header("range", HeaderValue::from_static("bytes=20-"))
|
||||
.send()?;
|
||||
assert_eq!(resp.status(), 416);
|
||||
assert_eq!(resp.headers().get("content-range").unwrap(), "bytes */18");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Split a `multipart/byteranges` body on `--boundary` and parse each part
/// into its header map plus raw body text.
///
/// Panics (via `unwrap`) on malformed parts — acceptable in test code.
fn parse_multipart_body<'a>(body: &'a str, boundary: &str) -> Vec<(HeaderMap, &'a str)> {
    body.split(&format!("--{boundary}"))
        // Drop the empty prefix before the first boundary and the final
        // terminator part ("--\r\n").
        .filter(|part| !part.is_empty() && *part != "--\r\n")
        .map(|part| {
            // Headers and body are separated by a blank CRLF line.
            let (head, body) = part.trim_ascii().split_once("\r\n\r\n").unwrap();
            let headers = head
                .split("\r\n")
                .fold(HeaderMap::new(), |mut headers, header| {
                    let (key, value) = header.split_once(":").unwrap();
                    let key = HeaderName::from_bytes(key.as_bytes()).unwrap();
                    let value = HeaderValue::from_str(value.trim_ascii_start()).unwrap();
                    headers.insert(key, value);
                    headers
                });
            (headers, body)
        })
        .collect()
}
|
||||
|
||||
// Multiple (overlapping) ranges produce a multipart/byteranges response
// whose parts each carry their own content-range header and body slice.
#[rstest]
fn get_file_multipart_range(server: TestServer) -> Result<(), Error> {
    let resp = fetch!(b"GET", format!("{}index.html", server.url()))
        .header("range", HeaderValue::from_static("bytes=0-11, 6-17"))
        .send()?;
    assert_eq!(resp.status(), 206);
    assert_eq!(resp.headers().get("accept-ranges").unwrap(), "bytes");

    // The boundary token is announced in the content-type header.
    let content_type = resp
        .headers()
        .get("content-type")
        .unwrap()
        .to_str()?
        .to_string();
    assert!(content_type.starts_with("multipart/byteranges; boundary="));

    let boundary = content_type.split_once('=').unwrap().1.trim_ascii_start();
    assert!(!boundary.is_empty());

    let body = resp.text()?;
    let parts = parse_multipart_body(&body, boundary);
    assert_eq!(parts.len(), 2);

    // First requested range: bytes 0-11 of "This is index.html".
    let (headers, body) = &parts[0];
    assert_eq!(headers.get("content-range").unwrap(), "bytes 0-11/18");
    assert_eq!(*body, "This is inde");

    // Second requested range: bytes 6-17.
    let (headers, body) = &parts[1];
    assert_eq!(headers.get("content-range").unwrap(), "bytes 6-17/18");
    assert_eq!(*body, "s index.html");

    Ok(())
}
|
||||
|
||||
#[rstest]
|
||||
fn get_file_multipart_range_invalid(server: TestServer) -> Result<(), Error> {
|
||||
let resp = fetch!(b"GET", format!("{}index.html", server.url()))
|
||||
.header("range", HeaderValue::from_static("bytes=0-6, 20-30"))
|
||||
.send()?;
|
||||
assert_eq!(resp.status(), 416);
|
||||
assert_eq!(resp.headers().get("content-range").unwrap(), "bytes */18");
|
||||
assert_eq!(resp.headers().get("accept-ranges").unwrap(), "bytes");
|
||||
assert_eq!(resp.headers().get("content-length").unwrap(), "0");
|
||||
Ok(())
|
||||
}
|
||||
80
tests/render.rs
Normal file
80
tests/render.rs
Normal file
@@ -0,0 +1,80 @@
|
||||
mod fixtures;
|
||||
mod utils;
|
||||
|
||||
use fixtures::{server, Error, TestServer, BIN_FILE, DIR_NO_FOUND, DIR_NO_INDEX, FILES};
|
||||
use rstest::rstest;
|
||||
|
||||
#[rstest]
|
||||
fn render_index(#[with(&["--render-index"])] server: TestServer) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(server.url())?;
|
||||
let text = resp.text()?;
|
||||
assert_eq!(text, "This is index.html");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn render_index2(#[with(&["--render-index"])] server: TestServer) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(format!("{}{}", server.url(), DIR_NO_INDEX))?;
|
||||
assert_eq!(resp.status(), 404);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn render_try_index(#[with(&["--render-try-index"])] server: TestServer) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(server.url())?;
|
||||
let text = resp.text()?;
|
||||
assert_eq!(text, "This is index.html");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn render_try_index2(#[with(&["--render-try-index"])] server: TestServer) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(format!("{}{}", server.url(), DIR_NO_INDEX))?;
|
||||
let files: Vec<&str> = FILES
|
||||
.iter()
|
||||
.filter(|v| **v != "index.html")
|
||||
.cloned()
|
||||
.collect();
|
||||
assert_resp_paths!(resp, files);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn render_try_index3(
|
||||
#[with(&["--render-try-index", "--allow-archive"])] server: TestServer,
|
||||
) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(format!("{}{}?zip", server.url(), DIR_NO_INDEX))?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
assert_eq!(
|
||||
resp.headers().get("content-type").unwrap(),
|
||||
"application/zip"
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[case(server(&["--render-try-index"] as &[&str]), false)]
|
||||
#[case(server(&["--render-try-index", "--allow-search"] as &[&str]), true)]
|
||||
fn render_try_index4(#[case] server: TestServer, #[case] searched: bool) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(format!("{}{}?q={}", server.url(), DIR_NO_INDEX, BIN_FILE))?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
let paths = utils::retrieve_index_paths(&resp.text()?);
|
||||
assert_eq!(paths.iter().all(|v| v.contains(BIN_FILE)), searched);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn render_spa(#[with(&["--render-spa"])] server: TestServer) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(server.url())?;
|
||||
let text = resp.text()?;
|
||||
assert_eq!(text, "This is index.html");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn render_spa2(#[with(&["--render-spa"])] server: TestServer) -> Result<(), Error> {
|
||||
let resp = reqwest::blocking::get(format!("{}{}", server.url(), DIR_NO_FOUND))?;
|
||||
let text = resp.text()?;
|
||||
assert_eq!(text, "This is index.html");
|
||||
Ok(())
|
||||
}
|
||||
59
tests/single_file.rs
Normal file
59
tests/single_file.rs
Normal file
@@ -0,0 +1,59 @@
|
||||
//! Run file server with different args
|
||||
|
||||
mod fixtures;
|
||||
mod utils;
|
||||
|
||||
use assert_fs::fixture::TempDir;
|
||||
use fixtures::{port, tmpdir, wait_for_port, Error};
|
||||
use rstest::rstest;
|
||||
use std::process::{Command, Stdio};
|
||||
|
||||
// Serving a single file (rather than a directory): the file content is
// returned for the root path, the root path with a trailing slash, and the
// file's own name.
#[rstest]
#[case("index.html")]
fn single_file(tmpdir: TempDir, port: u16, #[case] file: &str) -> Result<(), Error> {
    let mut child = Command::new(assert_cmd::cargo::cargo_bin!())
        .arg(tmpdir.path().join(file))
        .arg("-p")
        .arg(port.to_string())
        .stdout(Stdio::piped())
        .spawn()?;

    wait_for_port(port);

    let resp = reqwest::blocking::get(format!("http://localhost:{port}"))?;
    assert_eq!(resp.text()?, "This is index.html");
    let resp = reqwest::blocking::get(format!("http://localhost:{port}/"))?;
    assert_eq!(resp.text()?, "This is index.html");
    let resp = reqwest::blocking::get(format!("http://localhost:{port}/index.html"))?;
    assert_eq!(resp.text()?, "This is index.html");

    child.kill()?;
    Ok(())
}
|
||||
|
||||
#[rstest]
|
||||
#[case("index.html")]
|
||||
fn path_prefix_single_file(tmpdir: TempDir, port: u16, #[case] file: &str) -> Result<(), Error> {
|
||||
let mut child = Command::new(assert_cmd::cargo::cargo_bin!())
|
||||
.arg(tmpdir.path().join(file))
|
||||
.arg("-p")
|
||||
.arg(port.to_string())
|
||||
.arg("--path-prefix")
|
||||
.arg("xyz")
|
||||
.stdout(Stdio::piped())
|
||||
.spawn()?;
|
||||
|
||||
wait_for_port(port);
|
||||
|
||||
let resp = reqwest::blocking::get(format!("http://localhost:{port}/xyz"))?;
|
||||
assert_eq!(resp.text()?, "This is index.html");
|
||||
let resp = reqwest::blocking::get(format!("http://localhost:{port}/xyz/"))?;
|
||||
assert_eq!(resp.text()?, "This is index.html");
|
||||
let resp = reqwest::blocking::get(format!("http://localhost:{port}/xyz/index.html"))?;
|
||||
assert_eq!(resp.text()?, "This is index.html");
|
||||
let resp = reqwest::blocking::get(format!("http://localhost:{port}"))?;
|
||||
assert_eq!(resp.status(), 400);
|
||||
|
||||
child.kill()?;
|
||||
Ok(())
|
||||
}
|
||||
29
tests/sort.rs
Normal file
29
tests/sort.rs
Normal file
@@ -0,0 +1,29 @@
|
||||
mod fixtures;
|
||||
mod utils;
|
||||
|
||||
use fixtures::{server, Error, TestServer};
|
||||
use rstest::rstest;
|
||||
|
||||
#[rstest]
|
||||
fn ls_dir_sort_by_name(server: TestServer) -> Result<(), Error> {
|
||||
let url = server.url();
|
||||
let resp = reqwest::blocking::get(format!("{url}?sort=name&order=asc"))?;
|
||||
let paths1 = self::utils::retrieve_index_paths(&resp.text()?);
|
||||
let resp = reqwest::blocking::get(format!("{url}?sort=name&order=desc"))?;
|
||||
let mut paths2 = self::utils::retrieve_index_paths(&resp.text()?);
|
||||
paths2.reverse();
|
||||
assert_eq!(paths1, paths2);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn search_dir_sort_by_name(server: TestServer) -> Result<(), Error> {
|
||||
let url = server.url();
|
||||
let resp = reqwest::blocking::get(format!("{url}?q=test.html&sort=name&order=asc"))?;
|
||||
let paths1 = self::utils::retrieve_index_paths(&resp.text()?);
|
||||
let resp = reqwest::blocking::get(format!("{url}?q=test.html&sort=name&order=desc"))?;
|
||||
let mut paths2 = self::utils::retrieve_index_paths(&resp.text()?);
|
||||
paths2.reverse();
|
||||
assert_eq!(paths1, paths2);
|
||||
Ok(())
|
||||
}
|
||||
46
tests/symlink.rs
Normal file
46
tests/symlink.rs
Normal file
@@ -0,0 +1,46 @@
|
||||
mod fixtures;
|
||||
mod utils;
|
||||
|
||||
use assert_fs::fixture::TempDir;
|
||||
use fixtures::{server, tmpdir, Error, TestServer};
|
||||
use rstest::rstest;
|
||||
|
||||
#[cfg(unix)]
|
||||
use std::os::unix::fs::symlink as symlink_dir;
|
||||
#[cfg(windows)]
|
||||
use std::os::windows::fs::symlink_dir;
|
||||
|
||||
// By default, symlinks pointing outside the served root are hidden: direct
// access returns 404 and the entry does not appear in the index listing.
#[rstest]
fn default_not_allow_symlink(server: TestServer, tmpdir: TempDir) -> Result<(), Error> {
    // Create symlink directory "foo" to point outside the root
    let dir = "foo";
    symlink_dir(tmpdir.path(), server.path().join(dir)).expect("Couldn't create symlink");
    let resp = reqwest::blocking::get(format!("{}{}", server.url(), dir))?;
    assert_eq!(resp.status(), 404);
    let resp = reqwest::blocking::get(format!("{}{}/index.html", server.url(), dir))?;
    assert_eq!(resp.status(), 404);
    // The listing itself must still work and must not include the symlink.
    let resp = reqwest::blocking::get(server.url())?;
    let paths = utils::retrieve_index_paths(&resp.text()?);
    assert!(!paths.is_empty());
    assert!(!paths.contains(&format!("{dir}/")));
    Ok(())
}
|
||||
|
||||
#[rstest]
|
||||
fn allow_symlink(
|
||||
#[with(&["--allow-symlink"])] server: TestServer,
|
||||
tmpdir: TempDir,
|
||||
) -> Result<(), Error> {
|
||||
// Create symlink directory "foo" to point outside the root
|
||||
let dir = "foo";
|
||||
symlink_dir(tmpdir.path(), server.path().join(dir)).expect("Couldn't create symlink");
|
||||
let resp = reqwest::blocking::get(format!("{}{}", server.url(), dir))?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
let resp = reqwest::blocking::get(format!("{}{}/index.html", server.url(), dir))?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
let resp = reqwest::blocking::get(server.url())?;
|
||||
let paths = utils::retrieve_index_paths(&resp.text()?);
|
||||
assert!(!paths.is_empty());
|
||||
assert!(paths.contains(&format!("{dir}/")));
|
||||
Ok(())
|
||||
}
|
||||
72
tests/tls.rs
Normal file
72
tests/tls.rs
Normal file
@@ -0,0 +1,72 @@
|
||||
mod fixtures;
|
||||
mod utils;
|
||||
|
||||
use fixtures::{server, Error, TestServer};
|
||||
use predicates::str::contains;
|
||||
use reqwest::blocking::ClientBuilder;
|
||||
use rstest::rstest;
|
||||
|
||||
use crate::fixtures::port;
|
||||
|
||||
/// Can start the server with TLS and receive encrypted responses.
|
||||
/// Can start the server with TLS and receive encrypted responses.
// Covers RSA keys in both PKCS#8 and PKCS#1 encodings as well as an ECDSA key.
#[rstest]
#[case(server(&[
    "--tls-cert", "tests/data/cert.pem",
    "--tls-key", "tests/data/key_pkcs8.pem",
]))]
#[case(server(&[
    "--tls-cert", "tests/data/cert.pem",
    "--tls-key", "tests/data/key_pkcs1.pem",
]))]
#[case(server(&[
    "--tls-cert", "tests/data/cert_ecdsa.pem",
    "--tls-key", "tests/data/key_ecdsa.pem",
]))]
fn tls_works(#[case] server: TestServer) -> Result<(), Error> {
    // The test certificate is self-signed, so certificate validation
    // must be disabled on the client.
    let client = ClientBuilder::new()
        .tls_danger_accept_invalid_certs(true)
        .build()?;
    let resp = client.get(server.url()).send()?.error_for_status()?;
    assert_resp_paths!(resp);
    Ok(())
}
|
||||
|
||||
/// Wrong path for cert throws error.
|
||||
#[rstest]
|
||||
fn wrong_path_cert() -> Result<(), Error> {
|
||||
let port = port().to_string();
|
||||
assert_cmd::cargo::cargo_bin_cmd!()
|
||||
.args([
|
||||
"--tls-cert",
|
||||
"wrong",
|
||||
"--tls-key",
|
||||
"tests/data/key.pem",
|
||||
"--port",
|
||||
&port,
|
||||
])
|
||||
.assert()
|
||||
.failure()
|
||||
.stderr(contains("Failed to load cert file at `wrong`"));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Wrong paths for key throws errors.
|
||||
#[rstest]
|
||||
fn wrong_path_key() -> Result<(), Error> {
|
||||
let port = port().to_string();
|
||||
assert_cmd::cargo::cargo_bin_cmd!()
|
||||
.args([
|
||||
"--tls-cert",
|
||||
"tests/data/cert.pem",
|
||||
"--tls-key",
|
||||
"wrong",
|
||||
"--port",
|
||||
&port,
|
||||
])
|
||||
.assert()
|
||||
.failure()
|
||||
.stderr(contains("Failed to load key file at `wrong`"));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
82
tests/utils.rs
Normal file
82
tests/utils.rs
Normal file
@@ -0,0 +1,82 @@
|
||||
use base64::{engine::general_purpose::STANDARD, Engine as _};
|
||||
use indexmap::IndexSet;
|
||||
use serde_json::Value;
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! assert_resp_paths {
|
||||
($resp:ident) => {
|
||||
assert_resp_paths!($resp, self::fixtures::FILES)
|
||||
};
|
||||
($resp:ident, $files:expr) => {
|
||||
assert_eq!($resp.status(), 200);
|
||||
let body = $resp.text()?;
|
||||
let paths = self::utils::retrieve_index_paths(&body);
|
||||
assert!(!paths.is_empty());
|
||||
for file in $files {
|
||||
assert!(paths.contains(&file.to_string()));
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/// Build a blocking reqwest request with an arbitrary HTTP method given as a
/// byte-string literal, e.g. `fetch!(b"PROPFIND", url)`. Expands inside a
/// function returning `Result` (the `?` on `Method::from_bytes`).
#[macro_export]
macro_rules! fetch {
    ($method:literal, $url:expr) => {
        reqwest::blocking::Client::new().request(reqwest::Method::from_bytes($method)?, $url)
    };
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn retrieve_index_paths(content: &str) -> IndexSet<String> {
|
||||
let value = retrieve_json(content).unwrap();
|
||||
let paths = value
|
||||
.get("paths")
|
||||
.unwrap()
|
||||
.as_array()
|
||||
.unwrap()
|
||||
.iter()
|
||||
.flat_map(|v| {
|
||||
let name = v.get("name")?.as_str()?;
|
||||
let path_type = v.get("path_type")?.as_str()?;
|
||||
if path_type.ends_with("Dir") {
|
||||
Some(format!("{name}/"))
|
||||
} else {
|
||||
Some(name.to_owned())
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
paths
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn retrieve_edit_file(content: &str) -> Option<bool> {
|
||||
let value = retrieve_json(content).unwrap();
|
||||
let value = value.get("editable").unwrap();
|
||||
Some(value.as_bool().unwrap())
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn encode_uri(v: &str) -> String {
|
||||
let parts: Vec<_> = v.split('/').map(urlencoding::encode).collect();
|
||||
parts.join("/")
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn retrieve_json(content: &str) -> Option<Value> {
|
||||
let lines: Vec<&str> = content.lines().collect();
|
||||
let start_tag = "<template id=\"index-data\">";
|
||||
let end_tag = "</template>";
|
||||
|
||||
let line = lines.iter().find(|v| v.contains(start_tag))?;
|
||||
|
||||
let start_index = line.find(start_tag)?;
|
||||
let start_content_index = start_index + start_tag.len();
|
||||
|
||||
let end_index = line[start_content_index..].find(end_tag)?;
|
||||
let end_content_index = start_content_index + end_index;
|
||||
|
||||
let value = &line[start_content_index..end_content_index];
|
||||
let value = STANDARD.decode(value).ok()?;
|
||||
let value = serde_json::from_slice(&value).ok()?;
|
||||
|
||||
Some(value)
|
||||
}
|
||||
228
tests/webdav.rs
Normal file
228
tests/webdav.rs
Normal file
@@ -0,0 +1,228 @@
|
||||
mod fixtures;
|
||||
mod utils;
|
||||
|
||||
use fixtures::{server, Error, TestServer, FILES};
|
||||
use rstest::rstest;
|
||||
use xml::escape::escape_str_pcdata;
|
||||
|
||||
// PROPFIND on a directory (default depth) returns a 207 multi-status body
// listing the directory itself plus every fixture file inside it, with
// hrefs percent-encoded and display names XML-escaped.
#[rstest]
fn propfind_dir(server: TestServer) -> Result<(), Error> {
    let resp = fetch!(b"PROPFIND", format!("{}dir1", server.url())).send()?;
    assert_eq!(resp.status(), 207);
    let body = resp.text()?;
    assert!(body.contains("<D:href>/dir1/</D:href>"));
    assert!(body.contains("<D:displayname>dir1</D:displayname>"));
    for f in FILES {
        // hrefs must be URI-encoded; display names must be PCDATA-escaped.
        assert!(body.contains(&format!("<D:href>/dir1/{}</D:href>", utils::encode_uri(f))));
        assert!(body.contains(&format!(
            "<D:displayname>{}</D:displayname>",
            escape_str_pcdata(f)
        )));
    }
    Ok(())
}
|
||||
|
||||
#[rstest]
|
||||
fn propfind_dir_depth0(server: TestServer) -> Result<(), Error> {
|
||||
let resp = fetch!(b"PROPFIND", format!("{}dir1", server.url()))
|
||||
.header("depth", "0")
|
||||
.send()?;
|
||||
assert_eq!(resp.status(), 207);
|
||||
let body = resp.text()?;
|
||||
assert!(body.contains("<D:href>/dir1/</D:href>"));
|
||||
assert!(body.contains("<D:displayname>dir1</D:displayname>"));
|
||||
assert_eq!(
|
||||
body.lines()
|
||||
.filter(|v| *v == "<D:status>HTTP/1.1 200 OK</D:status>")
|
||||
.count(),
|
||||
1
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn propfind_dir_depth2(server: TestServer) -> Result<(), Error> {
|
||||
let resp = fetch!(b"PROPFIND", format!("{}dir1", server.url()))
|
||||
.header("depth", "2")
|
||||
.send()?;
|
||||
assert_eq!(resp.status(), 400);
|
||||
let body = resp.text()?;
|
||||
assert_eq!(body, "Invalid depth: only 0 and 1 are allowed.");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn propfind_404(server: TestServer) -> Result<(), Error> {
|
||||
let resp = fetch!(b"PROPFIND", format!("{}404", server.url())).send()?;
|
||||
assert_eq!(resp.status(), 404);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// NOTE(review): despite the name, this request targets `server.url()` as-is
// and never sends a path containing a double slash; presumably a leftover
// from an earlier version of the test — confirm intent. As written it only
// checks that PROPFIND on the root returns a 207 multistatus.
#[rstest]
fn propfind_double_slash(server: TestServer) -> Result<(), Error> {
    let resp = fetch!(b"PROPFIND", server.url()).send()?;
    assert_eq!(resp.status(), 207);
    Ok(())
}
|
||||
|
||||
#[rstest]
|
||||
fn propfind_file(server: TestServer) -> Result<(), Error> {
|
||||
let resp = fetch!(b"PROPFIND", format!("{}test.html", server.url())).send()?;
|
||||
assert_eq!(resp.status(), 207);
|
||||
let body = resp.text()?;
|
||||
assert!(body.contains("<D:href>/test.html</D:href>"));
|
||||
assert!(body.contains("<D:displayname>test.html</D:displayname>"));
|
||||
assert_eq!(
|
||||
body.lines()
|
||||
.filter(|v| *v == "<D:status>HTTP/1.1 200 OK</D:status>")
|
||||
.count(),
|
||||
1
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn proppatch_file(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
|
||||
let resp = fetch!(b"PROPPATCH", format!("{}test.html", server.url())).send()?;
|
||||
assert_eq!(resp.status(), 207);
|
||||
let body = resp.text()?;
|
||||
assert!(body.contains("<D:href>/test.html</D:href>"));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn proppatch_404(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
|
||||
let resp = fetch!(b"PROPPATCH", format!("{}404", server.url())).send()?;
|
||||
assert_eq!(resp.status(), 404);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn mkcol_dir(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
|
||||
let resp = fetch!(b"MKCOL", format!("{}newdir", server.url())).send()?;
|
||||
assert_eq!(resp.status(), 201);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn mkcol_not_allow_upload(server: TestServer) -> Result<(), Error> {
|
||||
let resp = fetch!(b"MKCOL", format!("{}newdir", server.url())).send()?;
|
||||
assert_eq!(resp.status(), 403);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn mkcol_already_exists(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
|
||||
let resp = fetch!(b"MKCOL", format!("{}dir1", server.url())).send()?;
|
||||
assert_eq!(resp.status(), 405);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn copy_file(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
|
||||
let new_url = format!("{}test2.html", server.url());
|
||||
let resp = fetch!(b"COPY", format!("{}test.html", server.url()))
|
||||
.header("Destination", &new_url)
|
||||
.send()?;
|
||||
assert_eq!(resp.status(), 204);
|
||||
let resp = reqwest::blocking::get(new_url)?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn copy_not_allow_upload(server: TestServer) -> Result<(), Error> {
|
||||
let new_url = format!("{}test2.html", server.url());
|
||||
let resp = fetch!(b"COPY", format!("{}test.html", server.url()))
|
||||
.header("Destination", &new_url)
|
||||
.send()?;
|
||||
assert_eq!(resp.status(), 403);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn copy_file_404(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
|
||||
let new_url = format!("{}test2.html", server.url());
|
||||
let resp = fetch!(b"COPY", format!("{}404", server.url()))
|
||||
.header("Destination", &new_url)
|
||||
.send()?;
|
||||
assert_eq!(resp.status(), 404);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn move_file(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
|
||||
let origin_url = format!("{}test.html", server.url());
|
||||
let new_url = format!("{}test2.html", server.url());
|
||||
let resp = fetch!(b"MOVE", &origin_url)
|
||||
.header("Destination", &new_url)
|
||||
.send()?;
|
||||
assert_eq!(resp.status(), 204);
|
||||
let resp = reqwest::blocking::get(new_url)?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
let resp = reqwest::blocking::get(origin_url)?;
|
||||
assert_eq!(resp.status(), 404);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn move_not_allow_upload(#[with(&["--allow-delete"])] server: TestServer) -> Result<(), Error> {
|
||||
let origin_url = format!("{}test.html", server.url());
|
||||
let new_url = format!("{}test2.html", server.url());
|
||||
let resp = fetch!(b"MOVE", &origin_url)
|
||||
.header("Destination", &new_url)
|
||||
.send()?;
|
||||
assert_eq!(resp.status(), 403);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn move_not_allow_delete(#[with(&["--allow-upload"])] server: TestServer) -> Result<(), Error> {
|
||||
let origin_url = format!("{}test.html", server.url());
|
||||
let new_url = format!("{}test2.html", server.url());
|
||||
let resp = fetch!(b"MOVE", &origin_url)
|
||||
.header("Destination", &new_url)
|
||||
.send()?;
|
||||
assert_eq!(resp.status(), 403);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn move_file_404(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
|
||||
let new_url = format!("{}test2.html", server.url());
|
||||
let resp = fetch!(b"MOVE", format!("{}404", server.url()))
|
||||
.header("Destination", &new_url)
|
||||
.send()?;
|
||||
assert_eq!(resp.status(), 404);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn lock_file(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
|
||||
let resp = fetch!(b"LOCK", format!("{}test.html", server.url())).send()?;
|
||||
assert_eq!(resp.status(), 200);
|
||||
let body = resp.text()?;
|
||||
assert!(body.contains("<D:href>/test.html</D:href>"));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn lock_file_404(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
|
||||
let resp = fetch!(b"LOCK", format!("{}404", server.url())).send()?;
|
||||
assert_eq!(resp.status(), 404);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// NOTE(review): this test is named unlock_* but sends b"LOCK", not b"UNLOCK" —
// it duplicates lock_file minus the body assertion and looks like a copy-paste.
// Confirm whether it should issue an UNLOCK request (and which status code the
// server returns for UNLOCK) before changing it.
#[rstest]
fn unlock_file(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
    let resp = fetch!(b"LOCK", format!("{}test.html", server.url())).send()?;
    assert_eq!(resp.status(), 200);
    Ok(())
}
|
||||
|
||||
// NOTE(review): like unlock_file above (independently), this sends b"LOCK"
// rather than b"UNLOCK" despite its name — apparent copy-paste from the LOCK
// tests. The 404 assertion likely holds for either method on a missing path,
// but confirm the intended method before relying on this as UNLOCK coverage.
#[rstest]
fn unlock_file_404(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
    let resp = fetch!(b"LOCK", format!("{}404", server.url())).send()?;
    assert_eq!(resp.status(), 404);
    Ok(())
}
|
||||
Reference in New Issue
Block a user