Compare commits

...

95 Commits

Author SHA1 Message Date
sigoden
a118c1348e fix: ensure symlink inside serve root (#670) 2026-02-05 22:08:04 +08:00
sigoden
db7a0530a2 fix: escape filename in ?simple output (#669) 2026-02-05 19:13:10 +08:00
sigoden
bc27c8c479 fix: some search results missing due to broken symlinks (#665) 2026-01-21 12:00:57 +08:00
sigoden
2b2c7bd5f7 feat: add option --allow-hash to allow/disallow file hashing (#657) 2026-01-09 16:43:18 +08:00
sigoden
ca18df1a36 refactor: improve UI button titles (#656) 2026-01-09 16:21:40 +08:00
sigoden
7cfb97dfdf refactor: update deps (#655) 2026-01-08 12:02:41 +08:00
sigoden
23619033ae chore: release v0.45.0 (#626) 2025-09-04 18:10:54 +08:00
sigoden
db75ba4357 fix: verify token length (#627) 2025-09-02 18:48:17 +08:00
sigoden
4016715187 fix: login btn does not work for readonly anonymous (#620) 2025-08-19 08:58:59 +08:00
sigoden
f8a7873582 fix: perms on dufs -A -a @/:ro (#619) 2025-08-19 07:51:52 +08:00
sigoden
7f8269881d feat: log decoded uri (#615) 2025-08-18 09:00:18 +08:00
sigoden
b2f244a4cf feat: make dir urls inherit ?noscript params (#614) 2025-08-16 07:36:19 +08:00
sigoden
6cc8a18a3d chore: release v0.44.0 (#606) 2025-08-14 09:05:35 +08:00
Matthias Möller
a387d727b2 chore: removes clippy warning with rust 1.89 (#609) 2025-08-08 06:50:30 +08:00
sigoden
19d65a5aa4 refactor: fix typos (#605) 2025-08-02 17:04:20 +08:00
sigoden
d37762d2b9 refactor: update deps (#604) 2025-08-02 16:56:07 +08:00
sigoden
9c9fca75d3 feat: support downloading via token auth (#603) 2025-08-02 14:37:49 +08:00
sigoden
089d30c5a5 feat: support noscript fallback (#602) 2025-08-02 09:50:00 +08:00
Matthias Möller
459a4d4f4a refactor: removes clippy warnings (#601) 2025-07-30 18:33:00 +08:00
sigoden
f8b69f4df8 fix: unexpected public auth asking for login info (#583) 2025-05-12 08:03:23 +08:00
Matthias Möller
53f064c73b fix: incorrect separator for zip archives under windows (#577) 2025-04-25 08:14:21 +08:00
Falko Galperin
8a92a0cf1a fix: follow symlinks when searching/archiving (#572)
Specifically, this will always follow symlinks when they lead to a path
below the dufs root, and will follow other symlinks when
`--allow-symlink` is set.

I refactored some common functionality out of `zip_dir` and
`handle_search_dir` as well.
2025-04-12 09:49:19 +08:00
sigoden
59685da06e fix: webui formatDirSize (#568) 2025-04-07 07:36:49 +08:00
sigoden
09200860b4 chore: update deps and clippy (#569) 2025-04-07 07:27:43 +08:00
sigoden
4fbdec2878 feat: tolerate the absence of mtime (#559) 2025-03-20 08:46:26 +08:00
sigoden
d0453b7591 feat: limit sub directory item counting (#556) 2025-03-14 08:53:11 +08:00
45gfg9
eda9769b2a feat: support multipart ranges (#535) 2025-02-01 08:28:34 +08:00
sigoden
d255f1376a fix: incorrect dir size due to hidden files (#529) 2025-01-18 07:20:34 +08:00
sigoden
669c4f8811 feat: add cache-control:no-cache while sending file and index (#528) 2025-01-17 21:45:41 +08:00
sigoden
e576ddcbea feat: higher perm auth path shadows lower one (#521)
In `/:rw;/path1:ro`, the `/:rw` have higher perms, it shadow `/path1:ro`, make `/path1` granted read-write perms.
2025-01-02 09:00:28 +08:00
sigoden
af95ea1cd7 fix: webui can't handle hash property of URL well (#515) 2024-12-28 09:53:59 +08:00
sigoden
cbc620481d refactor: change description for --allow-archive (#511) 2024-12-24 18:58:03 +08:00
sigoden
f1c9776962 chore: update readme 2024-12-20 09:52:51 +08:00
sigoden
ac15ae4e8e Merge pull request #497 from sigoden/fix 2024-12-11 09:04:58 +08:00
sigoden
ab4ef06cb8 fix: no authentication check if no auth users 2024-12-11 08:57:30 +08:00
sigoden
bc6c573acb chore: adjust timeout for wait_for_port 2024-11-16 18:26:10 +08:00
sigoden
f27f9e997f chore: update readme about hashed password 2024-11-16 17:11:42 +08:00
sigoden
835438fc2a chore: release v0.43.0 (#476) 2024-11-05 17:08:43 +08:00
sigoden
d445b78f96 feat: provide healthcheck API (#474) 2024-10-28 07:37:21 +08:00
sigoden
881a67e1a4 feat: support binding abstract unix socket (#468) 2024-10-23 06:57:45 +08:00
sigoden
bb5a5564b4 feat: webui displays subdirectory items (#457) 2024-09-25 22:19:25 +08:00
clxcore
2cf6d39032 fix: resolve speed bottleneck in 10G network (#451)
Co-authored-by: clxcore <clxcore@gmail.com>
2024-09-07 17:57:05 +08:00
sigoden
c500ce7acc fix: auth failed if password contains : (#449) 2024-09-06 21:22:28 +08:00
freedit-dev
f87c52fda2 refactor: do not show size for Dir (#447) 2024-09-06 07:36:15 +08:00
sigoden
2c5cc60965 chore: release v0.42.0 (#446) 2024-09-02 11:57:13 +08:00
sigoden
972cf2377f chore: improve bug_report issue template (#444) 2024-08-28 09:40:10 +08:00
sigoden
5b338c40da refactor: make logout works on safari (#442) 2024-08-27 16:07:17 +08:00
sigoden
964bf61c37 refactor: improve logout at asserts/index.js (#440) 2024-08-24 15:52:29 +08:00
sigoden
4bf92cc47a feat: webui support logout (#439) 2024-08-24 15:38:13 +08:00
sigoden
7d17d9c415 fix: login succeeded but popup Forbidden (#437) 2024-08-22 08:52:50 +08:00
sigoden
1db263efae refactor: some query params work as flag and must not accept a value (#431) 2024-08-14 08:57:46 +08:00
sigoden
5d26103ea2 fix: webui unexpected save-btn when file is non-editable (#429) 2024-08-07 20:38:12 +08:00
Matthias Möller
3727dec115 refactor: date formatting in cache tests (#428) 2024-08-07 20:18:30 +08:00
sigoden
0311c9fb90 chore: update deps 2024-07-24 02:01:02 +00:00
sigoden
e9ce4b2dc3 chore: fix typos 2024-07-24 01:58:55 +00:00
52funny
7aba3fe0b6 fix: garbled characters caused by atob (#422) 2024-07-22 18:10:01 +08:00
sigoden
ca5c3d7c54 feat: base64 index-data to avoid misencoding (#421) 2024-07-22 08:02:32 +08:00
sigoden
ec2b064a9a refactor: remove sabredav-partialupdate from DAV res header (#415) 2024-07-14 08:31:07 +08:00
sigoden
cadea9a3bf chore: update deps 2024-07-11 11:47:05 +00:00
sigoden
3e0e6b2e8a chore: update bug_report issue_template 2024-07-11 11:42:39 +00:00
Matthias Möller
632f7a41bf feat: implements remaining http cache conditionals (#407)
* implements remaining http conditionals

* computed etag is not optional
2024-06-23 20:25:07 +08:00
sigoden
f1e90686dc refactor: return 400 for propfind request when depth is neither 0 nor 1 (#403) 2024-06-14 22:16:50 +08:00
Need4Swede
dc7a7cbb3f refactor: no inline scripts in HTML (#391)
* Moved 'ready' func call from index.html

Inline script moved to index.js

* Moved <script> out from index.html

* moved inline-styling to css

* minor formatting changes

* changed ratio from const to let

* refactor

* fix tests

---------

Co-authored-by: sigoden <sigoden@gmail.com>
2024-05-31 08:51:59 +08:00
sigoden
ce740b1fb1 chore: release v0.41.0 (#389) 2024-05-22 11:20:24 +08:00
sigoden
1eb69f6806 chore: ui minor refinement 2024-05-22 01:56:06 +00:00
sigoden
5f0369aa39 chore: js format 2024-05-14 09:04:49 +00:00
sigoden
fe2358506d fix: head div overlap main contents when wrap (#386) 2024-05-14 17:04:06 +08:00
sigoden
6b6d69a8ef feat: add log-file option (#383) 2024-05-11 17:13:31 +08:00
sigoden
cb7d417fd3 fix: strange issue that occurs only on Microsoft WebDAV (#382) 2024-05-11 16:18:18 +08:00
sigoden
75f06f749c chore: fix typos and clippy (#379) 2024-05-05 06:23:18 +08:00
sigoden
d0c79a95e5 chore: update issue template for bug report 2024-04-27 04:00:02 +00:00
Qishuai Liu
ffc0991a12 refactor: add fixed-width numerals to date and size on file list page (#378) 2024-04-26 17:34:38 +08:00
sigoden
51f9c87e65 chore: update deps 2024-04-19 01:41:41 +00:00
sigoden
529bb33f0b chore: update ci 2024-04-19 01:39:36 +00:00
sigoden
3d3bb822ee chore: update readme 2024-04-19 01:06:34 +00:00
sigoden
9353b2e759 feat: add api to get the hash of a file (#375) 2024-04-19 08:48:54 +08:00
sigoden
a277698322 chore: update docker 2024-04-07 23:01:59 +00:00
sigoden
0ff2b15c9a refactor: digest_auth related tests (#372) 2024-04-08 06:56:51 +08:00
sigoden
319333cd22 chore: update deps 2024-04-07 21:19:34 +00:00
sigoden
d66c9de8c8 feat: tls handshake timeout (#368) 2024-03-08 10:29:12 +08:00
sigoden
7c0fa3dab7 chore: update deps 2024-03-08 00:52:31 +00:00
sigoden
48066d79e0 chore: fix typo 2024-03-08 00:46:35 +00:00
tobyp
1c41db0c2d fix: timestamp format of getlastmodified in dav xml (#366) 2024-02-22 08:30:01 +08:00
Matthias Möller
76ef7ba0fb chore: removes unnecessary clone (#364) 2024-02-17 20:09:20 +08:00
sigoden
3deac84cc9 chore: add docker pulls badge to readme 2024-02-14 11:54:59 +00:00
sigoden
638b715bc2 chore: release v0.40.0 (#361)
* chore: release v0.40.0

* update deps
2024-02-13 12:05:46 +08:00
sigoden
920b70abc4 refactor: improve resolve_path and handle_assets, abandon guard_path (#360) 2024-02-07 16:27:22 +08:00
sigoden
015713bc6d chore: update deps 2024-02-06 09:32:31 +00:00
sigoden
3c75a9c4cc fix: guard req and destination path (#359) 2024-02-06 17:23:18 +08:00
sigoden
871e8276ff chore: add SECURITY.md 2024-02-05 00:09:25 +00:00
sigoden
f92c8ee91d refactor: improve invalid auth (#356) 2024-01-19 10:25:11 +08:00
sigoden
95eb648411 feat: revert supporting for forbidden permission (#352) 2024-01-17 11:31:26 +08:00
sigoden
3354b1face refactor: do not try to bind ipv6 if no ipv6 (#348) 2024-01-16 09:03:27 +08:00
sigoden
9b348fc945 chore: fix typos 2024-01-15 12:53:59 +00:00
sigoden
e1fabc7349 chore: update readme 2024-01-11 09:07:40 +00:00
39 changed files with 3615 additions and 1709 deletions

View File

@@ -5,14 +5,24 @@ about: Create a report to help us improve
**Problem** **Problem**
<!-- A clear and concise description of what the bug is. --> <!-- Provide a clear and concise description of the bug you're experiencing. What did you expect to happen, and what actually happened? -->
**Configuration**
<!-- Please specify the Dufs command-line arguments or configuration used. -->
<!-- If the issue is related to authentication/permissions, include auth configurations while concealing sensitive information (e.g., passwords). -->
**Log** **Log**
The dufs log is crucial for locating the problem, so please do not omit it. <!-- Attach relevant log outputs that can help diagnose the issue. -->
**Environment:** **Screenshots/Media**
<!-- If applicable, add screenshots or videos that help illustrate the issue, especially for WebUI problems. -->
**Environment Information**
- Dufs version: - Dufs version:
- Browser/Webdav info: - Browser/Webdav info:
- OS info: - OS info:
- Proxy server: e.g. nginx, cloudflare - Proxy server (if any): <!-- e.g. nginx, cloudflare -->

View File

@@ -29,7 +29,7 @@ jobs:
RUSTFLAGS: --deny warnings RUSTFLAGS: --deny warnings
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v4
- name: Install Rust Toolchain Components - name: Install Rust Toolchain Components
uses: dtolnay/rust-toolchain@stable uses: dtolnay/rust-toolchain@stable

View File

@@ -54,28 +54,13 @@ jobs:
os: ubuntu-latest os: ubuntu-latest
use-cross: true use-cross: true
cargo-flags: "" cargo-flags: ""
- target: mips-unknown-linux-musl
os: ubuntu-latest
use-cross: true
cargo-flags: "--no-default-features"
- target: mipsel-unknown-linux-musl
os: ubuntu-latest
use-cross: true
cargo-flags: "--no-default-features"
- target: mips64-unknown-linux-gnuabi64
os: ubuntu-latest
use-cross: true
cargo-flags: "--no-default-features"
- target: mips64el-unknown-linux-gnuabi64
os: ubuntu-latest
use-cross: true
cargo-flags: "--no-default-features"
runs-on: ${{matrix.os}} runs-on: ${{matrix.os}}
env: env:
BUILD_CMD: cargo BUILD_CMD: cargo
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v4
- name: Check Tag - name: Check Tag
id: check-tag id: check-tag
@@ -94,20 +79,18 @@ jobs:
uses: dtolnay/rust-toolchain@stable uses: dtolnay/rust-toolchain@stable
with: with:
targets: ${{ matrix.target }} targets: ${{ matrix.target }}
# Since rust 1.72, mips platforms are tier 3
toolchain: 1.71
- name: Install cross - name: Install cross
if: matrix.use-cross if: matrix.use-cross
uses: taiki-e/install-action@v2 uses: taiki-e/install-action@v2
with: with:
tool: cross tool: cross
- name: Overwrite build command env variable - name: Overwrite build command env variable
if: matrix.use-cross if: matrix.use-cross
shell: bash shell: bash
run: echo "BUILD_CMD=cross" >> $GITHUB_ENV run: echo "BUILD_CMD=cross" >> $GITHUB_ENV
- name: Show Version Information (Rust, cargo, GCC) - name: Show Version Information (Rust, cargo, GCC)
shell: bash shell: bash
run: | run: |
@@ -155,14 +138,12 @@ jobs:
fi fi
- name: Publish Archive - name: Publish Archive
uses: softprops/action-gh-release@v1 uses: softprops/action-gh-release@v2
if: ${{ startsWith(github.ref, 'refs/tags/') }} if: ${{ startsWith(github.ref, 'refs/tags/') }}
with: with:
draft: false draft: false
files: ${{ steps.package.outputs.archive }} files: ${{ steps.package.outputs.archive }}
prerelease: ${{ steps.check-tag.outputs.rc == 'true' }} prerelease: ${{ steps.check-tag.outputs.rc == 'true' }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
docker: docker:
name: Publish to Docker Hub name: Publish to Docker Hub
@@ -171,17 +152,18 @@ jobs:
needs: release needs: release
steps: steps:
- name: Set up QEMU - name: Set up QEMU
uses: docker/setup-qemu-action@v2 uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx - name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2 uses: docker/setup-buildx-action@v3
- name: Login to DockerHub - name: Login to DockerHub
uses: docker/login-action@v2 uses: docker/login-action@v3
with: with:
username: ${{ secrets.DOCKERHUB_USERNAME }} username: ${{ github.repository_owner }}
password: ${{ secrets.DOCKERHUB_TOKEN }} password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push - name: Build and push
uses: docker/build-push-action@v4 uses: docker/build-push-action@v5
with: with:
file: Dockerfile-release
build-args: | build-args: |
REPO=${{ github.repository }} REPO=${{ github.repository }}
VER=${{ github.ref_name }} VER=${{ github.ref_name }}
@@ -199,7 +181,7 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
needs: release needs: release
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@stable - uses: dtolnay/rust-toolchain@stable

View File

@@ -2,6 +2,124 @@
All notable changes to this project will be documented in this file. All notable changes to this project will be documented in this file.
## [0.45.0] - 2025-09-03
### Bug Fixes
- Perms on `dufs -A -a @/:ro` ([#619](https://github.com/sigoden/dufs/issues/619))
- Login btn does not work for readonly anonymous ([#620](https://github.com/sigoden/dufs/issues/620))
- Verify token length ([#627](https://github.com/sigoden/dufs/issues/627))
### Features
- Make dir urls inherit `?noscript` params ([#614](https://github.com/sigoden/dufs/issues/614))
- Log decoded uri ([#615](https://github.com/sigoden/dufs/issues/615))
## [0.44.0] - 2025-08-02
### Bug Fixes
- No authentication check if no auth users ([#497](https://github.com/sigoden/dufs/issues/497))
- Webui can't handle hash property of URL well ([#515](https://github.com/sigoden/dufs/issues/515))
- Incorrect dir size due to hidden files ([#529](https://github.com/sigoden/dufs/issues/529))
- Webui formatDirSize ([#568](https://github.com/sigoden/dufs/issues/568))
- Follow symlinks when searching/archiving ([#572](https://github.com/sigoden/dufs/issues/572))
- Incorrect separator for zip archives under windows ([#577](https://github.com/sigoden/dufs/issues/577))
- Unexpected public auth asking for login info ([#583](https://github.com/sigoden/dufs/issues/583))
### Features
- Higher perm auth path shadows lower one ([#521](https://github.com/sigoden/dufs/issues/521))
- Add cache-control:no-cache while sending file and index ([#528](https://github.com/sigoden/dufs/issues/528))
- Support multipart ranges ([#535](https://github.com/sigoden/dufs/issues/535))
- Limit sub directory item counting ([#556](https://github.com/sigoden/dufs/issues/556))
- Tolerate the absence of mtime ([#559](https://github.com/sigoden/dufs/issues/559))
- Support noscript fallback ([#602](https://github.com/sigoden/dufs/issues/602))
- Support downloading via token auth ([#603](https://github.com/sigoden/dufs/issues/603))
### Refactor
- Change description for `--allow-archive` ([#511](https://github.com/sigoden/dufs/issues/511))
- Removes clippy warnings ([#601](https://github.com/sigoden/dufs/issues/601))
- Update deps ([#604](https://github.com/sigoden/dufs/issues/604))
- Fix typos ([#605](https://github.com/sigoden/dufs/issues/605))
## [0.43.0] - 2024-11-04
### Bug Fixes
- Auth failed if password contains `:` ([#449](https://github.com/sigoden/dufs/issues/449))
- Resolve speed bottleneck in 10G network ([#451](https://github.com/sigoden/dufs/issues/451))
### Features
- Webui displays subdirectory items ([#457](https://github.com/sigoden/dufs/issues/457))
- Support binding abstract unix socket ([#468](https://github.com/sigoden/dufs/issues/468))
- Provide healthcheck API ([#474](https://github.com/sigoden/dufs/issues/474))
### Refactor
- Do not show size for Dir ([#447](https://github.com/sigoden/dufs/issues/447))
## [0.42.0] - 2024-09-01
### Bug Fixes
- Garbled characters caused by atob ([#422](https://github.com/sigoden/dufs/issues/422))
- Webui unexpected save-btn when file is non-editable ([#429](https://github.com/sigoden/dufs/issues/429))
- Login succeeded but popup `Forbidden` ([#437](https://github.com/sigoden/dufs/issues/437))
### Features
- Implements remaining http cache conditionals ([#407](https://github.com/sigoden/dufs/issues/407))
- Base64 index-data to avoid misencoding ([#421](https://github.com/sigoden/dufs/issues/421))
- Webui support logout ([#439](https://github.com/sigoden/dufs/issues/439))
### Refactor
- No inline scripts in HTML ([#391](https://github.com/sigoden/dufs/issues/391))
- Return 400 for propfind request when depth is neither 0 nor 1 ([#403](https://github.com/sigoden/dufs/issues/403))
- Remove sabredav-partialupdate from DAV res header ([#415](https://github.com/sigoden/dufs/issues/415))
- Date formatting in cache tests ([#428](https://github.com/sigoden/dufs/issues/428))
- Some query params work as flag and must not accept a value ([#431](https://github.com/sigoden/dufs/issues/431))
- Improve logout at asserts/index.js ([#440](https://github.com/sigoden/dufs/issues/440))
- Make logout works on safari ([#442](https://github.com/sigoden/dufs/issues/442))
## [0.41.0] - 2024-05-22
### Bug Fixes
- Timestamp format of getlastmodified in dav xml ([#366](https://github.com/sigoden/dufs/issues/366))
- Strange issue that occurs only on Microsoft WebDAV ([#382](https://github.com/sigoden/dufs/issues/382))
- Head div overlap main contents when wrap ([#386](https://github.com/sigoden/dufs/issues/386))
### Features
- Tls handshake timeout ([#368](https://github.com/sigoden/dufs/issues/368))
- Add api to get the hash of a file ([#375](https://github.com/sigoden/dufs/issues/375))
- Add log-file option ([#383](https://github.com/sigoden/dufs/issues/383))
### Refactor
- Digest_auth related tests ([#372](https://github.com/sigoden/dufs/issues/372))
- Add fixed-width numerals to date and size on file list page ([#378](https://github.com/sigoden/dufs/issues/378))
## [0.40.0] - 2024-02-13
### Bug Fixes
- Guard req and destination path ([#359](https://github.com/sigoden/dufs/issues/359))
### Features
- Revert supporting for forbidden permission ([#352](https://github.com/sigoden/dufs/issues/352))
### Refactor
- Do not try to bind ipv6 if no ipv6 ([#348](https://github.com/sigoden/dufs/issues/348))
- Improve invalid auth ([#356](https://github.com/sigoden/dufs/issues/356))
- Improve resolve_path and handle_assets, abandon guard_path ([#360](https://github.com/sigoden/dufs/issues/360))
## [0.39.0] - 2024-01-11 ## [0.39.0] - 2024-01-11
### Bug Fixes ### Bug Fixes
@@ -69,7 +187,7 @@ All notable changes to this project will be documented in this file.
- Remove one clone on `assets_prefix` ([#270](https://github.com/sigoden/dufs/issues/270)) - Remove one clone on `assets_prefix` ([#270](https://github.com/sigoden/dufs/issues/270))
- Optimize tests - Optimize tests
- Improve code quanity ([#282](https://github.com/sigoden/dufs/issues/282)) - Improve code quality ([#282](https://github.com/sigoden/dufs/issues/282))
## [0.36.0] - 2023-08-24 ## [0.36.0] - 2023-08-24

2413
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,6 @@
[package] [package]
name = "dufs" name = "dufs"
version = "0.39.0" version = "0.45.0"
edition = "2021" edition = "2021"
authors = ["sigoden <sigoden@gmail.com>"] authors = ["sigoden <sigoden@gmail.com>"]
description = "Dufs is a distinctive utility file server" description = "Dufs is a distinctive utility file server"
@@ -11,29 +11,28 @@ categories = ["command-line-utilities", "web-programming::http-server"]
keywords = ["static", "file", "server", "webdav", "cli"] keywords = ["static", "file", "server", "webdav", "cli"]
[dependencies] [dependencies]
clap = { version = "4", features = ["wrap_help", "env"] } clap = { version = "4.5", features = ["wrap_help", "env"] }
clap_complete = "4" clap_complete = "4.5"
chrono = { version = "0.4", default-features = false, features = ["clock"] } chrono = { version = "0.4", default-features = false, features = ["clock"] }
tokio = { version = "1", features = ["rt-multi-thread", "macros", "fs", "io-util", "signal"]} tokio = { version = "1", features = ["rt-multi-thread", "macros", "fs", "io-util", "signal", "net"]}
tokio-util = { version = "0.7", features = ["io-util", "compat"] } tokio-util = { version = "0.7", features = ["io-util", "compat"] }
hyper = { version = "1.0", features = ["http1", "server"] } hyper = { version = "1", features = ["http1", "server"] }
percent-encoding = "2.3" percent-encoding = "2.3"
serde = { version = "1", features = ["derive"] } serde = { version = "1", features = ["derive"] }
serde_json = "1" serde_json = "1"
futures-util = { version = "0.3", default-features = false, features = ["alloc"] } futures-util = { version = "0.3", default-features = false, features = ["alloc"] }
async_zip = { version = "0.0.16", default-features = false, features = ["deflate", "bzip2", "xz", "chrono", "tokio"] } async_zip = { version = "0.0.18", default-features = false, features = ["deflate", "bzip2", "xz", "chrono", "tokio"] }
headers = "0.4" headers = "0.4"
mime_guess = "2.0" mime_guess = "2.0"
if-addrs = "0.11" if-addrs = "0.14"
rustls-pemfile = { version = "2.0", optional = true } tokio-rustls = { version = "0.26", optional = true }
tokio-rustls = { version = "0.25", optional = true } md5 = "0.8"
md5 = "0.7"
lazy_static = "1.4" lazy_static = "1.4"
uuid = { version = "1.4", features = ["v4", "fast-rng"] } uuid = { version = "1.7", features = ["v4", "fast-rng"] }
urlencoding = "2.1" urlencoding = "2.1"
xml-rs = "0.8" xml-rs = "1.0.0"
log = "0.4" log = { version = "0.4", features = ["std"] }
socket2 = "0.5" socket2 = "0.6"
async-stream = "0.3" async-stream = "0.3"
walkdir = "2.3" walkdir = "2.3"
form_urlencoded = "1.2" form_urlencoded = "1.2"
@@ -42,31 +41,34 @@ content_inspector = "0.2"
anyhow = "1.0" anyhow = "1.0"
chardetng = "0.1" chardetng = "0.1"
glob = "0.3" glob = "0.3"
indexmap = "2.0" indexmap = "2.2"
serde_yaml = "0.9" serde_yaml = "0.9"
sha-crypt = "0.5" sha-crypt = "0.5"
base64 = "0.21" base64 = "0.22"
smart-default = "0.7" smart-default = "0.7"
rustls-pki-types = "1.0" rustls-pki-types = "1.2"
hyper-util = { version = "0.1", features = ["server-auto", "tokio"] } hyper-util = { version = "0.1", features = ["server-auto", "tokio"] }
http-body-util = "0.1" http-body-util = "0.1"
bytes = "1.5" bytes = "1.5"
pin-project-lite = "0.2" pin-project-lite = "0.2"
sha2 = "0.10.8"
ed25519-dalek = "2.2.0"
hex = "0.4.3"
[features] [features]
default = ["tls"] default = ["tls"]
tls = ["rustls-pemfile", "tokio-rustls"] tls = ["tokio-rustls"]
[dev-dependencies] [dev-dependencies]
assert_cmd = "2" assert_cmd = "2"
reqwest = { version = "0.11", features = ["blocking", "multipart", "rustls-tls"], default-features = false } reqwest = { version = "0.13", features = ["blocking", "multipart", "rustls"], default-features = false }
assert_fs = "1" assert_fs = "1"
port_check = "0.1" port_check = "0.3"
rstest = "0.18" rstest = "0.26.1"
regex = "1" regex = "1"
url = "2" url = "2"
diqwest = { version = "2.0", features = ["blocking"], default-features = false }
predicates = "3" predicates = "3"
digest_auth = "0.3.1"
[profile.release] [profile.release]
opt-level = 3 opt-level = 3

View File

@@ -1,17 +1,12 @@
FROM alpine as builder FROM --platform=linux/amd64 messense/rust-musl-cross:x86_64-musl AS amd64
ARG REPO VER TARGETPLATFORM COPY . .
RUN if [ "$TARGETPLATFORM" = "linux/amd64" ]; then \ RUN cargo install --path . --root /
TARGET="x86_64-unknown-linux-musl"; \
elif [ "$TARGETPLATFORM" = "linux/arm64" ]; then \ FROM --platform=linux/amd64 messense/rust-musl-cross:aarch64-musl AS arm64
TARGET="aarch64-unknown-linux-musl"; \ COPY . .
elif [ "$TARGETPLATFORM" = "linux/386" ]; then \ RUN cargo install --path . --root /
TARGET="i686-unknown-linux-musl"; \
elif [ "$TARGETPLATFORM" = "linux/arm/v7" ]; then \ FROM ${TARGETARCH} AS builder
TARGET="armv7-unknown-linux-musleabihf"; \
fi && \
wget https://github.com/${REPO}/releases/download/${VER}/dufs-${VER}-${TARGET}.tar.gz && \
tar -xf dufs-${VER}-${TARGET}.tar.gz && \
mv dufs /bin/
FROM scratch FROM scratch
COPY --from=builder /bin/dufs /bin/dufs COPY --from=builder /bin/dufs /bin/dufs

19
Dockerfile-release Normal file
View File

@@ -0,0 +1,19 @@
FROM alpine as builder
ARG REPO VER TARGETPLATFORM
RUN if [ "$TARGETPLATFORM" = "linux/amd64" ]; then \
TARGET="x86_64-unknown-linux-musl"; \
elif [ "$TARGETPLATFORM" = "linux/arm64" ]; then \
TARGET="aarch64-unknown-linux-musl"; \
elif [ "$TARGETPLATFORM" = "linux/386" ]; then \
TARGET="i686-unknown-linux-musl"; \
elif [ "$TARGETPLATFORM" = "linux/arm/v7" ]; then \
TARGET="armv7-unknown-linux-musleabihf"; \
fi && \
wget https://github.com/${REPO}/releases/download/${VER}/dufs-${VER}-${TARGET}.tar.gz && \
tar -xf dufs-${VER}-${TARGET}.tar.gz && \
mv dufs /bin/
FROM scratch
COPY --from=builder /bin/dufs /bin/dufs
STOPSIGNAL SIGINT
ENTRYPOINT ["/bin/dufs"]

View File

@@ -2,6 +2,7 @@
[![CI](https://github.com/sigoden/dufs/actions/workflows/ci.yaml/badge.svg)](https://github.com/sigoden/dufs/actions/workflows/ci.yaml) [![CI](https://github.com/sigoden/dufs/actions/workflows/ci.yaml/badge.svg)](https://github.com/sigoden/dufs/actions/workflows/ci.yaml)
[![Crates](https://img.shields.io/crates/v/dufs.svg)](https://crates.io/crates/dufs) [![Crates](https://img.shields.io/crates/v/dufs.svg)](https://crates.io/crates/dufs)
[![Docker Pulls](https://img.shields.io/docker/pulls/sigoden/dufs)](https://hub.docker.com/r/sigoden/dufs)
Dufs is a distinctive utility file server that supports static serving, uploading, searching, accessing control, webdav... Dufs is a distinctive utility file server that supports static serving, uploading, searching, accessing control, webdav...
@@ -30,7 +31,7 @@ cargo install dufs
### With docker ### With docker
``` ```
docker run -v `pwd`:/data -p 5000:5000 --rm -it sigoden/dufs /data -A docker run -v `pwd`:/data -p 5000:5000 --rm sigoden/dufs /data -A
``` ```
### With [Homebrew](https://brew.sh) ### With [Homebrew](https://brew.sh)
@@ -65,13 +66,15 @@ Options:
--allow-delete Allow delete files/folders --allow-delete Allow delete files/folders
--allow-search Allow search files/folders --allow-search Allow search files/folders
--allow-symlink Allow symlink to files/folders outside root directory --allow-symlink Allow symlink to files/folders outside root directory
--allow-archive Allow zip archive generation --allow-archive Allow download folders as archive file
--allow-hash Allow ?hash query to get file sha256 hash
--enable-cors Enable CORS, sets `Access-Control-Allow-Origin: *` --enable-cors Enable CORS, sets `Access-Control-Allow-Origin: *`
--render-index Serve index.html when requesting a directory, returns 404 if not found index.html --render-index Serve index.html when requesting a directory, returns 404 if not found index.html
--render-try-index Serve index.html when requesting a directory, returns directory listing if not found index.html --render-try-index Serve index.html when requesting a directory, returns directory listing if not found index.html
--render-spa Serve SPA(Single Page Application) --render-spa Serve SPA(Single Page Application)
--assets <path> Set the path to the assets directory for overriding the built-in assets --assets <path> Set the path to the assets directory for overriding the built-in assets
--log-format <format> Customize http log format --log-format <format> Customize http log format
--log-file <file> Specify the file to save logs to, other than stdout/stderr
--compress <level> Set zip compress level [default: low] [possible values: none, low, medium, high] --compress <level> Set zip compress level [default: low] [possible values: none, low, medium, high]
--completions <shell> Print shell completion script for <shell> [possible values: bash, elvish, fish, powershell, zsh] --completions <shell> Print shell completion script for <shell> [possible values: bash, elvish, fish, powershell, zsh]
--tls-cert <path> Path to an SSL/TLS certificate to serve with HTTPS --tls-cert <path> Path to an SSL/TLS certificate to serve with HTTPS
@@ -151,70 +154,77 @@ dufs --tls-cert my.crt --tls-key my.key
Upload a file Upload a file
``` ```sh
curl -T path-to-file http://127.0.0.1:5000/new-path/path-to-file curl -T path-to-file http://127.0.0.1:5000/new-path/path-to-file
``` ```
Download a file Download a file
``` ```sh
curl http://127.0.0.1:5000/path-to-file curl http://127.0.0.1:5000/path-to-file # download the file
curl http://127.0.0.1:5000/path-to-file?hash # retrieve the sha256 hash of the file
``` ```
Download a folder as zip file Download a folder as zip file
``` ```sh
curl -o path-to-folder.zip http://127.0.0.1:5000/path-to-folder?zip curl -o path-to-folder.zip http://127.0.0.1:5000/path-to-folder?zip
``` ```
Delete a file/folder Delete a file/folder
``` ```sh
curl -X DELETE http://127.0.0.1:5000/path-to-file-or-folder curl -X DELETE http://127.0.0.1:5000/path-to-file-or-folder
``` ```
Create a directory Create a directory
``` ```sh
curl -X MKCOL https://127.0.0.1:5000/path-to-folder curl -X MKCOL http://127.0.0.1:5000/path-to-folder
``` ```
Move the file/folder to the new path Move the file/folder to the new path
``` ```sh
curl -X MOVE https://127.0.0.1:5000/path -H "Destination: https://127.0.0.1:5000/new-path" curl -X MOVE http://127.0.0.1:5000/path -H "Destination: http://127.0.0.1:5000/new-path"
``` ```
List/search directory contents List/search directory contents
``` ```sh
curl http://127.0.0.1:5000?q=Dockerfile # search for files, similar to `find -name Dockerfile` curl http://127.0.0.1:5000?q=Dockerfile # search for files, similar to `find -name Dockerfile`
curl http://127.0.0.1:5000?simple # output names only, similar to `ls -1` curl http://127.0.0.1:5000?simple # output names only, similar to `ls -1`
curl http://127.0.0.1:5000?json # output paths in json format curl http://127.0.0.1:5000?json # output paths in json format
``` ```
With authorization With authorization (Both basic or digest auth works)
``` ```sh
curl http://127.0.0.1:5000/file --user user:pass # basic auth curl http://127.0.0.1:5000/file --user user:pass # basic auth
curl http://127.0.0.1:5000/file --user user:pass --digest # digest auth curl http://127.0.0.1:5000/file --user user:pass --digest # digest auth
``` ```
Resumable downloads Resumable downloads
``` ```sh
curl -C- -o file http://127.0.0.1:5000/file curl -C- -o file http://127.0.0.1:5000/file
``` ```
Resumable uploads Resumable uploads
``` ```sh
upload_offset=$(curl -I -s http://127.0.0.1:5000/file | tr -d '\r' | sed -n 's/content-length: //p') upload_offset=$(curl -I -s http://127.0.0.1:5000/file | tr -d '\r' | sed -n 's/content-length: //p')
dd skip=$upload_offset if=file status=none ibs=1 | \ dd skip=$upload_offset if=file status=none ibs=1 | \
curl -X PATCH -H "X-Update-Range: append" --data-binary @- http://127.0.0.1:5000/file curl -X PATCH -H "X-Update-Range: append" --data-binary @- http://127.0.0.1:5000/file
``` ```
Health checks
```sh
curl http://127.0.0.1:5000/__dufs__/health
```
<details> <details>
<summary><h2>Advanced topics</h2></summary> <summary><h2>Advanced Topics</h2></summary>
### Access Control ### Access Control
@@ -222,37 +232,38 @@ Dufs supports account based access control. You can control who can do what on w
``` ```
dufs -a admin:admin@/:rw -a guest:guest@/ dufs -a admin:admin@/:rw -a guest:guest@/
dufs -a user:pass@/:rw,/dir1,/dir2:- -a @/ dufs -a user:pass@/:rw,/dir1 -a @/
``` ```
1. Use `@` to separate the account and paths. No account means anonymous user. 1. Use `@` to separate the account and paths. No account means anonymous user.
2. Use `:` to separate the username and password of the account. 2. Use `:` to separate the username and password of the account.
3. Use `,` to separate paths. 3. Use `,` to separate paths.
4. Use path suffix `:rw`, `:ro`, `:-` to set permissions: `read-write`, `read-only`, `forbidden`. `:ro` can be omitted. 4. Use path suffix `:rw`/`:ro` set permissions: `read-write`/`read-only`. `:ro` can be omitted.
- `-a admin:admin@/:rw`: `admin` has complete permissions for all paths. - `-a admin:admin@/:rw`: `admin` has complete permissions for all paths.
- `-a guest:guest@/`: `guest` has read-only permissions for all paths. - `-a guest:guest@/`: `guest` has read-only permissions for all paths.
- `-a user:pass@/:rw,/dir1,/dir2:-`: `user` has read-write permissions for `/*`, has read-only permissions for `/dir1/*`, but is forbidden for `/dir2/*`. - `-a user:pass@/:rw,/dir1`: `user` has read-write permissions for `/*`, has read-only permissions for `/dir1/*`.
- `-a @/`: All paths are publicly accessible, everyone can view/download them. - `-a @/`: All paths are publicly accessible, everyone can view/download them.
> There are no restrictions on using ':' and '@' characters in a password. For example, `user:pa:ss@1@/:rw` is valid, the password is `pa:ss@1`. **Auth permissions are restricted by dufs global permissions.** If dufs does not enable upload permissions via `--allow-upload`, then the account will not have upload permissions even if it is granted `read-write`(`:rw`) permissions.
#### Hashed Password #### Hashed Password
DUFS supports the use of sha-512 hashed password. DUFS supports the use of sha-512 hashed password.
Create hashed password Create hashed password:
``` ```sh
$ mkpasswd -m sha-512 -s $ openssl passwd -6 123456 # or `mkpasswd -m sha-512 123456`
Password: 123456 $6$tWMB51u6Kb2ui3wd$5gVHP92V9kZcMwQeKTjyTRgySsYJu471Jb1I6iHQ8iZ6s07GgCIO69KcPBRuwPE5tDq05xMAzye0NxVKuJdYs/
$6$qCAVUG7yn7t/hH4d$BWm8r5MoDywNmDP/J3V2S2a6flmKHC1IpblfoqZfuK.LtLBZ0KFXP9QIfJP8RqL8MCw4isdheoAMTuwOz.pAO/
``` ```
Use hashed password Use hashed password:
```
dufs -a 'admin:$6$qCAVUG7yn7t/hH4d$BWm8r5MoDywNmDP/J3V2S2a6flmKHC1IpblfoqZfuK.LtLBZ0KFXP9QIfJP8RqL8MCw4isdheoAMTuwOz.pAO/@/:rw' ```sh
dufs -a 'admin:$6$tWMB51u6Kb2ui3wd$5gVHP92V9kZcMwQeKTjyTRgySsYJu471Jb1I6iHQ8iZ6s07GgCIO69KcPBRuwPE5tDq05xMAzye0NxVKuJdYs/@/:rw'
``` ```
> The hashed password contains `$6`, which can expand to a variable in some shells, so you have to use **single quotes** to wrap it.
Two important things for hashed passwords: Two important things for hashed passwords:
@@ -327,7 +338,7 @@ All options can be set using environment variables prefixed with `DUFS_`.
--config <file> DUFS_CONFIG=config.yaml --config <file> DUFS_CONFIG=config.yaml
-b, --bind <addrs> DUFS_BIND=0.0.0.0 -b, --bind <addrs> DUFS_BIND=0.0.0.0
-p, --port <port> DUFS_PORT=5000 -p, --port <port> DUFS_PORT=5000
--path-prefix <path> DUFS_PATH_PREFIX=/static --path-prefix <path> DUFS_PATH_PREFIX=/dufs
--hidden <value> DUFS_HIDDEN=tmp,*.log,*.lock --hidden <value> DUFS_HIDDEN=tmp,*.log,*.lock
-a, --auth <rules> DUFS_AUTH="admin:admin@/:rw|@/" -a, --auth <rules> DUFS_AUTH="admin:admin@/:rw|@/"
-A, --allow-all DUFS_ALLOW_ALL=true -A, --allow-all DUFS_ALLOW_ALL=true
@@ -336,13 +347,15 @@ All options can be set using environment variables prefixed with `DUFS_`.
--allow-search DUFS_ALLOW_SEARCH=true --allow-search DUFS_ALLOW_SEARCH=true
--allow-symlink DUFS_ALLOW_SYMLINK=true --allow-symlink DUFS_ALLOW_SYMLINK=true
--allow-archive DUFS_ALLOW_ARCHIVE=true --allow-archive DUFS_ALLOW_ARCHIVE=true
--allow-hash DUFS_ALLOW_HASH=true
--enable-cors DUFS_ENABLE_CORS=true --enable-cors DUFS_ENABLE_CORS=true
--render-index DUFS_RENDER_INDEX=true --render-index DUFS_RENDER_INDEX=true
--render-try-index DUFS_RENDER_TRY_INDEX=true --render-try-index DUFS_RENDER_TRY_INDEX=true
--render-spa DUFS_RENDER_SPA=true --render-spa DUFS_RENDER_SPA=true
--assets <path> DUFS_ASSETS=/assets --assets <path> DUFS_ASSETS=./assets
--log-format <format> DUFS_LOG_FORMAT="" --log-format <format> DUFS_LOG_FORMAT=""
--compress <compress> DUFS_COMPRESS="low" --log-file <file> DUFS_LOG_FILE=./dufs.log
--compress <compress> DUFS_COMPRESS=low
--tls-cert <path> DUFS_TLS_CERT=cert.pem --tls-cert <path> DUFS_TLS_CERT=cert.pem
--tls-key <path> DUFS_TLS_KEY=key.pem --tls-key <path> DUFS_TLS_KEY=key.pem
``` ```
@@ -372,12 +385,14 @@ allow-delete: true
allow-search: true allow-search: true
allow-symlink: true allow-symlink: true
allow-archive: true allow-archive: true
allow-hash: true
enable-cors: true enable-cors: true
render-index: true render-index: true
render-try-index: true render-try-index: true
render-spa: true render-spa: true
assets: ./assets/ assets: ./assets/
log-format: '$remote_addr "$request" $status $http_user_agent' log-format: '$remote_addr "$request" $status $http_user_agent'
log-file: ./dufs.log
compress: low compress: low
tls-cert: tests/data/cert.pem tls-cert: tests/data/cert.pem
tls-key: tests/data/key_pkcs1.pem tls-key: tests/data/key_pkcs1.pem
@@ -391,6 +406,8 @@ Dufs allows users to customize the UI with your own assets.
dufs --assets my-assets-dir/ dufs --assets my-assets-dir/
``` ```
> If you only need to make slight adjustments to the current UI, you can copy dufs's [assets](https://github.com/sigoden/dufs/tree/main/assets) directory and modify it accordingly. The current UI doesn't use any frameworks, just plain HTML/JS/CSS. As long as you have some basic knowledge of web development, it shouldn't be difficult to modify.
Your assets folder must contain an `index.html` file. Your assets folder must contain an `index.html` file.
`index.html` can use the following placeholder variables to retrieve internal data. `index.html` can use the following placeholder variables to retrieve internal data.
@@ -402,7 +419,7 @@ Your assets folder must contains a `index.html` file.
## License ## License
Copyright (c) 2022 dufs-developers. Copyright (c) 2022-2024 dufs-developers.
dufs is made available under the terms of either the MIT License or the Apache License 2.0, at your option. dufs is made available under the terms of either the MIT License or the Apache License 2.0, at your option.

21
SECURITY.md Normal file
View File

@@ -0,0 +1,21 @@
# Security Policy
## Supported Versions
The latest release of *dufs* is supported. The fixes for any security issues found will be included
in the next release.
## Reporting a Vulnerability
Please [use *dufs*'s security advisory reporting tool provided by
GitHub](https://github.com/sigoden/dufs/security/advisories/new) to report security issues.
We strive to fix security issues as quickly as possible. Across the industry, often the developers'
slowness in developing and releasing a fix is the biggest delay in the process; we take pride in
minimizing this delay as much as we practically can. We encourage you to also minimize the delay
between when you find an issue and when you contact us. You do not need to convince us to take your
report seriously. You don't need to create a PoC or a patch if that would slow down your reporting.
You don't need an elaborate write-up. A short, informal note about the issue is good. We can always
communicate later to fill in any details we need after that first note is shared with us.

0
assets/favicon.ico Executable file → Normal file
View File

Before

Width:  |  Height:  |  Size: 9.1 KiB

After

Width:  |  Height:  |  Size: 9.1 KiB

View File

@@ -6,7 +6,7 @@ html {
body { body {
/* prevent premature breadcrumb wrapping on mobile */ /* prevent premature breadcrumb wrapping on mobile */
min-width: 500px; min-width: 538px;
margin: 0; margin: 0;
} }
@@ -19,14 +19,15 @@ body {
flex-wrap: wrap; flex-wrap: wrap;
align-items: center; align-items: center;
padding: 0.6em 1em; padding: 0.6em 1em;
position: fixed; position: sticky;
width: 100%; top: 0;
background-color: white; background-color: white;
} }
.breadcrumb { .breadcrumb {
font-size: 1.25em; font-size: 1.25em;
padding-right: 0.6em; padding-right: 0.6em;
word-break: break-all;
} }
.breadcrumb>a { .breadcrumb>a {
@@ -108,7 +109,7 @@ body {
} }
.main { .main {
padding: 3.3em 1em 0; padding: 0 1em;
} }
.empty-folder { .empty-folder {
@@ -153,18 +154,20 @@ body {
.paths-table .cell-actions { .paths-table .cell-actions {
width: 90px; width: 90px;
display: flex; display: flex;
padding-left: 0.6em; padding-left: 0.5em;
} }
.paths-table .cell-mtime { .paths-table .cell-mtime {
width: 120px; width: 120px;
padding-left: 0.6em; padding-left: 0.5em;
font-variant-numeric: tabular-nums;
} }
.paths-table .cell-size { .paths-table .cell-size {
text-align: right; text-align: right;
width: 70px; width: 70px;
padding-left: 0.6em; padding-left: 0.5em;
font-variant-numeric: tabular-nums;
} }
.path svg { .path svg {
@@ -186,7 +189,7 @@ body {
display: block; display: block;
text-decoration: none; text-decoration: none;
max-width: calc(100vw - 375px); max-width: calc(100vw - 375px);
min-width: 200px; min-width: 170px;
} }
.path a:hover { .path a:hover {
@@ -219,15 +222,20 @@ body {
margin-right: 2em; margin-right: 2em;
} }
.login-btn {
cursor: pointer;
}
.save-btn { .save-btn {
cursor: pointer; cursor: pointer;
-webkit-user-select: none; -webkit-user-select: none;
user-select: none; user-select: none;
} }
.user-btn { .logout-btn {
display: flex; cursor: pointer;
align-items: center; display: flex;
align-items: center;
} }
.user-name { .user-name {

View File

@@ -4,12 +4,11 @@
<head> <head>
<meta charset="utf-8" /> <meta charset="utf-8" />
<meta name="viewport" content="width=device-width" /> <meta name="viewport" content="width=device-width" />
<noscript>
<meta http-equiv="refresh" content="0; url=?noscript">
</noscript>
<link rel="icon" type="image/x-icon" href="__ASSETS_PREFIX__favicon.ico"> <link rel="icon" type="image/x-icon" href="__ASSETS_PREFIX__favicon.ico">
<link rel="stylesheet" href="__ASSETS_PREFIX__index.css"> <link rel="stylesheet" href="__ASSETS_PREFIX__index.css">
<script>
DATA = __INDEX_DATA__
</script>
<script src="__ASSETS_PREFIX__index.js"></script>
</head> </head>
<body> <body>
@@ -24,7 +23,7 @@
d="M7.646 11.854a.5.5 0 0 0 .708 0l3-3a.5.5 0 0 0-.708-.708L8.5 10.293V1.5a.5.5 0 0 0-1 0v8.793L5.354 8.146a.5.5 0 1 0-.708.708l3 3z" /> d="M7.646 11.854a.5.5 0 0 0 .708 0l3-3a.5.5 0 0 0-.708-.708L8.5 10.293V1.5a.5.5 0 0 0-1 0v8.793L5.354 8.146a.5.5 0 1 0-.708.708l3 3z" />
</svg> </svg>
</a> </a>
<div class="control move-file hidden" title="Move to new path"> <div class="control move-file hidden" title="Move & Rename">
<svg class="icon-move" width="16" height="16" viewBox="0 0 16 16"> <svg class="icon-move" width="16" height="16" viewBox="0 0 16 16">
<path fill-rule="evenodd" <path fill-rule="evenodd"
d="M1.5 1.5A.5.5 0 0 0 1 2v4.8a2.5 2.5 0 0 0 2.5 2.5h9.793l-3.347 3.346a.5.5 0 0 0 .708.708l4.2-4.2a.5.5 0 0 0 0-.708l-4-4a.5.5 0 0 0-.708.708L13.293 8.3H3.5A1.5 1.5 0 0 1 2 6.8V2a.5.5 0 0 0-.5-.5z"> d="M1.5 1.5A.5.5 0 0 0 1 2v4.8a2.5 2.5 0 0 0 2.5 2.5h9.793l-3.347 3.346a.5.5 0 0 0 .708.708l4.2-4.2a.5.5 0 0 0 0-.708l-4-4a.5.5 0 0 0-.708.708L13.293 8.3H3.5A1.5 1.5 0 0 1 2 6.8V2a.5.5 0 0 0-.5-.5z">
@@ -39,7 +38,7 @@
d="M14 14V4.5L9.5 0H4a2 2 0 0 0-2 2v12a2 2 0 0 0 2 2h8a2 2 0 0 0 2-2zM9.5 3A1.5 1.5 0 0 0 11 4.5h2V14a1 1 0 0 1-1 1H4a1 1 0 0 1-1-1V2a1 1 0 0 1 1-1h5.5v2z" /> d="M14 14V4.5L9.5 0H4a2 2 0 0 0-2 2v12a2 2 0 0 0 2 2h8a2 2 0 0 0 2-2zM9.5 3A1.5 1.5 0 0 0 11 4.5h2V14a1 1 0 0 1-1 1H4a1 1 0 0 1-1-1V2a1 1 0 0 1 1-1h5.5v2z" />
</svg> </svg>
</div> </div>
<div class="control upload-file hidden" title="Upload files"> <div class="control upload-file hidden" title="Upload files/folders">
<label for="file"> <label for="file">
<svg width="16" height="16" viewBox="0 0 16 16"> <svg width="16" height="16" viewBox="0 0 16 16">
<path <path
@@ -48,7 +47,7 @@
d="M7.646 1.146a.5.5 0 0 1 .708 0l3 3a.5.5 0 0 1-.708.708L8.5 2.707V11.5a.5.5 0 0 1-1 0V2.707L5.354 4.854a.5.5 0 1 1-.708-.708l3-3z" /> d="M7.646 1.146a.5.5 0 0 1 .708 0l3 3a.5.5 0 0 1-.708.708L8.5 2.707V11.5a.5.5 0 0 1-1 0V2.707L5.354 4.854a.5.5 0 1 1-.708-.708l3-3z" />
</svg> </svg>
</label> </label>
<input type="file" id="file" title="Upload files" name="file" multiple> <input type="file" id="file" title="Upload files/folders" name="file" multiple>
</div> </div>
<div class="control new-folder hidden" title="New folder"> <div class="control new-folder hidden" title="New folder">
<svg width="16" height="16" viewBox="0 0 16 16"> <svg width="16" height="16" viewBox="0 0 16 16">
@@ -74,11 +73,12 @@
d="M11.742 10.344a6.5 6.5 0 1 0-1.397 1.398h-.001c.03.04.062.078.098.115l3.85 3.85a1 1 0 0 0 1.415-1.414l-3.85-3.85a1.007 1.007 0 0 0-.115-.1zM12 6.5a5.5 5.5 0 1 1-11 0 5.5 5.5 0 0 1 11 0z" /> d="M11.742 10.344a6.5 6.5 0 1 0-1.397 1.398h-.001c.03.04.062.078.098.115l3.85 3.85a1 1 0 0 0 1.415-1.414l-3.85-3.85a1.007 1.007 0 0 0-.115-.1zM12 6.5a5.5 5.5 0 1 1-11 0 5.5 5.5 0 0 1 11 0z" />
</svg> </svg>
</div> </div>
<input id="search" title="Searching for folders or files" name="q" type="text" maxlength="128" autocomplete="off" tabindex="1"> <input id="search" title="Searching for folders or files" name="q" type="text" maxlength="128" autocomplete="off"
tabindex="1">
<input type="submit" hidden /> <input type="submit" hidden />
</form> </form>
<div class="toolbox-right"> <div class="toolbox-right">
<div class="login-btn hidden" title="Login for upload/move/delete/edit permissions"> <div class="login-btn hidden" title="Login">
<svg width="16" height="16" viewBox="0 0 16 16"> <svg width="16" height="16" viewBox="0 0 16 16">
<path fill-rule="evenodd" <path fill-rule="evenodd"
d="M6 3.5a.5.5 0 0 1 .5-.5h8a.5.5 0 0 1 .5.5v9a.5.5 0 0 1-.5.5h-8a.5.5 0 0 1-.5-.5v-2a.5.5 0 0 0-1 0v2A1.5 1.5 0 0 0 6.5 14h8a1.5 1.5 0 0 0 1.5-1.5v-9A1.5 1.5 0 0 0 14.5 2h-8A1.5 1.5 0 0 0 5 3.5v2a.5.5 0 0 0 1 0v-2z" /> d="M6 3.5a.5.5 0 0 1 .5-.5h8a.5.5 0 0 1 .5.5v9a.5.5 0 0 1-.5.5h-8a.5.5 0 0 1-.5-.5v-2a.5.5 0 0 0-1 0v2A1.5 1.5 0 0 0 6.5 14h8a1.5 1.5 0 0 0 1.5-1.5v-9A1.5 1.5 0 0 0 14.5 2h-8A1.5 1.5 0 0 0 5 3.5v2a.5.5 0 0 0 1 0v-2z" />
@@ -86,10 +86,10 @@
d="M11.854 8.354a.5.5 0 0 0 0-.708l-3-3a.5.5 0 1 0-.708.708L10.293 7.5H1.5a.5.5 0 0 0 0 1h8.793l-2.147 2.146a.5.5 0 0 0 .708.708l3-3z" /> d="M11.854 8.354a.5.5 0 0 0 0-.708l-3-3a.5.5 0 1 0-.708.708L10.293 7.5H1.5a.5.5 0 0 0 0 1h8.793l-2.147 2.146a.5.5 0 0 0 .708.708l3-3z" />
</svg> </svg>
</div> </div>
<div class="user-btn hidden"> <div class="logout-btn hidden" title="Logout">
<svg width="16" height="16" viewBox="0 0 16 16"> <svg width="16" height="16" viewBox="0 0 16 16">
<path <path fill-rule="evenodd" d="M10 3.5a.5.5 0 0 0-.5-.5h-8a.5.5 0 0 0-.5.5v9a.5.5 0 0 0 .5.5h8a.5.5 0 0 0 .5-.5v-2a.5.5 0 0 1 1 0v2A1.5 1.5 0 0 1 9.5 14h-8A1.5 1.5 0 0 1 0 12.5v-9A1.5 1.5 0 0 1 1.5 2h8A1.5 1.5 0 0 1 11 3.5v2a.5.5 0 0 1-1 0z"/>
d="M8 8a3 3 0 1 0 0-6 3 3 0 0 0 0 6Zm2-3a2 2 0 1 1-4 0 2 2 0 0 1 4 0Zm4 8c0 1-1 1-1 1H3s-1 0-1-1 1-4 6-4 6 3 6 4Zm-1-.004c-.001-.246-.154-.986-.832-1.664C11.516 10.68 10.289 10 8 10c-2.29 0-3.516.68-4.168 1.332-.678.678-.83 1.418-.832 1.664h10Z" /> <path fill-rule="evenodd" d="M4.146 8.354a.5.5 0 0 1 0-.708l3-3a.5.5 0 1 1 .708.708L5.707 7.5H14.5a.5.5 0 0 1 0 1H5.707l2.147 2.146a.5.5 0 0 1-.708.708z"/>
</svg> </svg>
<span class="user-name"></span> <span class="user-name"></span>
</div> </div>
@@ -125,9 +125,8 @@
<textarea id="editor" class="editor hidden" aria-label="Editor" cols="10"></textarea> <textarea id="editor" class="editor hidden" aria-label="Editor" cols="10"></textarea>
</div> </div>
</div> </div>
<script> <template id="index-data">__INDEX_DATA__</template>
window.addEventListener("DOMContentLoaded", ready); <script src="__ASSETS_PREFIX__index.js"></script>
</script>
</body> </body>
</html> </html>

View File

@@ -29,6 +29,11 @@ var DUFS_MAX_UPLOADINGS = 1;
*/ */
var DATA; var DATA;
/**
* @type {string}
*/
var DIR_EMPTY_NOTE;
/** /**
* @type {PARAMS} * @type {PARAMS}
* @typedef {object} PARAMS * @typedef {object} PARAMS
@@ -45,7 +50,7 @@ const IFRAME_FORMATS = [
".mp3", ".ogg", ".wav", ".m4a", ".mp3", ".ogg", ".wav", ".m4a",
]; ];
const dirEmptyNote = PARAMS.q ? 'No results' : DATA.dir_exists ? 'Empty folder' : 'Folder will be created when a file is uploaded'; const MAX_SUBPATHS_COUNT = 1000;
const ICONS = { const ICONS = {
dir: `<svg height="16" viewBox="0 0 14 16" width="14"><path fill-rule="evenodd" d="M13 4H7V3c0-.66-.31-1-1-1H1c-.55 0-1 .45-1 1v10c0 .55.45 1 1 1h12c.55 0 1-.45 1-1V5c0-.55-.45-1-1-1zM6 4H1V3h5v1z"></path></svg>`, dir: `<svg height="16" viewBox="0 0 14 16" width="14"><path fill-rule="evenodd" d="M13 4H7V3c0-.66-.31-1-1-1H1c-.55 0-1 .45-1 1v10c0 .55.45 1 1 1h12c.55 0 1-.45 1-1V5c0-.55-.45-1-1-1zM6 4H1V3h5v1z"></path></svg>`,
@@ -91,44 +96,61 @@ let $editor;
/** /**
* @type Element * @type Element
*/ */
let $userBtn; let $loginBtn;
/**
* @type Element
*/
let $logoutBtn;
/** /**
* @type Element * @type Element
*/ */
let $userName; let $userName;
function ready() { // Produce table when window loads
$pathsTable = document.querySelector(".paths-table") window.addEventListener("DOMContentLoaded", async () => {
const $indexData = document.getElementById('index-data');
if (!$indexData) {
alert("No data");
return;
}
DATA = JSON.parse(decodeBase64($indexData.innerHTML));
DIR_EMPTY_NOTE = PARAMS.q ? 'No results' : DATA.dir_exists ? 'Empty folder' : 'Folder will be created when a file is uploaded';
await ready();
});
async function ready() {
$pathsTable = document.querySelector(".paths-table");
$pathsTableHead = document.querySelector(".paths-table thead"); $pathsTableHead = document.querySelector(".paths-table thead");
$pathsTableBody = document.querySelector(".paths-table tbody"); $pathsTableBody = document.querySelector(".paths-table tbody");
$uploadersTable = document.querySelector(".uploaders-table"); $uploadersTable = document.querySelector(".uploaders-table");
$emptyFolder = document.querySelector(".empty-folder"); $emptyFolder = document.querySelector(".empty-folder");
$editor = document.querySelector(".editor"); $editor = document.querySelector(".editor");
$userBtn = document.querySelector(".user-btn"); $loginBtn = document.querySelector(".login-btn");
$logoutBtn = document.querySelector(".logout-btn");
$userName = document.querySelector(".user-name"); $userName = document.querySelector(".user-name");
addBreadcrumb(DATA.href, DATA.uri_prefix); addBreadcrumb(DATA.href, DATA.uri_prefix);
if (DATA.kind == "Index") { if (DATA.kind === "Index") {
document.title = `Index of ${DATA.href} - Dufs`; document.title = `Index of ${DATA.href} - Dufs`;
document.querySelector(".index-page").classList.remove("hidden"); document.querySelector(".index-page").classList.remove("hidden");
setupIndexPage(); await setupIndexPage();
} else if (DATA.kind === "Edit") {
} else if (DATA.kind == "Edit") {
document.title = `Edit ${DATA.href} - Dufs`; document.title = `Edit ${DATA.href} - Dufs`;
document.querySelector(".editor-page").classList.remove("hidden");; document.querySelector(".editor-page").classList.remove("hidden");
setupEditorPage(); await setupEditorPage();
} else if (DATA.kind == "View") { } else if (DATA.kind === "View") {
document.title = `View ${DATA.href} - Dufs`; document.title = `View ${DATA.href} - Dufs`;
document.querySelector(".editor-page").classList.remove("hidden");; document.querySelector(".editor-page").classList.remove("hidden");
setupEditorPage(); await setupEditorPage();
} }
} }
class Uploader { class Uploader {
/** /**
* *
@@ -222,16 +244,16 @@ class Uploader {
uploadOffset = parseInt(value) || 0; uploadOffset = parseInt(value) || 0;
} }
this.uploadOffset = uploadOffset; this.uploadOffset = uploadOffset;
this.ajax() this.ajax();
} }
progress(event) { progress(event) {
const now = Date.now(); const now = Date.now();
const speed = (event.loaded - this.uploaded) / (now - this.lastUptime) * 1000; const speed = (event.loaded - this.uploaded) / (now - this.lastUptime) * 1000;
const [speedValue, speedUnit] = formatSize(speed); const [speedValue, speedUnit] = formatFileSize(speed);
const speedText = `${speedValue} ${speedUnit}/s`; const speedText = `${speedValue} ${speedUnit}/s`;
const progress = formatPercent(((event.loaded + this.uploadOffset) / this.file.size) * 100); const progress = formatPercent(((event.loaded + this.uploadOffset) / this.file.size) * 100);
const duration = formatDuration((event.total - event.loaded) / speed) const duration = formatDuration((event.total - event.loaded) / speed);
this.$uploadStatus.innerHTML = `<span style="width: 80px;">${speedText}</span><span>${progress} ${duration}</span>`; this.$uploadStatus.innerHTML = `<span style="width: 80px;">${speedText}</span><span>${progress} ${duration}</span>`;
this.uploaded = event.loaded; this.uploaded = event.loaded;
this.lastUptime = now; this.lastUptime = now;
@@ -275,7 +297,7 @@ Uploader.runQueue = async () => {
if (!Uploader.auth) { if (!Uploader.auth) {
Uploader.auth = true; Uploader.auth = true;
try { try {
await checkAuth() await checkAuth();
} catch { } catch {
Uploader.auth = false; Uploader.auth = false;
} }
@@ -320,11 +342,12 @@ function addBreadcrumb(href, uri_prefix) {
} }
} }
function setupIndexPage() { async function setupIndexPage() {
if (DATA.allow_archive) { if (DATA.allow_archive) {
const $download = document.querySelector(".download"); const $download = document.querySelector(".download");
$download.href = baseUrl() + "?zip"; $download.href = baseUrl() + "?zip";
$download.title = "Download folder as a .zip file"; $download.title = "Download folder as a .zip file";
$download.classList.add("dlwt");
$download.classList.remove("hidden"); $download.classList.remove("hidden");
} }
@@ -336,15 +359,19 @@ function setupIndexPage() {
} }
if (DATA.auth) { if (DATA.auth) {
setupAuth(); await setupAuth();
} }
if (DATA.allow_search) { if (DATA.allow_search) {
setupSearch() setupSearch();
} }
renderPathsTableHead(); renderPathsTableHead();
renderPathsTableBody(); renderPathsTableBody();
if (DATA.user) {
setupDownloadWithToken();
}
} }
/** /**
@@ -403,7 +430,7 @@ function renderPathsTableBody() {
addPath(DATA.paths[i], i); addPath(DATA.paths[i], i);
} }
} else { } else {
$emptyFolder.textContent = dirEmptyNote; $emptyFolder.textContent = DIR_EMPTY_NOTE;
$emptyFolder.classList.remove("hidden"); $emptyFolder.classList.remove("hidden");
} }
} }
@@ -415,7 +442,7 @@ function renderPathsTableBody() {
*/ */
function addPath(file, index) { function addPath(file, index) {
const encodedName = encodedStr(file.name); const encodedName = encodedStr(file.name);
let url = newUrl(file.name) let url = newUrl(file.name);
let actionDelete = ""; let actionDelete = "";
let actionDownload = ""; let actionDownload = "";
let actionMove = ""; let actionMove = "";
@@ -427,18 +454,18 @@ function addPath(file, index) {
if (DATA.allow_archive) { if (DATA.allow_archive) {
actionDownload = ` actionDownload = `
<div class="action-btn"> <div class="action-btn">
<a href="${url}?zip" title="Download folder as a .zip file">${ICONS.download}</a> <a class="dlwt" href="${url}?zip" title="Download folder as a .zip file" download>${ICONS.download}</a>
</div>`; </div>`;
} }
} else { } else {
actionDownload = ` actionDownload = `
<div class="action-btn" > <div class="action-btn" >
<a href="${url}" title="Download file" download>${ICONS.download}</a> <a class="dlwt" href="${url}" title="Download file" download>${ICONS.download}</a>
</div>`; </div>`;
} }
if (DATA.allow_delete) { if (DATA.allow_delete) {
if (DATA.allow_upload) { if (DATA.allow_upload) {
actionMove = `<div onclick="movePath(${index})" class="action-btn" id="moveBtn${index}" title="Move to new path">${ICONS.move}</div>`; actionMove = `<div onclick="movePath(${index})" class="action-btn" id="moveBtn${index}" title="Move & Rename">${ICONS.move}</div>`;
if (!isDir) { if (!isDir) {
actionEdit = `<a class="action-btn" title="Edit file" target="_blank" href="${url}?edit">${ICONS.edit}</a>`; actionEdit = `<a class="action-btn" title="Edit file" target="_blank" href="${url}?edit">${ICONS.edit}</a>`;
} }
@@ -456,7 +483,9 @@ function addPath(file, index) {
${actionMove} ${actionMove}
${actionDelete} ${actionDelete}
${actionEdit} ${actionEdit}
</td>` </td>`;
let sizeDisplay = isDir ? formatDirSize(file.size) : formatFileSize(file.size).join(" ");
$pathsTableBody.insertAdjacentHTML("beforeend", ` $pathsTableBody.insertAdjacentHTML("beforeend", `
<tr id="addPath${index}"> <tr id="addPath${index}">
@@ -467,9 +496,9 @@ function addPath(file, index) {
<a href="${url}" ${isDir ? "" : `target="_blank"`}>${encodedName}</a> <a href="${url}" ${isDir ? "" : `target="_blank"`}>${encodedName}</a>
</td> </td>
<td class="cell-mtime">${formatMtime(file.mtime)}</td> <td class="cell-mtime">${formatMtime(file.mtime)}</td>
<td class="cell-size">${formatSize(file.size).join(" ")}</td> <td class="cell-size">${sizeDisplay}</td>
${actionCell} ${actionCell}
</tr>`) </tr>`);
} }
function setupDropzone() { function setupDropzone() {
@@ -481,7 +510,7 @@ function setupDropzone() {
}); });
document.addEventListener("drop", async e => { document.addEventListener("drop", async e => {
if (!e.dataTransfer.items[0].webkitGetAsEntry) { if (!e.dataTransfer.items[0].webkitGetAsEntry) {
const files = e.dataTransfer.files.filter(v => v.size > 0); const files = Array.from(e.dataTransfer.files).filter(v => v.size > 0);
for (const file of files) { for (const file of files) {
new Uploader(file, []).upload(); new Uploader(file, []).upload();
} }
@@ -491,29 +520,54 @@ function setupDropzone() {
for (let i = 0; i < len; i++) { for (let i = 0; i < len; i++) {
entries.push(e.dataTransfer.items[i].webkitGetAsEntry()); entries.push(e.dataTransfer.items[i].webkitGetAsEntry());
} }
addFileEntries(entries, []) addFileEntries(entries, []);
} }
}); });
} }
function setupAuth() { async function setupAuth() {
if (DATA.user) { if (DATA.user) {
$userBtn.classList.remove("hidden"); $logoutBtn.classList.remove("hidden");
$logoutBtn.addEventListener("click", logout);
$userName.textContent = DATA.user; $userName.textContent = DATA.user;
} else { } else {
const $loginBtn = document.querySelector(".login-btn");
$loginBtn.classList.remove("hidden"); $loginBtn.classList.remove("hidden");
$loginBtn.addEventListener("click", async () => { $loginBtn.addEventListener("click", async () => {
try { try {
await checkAuth() await checkAuth("login");
location.reload(); } catch { }
} catch (err) { location.reload();
alert(err.message);
}
}); });
} }
} }
// Intercept clicks on token-download links (`a.dlwt`): fetch a one-time token
// via `?tokengen`, append it to the original href as `?token=...`, then trigger
// the download through a temporary anchor element.
function setupDownloadWithToken() {
  const handleClick = async e => {
    e.preventDefault();
    try {
      // Read the anchor synchronously, before any await.
      const anchor = e.currentTarget || e.target;
      const originalHref = anchor.getAttribute("href");
      // First request: ask the server to generate a download token.
      const tokengenUrl = new URL(originalHref);
      tokengenUrl.searchParams.set("tokengen", "");
      const res = await fetch(tokengenUrl);
      if (!res.ok) throw new Error("Failed to fetch token");
      const token = await res.text();
      // Build the final URL carrying the token as a query parameter.
      const downloadUrl = new URL(originalHref);
      downloadUrl.searchParams.set("token", token);
      // Click a transient anchor so the browser performs the download.
      const tempA = document.createElement("a");
      tempA.href = downloadUrl.toString();
      tempA.download = "";
      document.body.appendChild(tempA);
      tempA.click();
      document.body.removeChild(tempA);
    } catch (err) {
      alert(`Failed to download, ${err.message}`);
    }
  };
  for (const anchor of document.querySelectorAll("a.dlwt")) {
    anchor.addEventListener("click", handleClick);
  }
}
function setupSearch() { function setupSearch() {
const $searchbar = document.querySelector(".searchbar"); const $searchbar = document.querySelector(".searchbar");
$searchbar.classList.remove("hidden"); $searchbar.classList.remove("hidden");
@@ -586,11 +640,13 @@ async function setupEditorPage() {
await doDeletePath(name, url, () => { await doDeletePath(name, url, () => {
location.href = location.href.split("/").slice(0, -1).join("/"); location.href = location.href.split("/").slice(0, -1).join("/");
}); });
}) });
const $saveBtn = document.querySelector(".save-btn"); if (DATA.editable) {
$saveBtn.classList.remove("hidden"); const $saveBtn = document.querySelector(".save-btn");
$saveBtn.addEventListener("click", saveChange); $saveBtn.classList.remove("hidden");
$saveBtn.addEventListener("click", saveChange);
}
} else if (DATA.kind == "View") { } else if (DATA.kind == "View") {
$editor.readonly = true; $editor.readonly = true;
} }
@@ -600,7 +656,7 @@ async function setupEditorPage() {
const url = baseUrl(); const url = baseUrl();
const ext = extName(baseName(url)); const ext = extName(baseName(url));
if (IFRAME_FORMATS.find(v => v === ext)) { if (IFRAME_FORMATS.find(v => v === ext)) {
$notEditable.insertAdjacentHTML("afterend", `<iframe src="${url}" sandbox width="100%" height="${window.innerHeight - 100}px"></iframe>`) $notEditable.insertAdjacentHTML("afterend", `<iframe src="${url}" sandbox width="100%" height="${window.innerHeight - 100}px"></iframe>`);
} else { } else {
$notEditable.classList.remove("hidden"); $notEditable.classList.remove("hidden");
$notEditable.textContent = "Cannot edit because file is too large or binary."; $notEditable.textContent = "Cannot edit because file is too large or binary.";
@@ -617,12 +673,12 @@ async function setupEditorPage() {
$editor.value = await res.text(); $editor.value = await res.text();
} else { } else {
const bytes = await res.arrayBuffer(); const bytes = await res.arrayBuffer();
const dataView = new DataView(bytes) const dataView = new DataView(bytes);
const decoder = new TextDecoder(encoding) const decoder = new TextDecoder(encoding);
$editor.value = decoder.decode(dataView); $editor.value = decoder.decode(dataView);
} }
} catch (err) { } catch (err) {
alert(`Failed get file, ${err.message}`); alert(`Failed to get file, ${err.message}`);
} }
} }
@@ -639,10 +695,10 @@ async function deletePath(index) {
DATA.paths[index] = null; DATA.paths[index] = null;
if (!DATA.paths.find(v => !!v)) { if (!DATA.paths.find(v => !!v)) {
$pathsTable.classList.add("hidden"); $pathsTable.classList.add("hidden");
$emptyFolder.textContent = dirEmptyNote; $emptyFolder.textContent = DIR_EMPTY_NOTE;
$emptyFolder.classList.remove("hidden"); $emptyFolder.classList.remove("hidden");
} }
}) });
} }
async function doDeletePath(name, url, cb) { async function doDeletePath(name, url, cb) {
@@ -675,13 +731,13 @@ async function movePath(index) {
} }
async function doMovePath(fileUrl) { async function doMovePath(fileUrl) {
const fileUrlObj = new URL(fileUrl) const fileUrlObj = new URL(fileUrl);
const prefix = DATA.uri_prefix.slice(0, -1); const prefix = DATA.uri_prefix.slice(0, -1);
const filePath = decodeURIComponent(fileUrlObj.pathname.slice(prefix.length)); const filePath = decodeURIComponent(fileUrlObj.pathname.slice(prefix.length));
let newPath = prompt("Enter new path", filePath) let newPath = prompt("Enter new path", filePath);
if (!newPath) return; if (!newPath) return;
if (!newPath.startsWith("/")) newPath = "/" + newPath; if (!newPath.startsWith("/")) newPath = "/" + newPath;
if (filePath === newPath) return; if (filePath === newPath) return;
@@ -726,15 +782,27 @@ async function saveChange() {
} }
} }
async function checkAuth() { async function checkAuth(variant) {
if (!DATA.auth) return; if (!DATA.auth) return;
const res = await fetch(baseUrl(), { const qs = variant ? `?${variant}` : "";
method: "WRITEABLE", const res = await fetch(baseUrl() + qs, {
method: "CHECKAUTH",
}); });
await assertResOK(res); await assertResOK(res);
document.querySelector(".login-btn").classList.add("hidden"); $loginBtn.classList.add("hidden");
$userBtn.classList.remove("hidden"); $logoutBtn.classList.remove("hidden");
$userName.textContent = ""; $userName.textContent = await res.text();
}
// Log the current user out by issuing a LOGOUT request authenticated as
// DATA.user, then reload the current directory URL.
function logout() {
  if (!DATA.auth) return;
  const target = baseUrl();
  const request = new XMLHttpRequest();
  // Pass DATA.user as the username argument so the request is sent under
  // the current user's credentials.
  request.open("LOGOUT", target, true, DATA.user);
  request.onload = () => {
    location.href = target;
  }
  request.send();
}
/** /**
@@ -800,11 +868,11 @@ function newUrl(name) {
} }
function baseUrl() { function baseUrl() {
return location.href.split('?')[0]; return location.href.split(/[?#]/)[0];
} }
function baseName(url) { function baseName(url) {
return decodeURIComponent(url.split("/").filter(v => v.length > 0).slice(-1)[0]) return decodeURIComponent(url.split("/").filter(v => v.length > 0).slice(-1)[0]);
} }
function extName(filename) { function extName(filename) {
@@ -831,7 +899,7 @@ function getPathSvg(path_type) {
} }
function formatMtime(mtime) { function formatMtime(mtime) {
if (!mtime) return "" if (!mtime) return "";
const date = new Date(mtime); const date = new Date(mtime);
const year = date.getFullYear(); const year = date.getFullYear();
const month = padZero(date.getMonth() + 1, 2); const month = padZero(date.getMonth() + 1, 2);
@@ -842,17 +910,23 @@ function formatMtime(mtime) {
} }
function padZero(value, size) { function padZero(value, size) {
return ("0".repeat(size) + value).slice(-1 * size) return ("0".repeat(size) + value).slice(-1 * size);
} }
function formatSize(size) { function formatDirSize(size) {
if (size == null) return [0, "B"] const unit = size === 1 ? "item" : "items";
const num = size >= MAX_SUBPATHS_COUNT ? `>${MAX_SUBPATHS_COUNT - 1}` : `${size}`;
return ` ${num} ${unit}`;
}
function formatFileSize(size) {
if (size == null) return [0, "B"];
const sizes = ['B', 'KB', 'MB', 'GB', 'TB']; const sizes = ['B', 'KB', 'MB', 'GB', 'TB'];
if (size == 0) return [0, "B"]; if (size == 0) return [0, "B"];
const i = parseInt(Math.floor(Math.log(size) / Math.log(1024))); const i = parseInt(Math.floor(Math.log(size) / Math.log(1024)));
ratio = 1 let ratio = 1;
if (i >= 3) { if (i >= 3) {
ratio = 100 ratio = 100;
} }
return [Math.round(size * ratio / Math.pow(1024, i), 2) / ratio, sizes[i]]; return [Math.round(size * ratio / Math.pow(1024, i), 2) / ratio, sizes[i]];
} }
@@ -861,7 +935,7 @@ function formatDuration(seconds) {
seconds = Math.ceil(seconds); seconds = Math.ceil(seconds);
const h = Math.floor(seconds / 3600); const h = Math.floor(seconds / 3600);
const m = Math.floor((seconds - h * 3600) / 60); const m = Math.floor((seconds - h * 3600) / 60);
const s = seconds - h * 3600 - m * 60 const s = seconds - h * 3600 - m * 60;
return `${padZero(h, 2)}:${padZero(m, 2)}:${padZero(s, 2)}`; return `${padZero(h, 2)}:${padZero(m, 2)}:${padZero(s, 2)}`;
} }
@@ -886,12 +960,31 @@ async function assertResOK(res) {
} }
function getEncoding(contentType) { function getEncoding(contentType) {
const charset = contentType?.split(";")[1]; const charset = contentType?.split(";")[1];
if (/charset/i.test(charset)) { if (/charset/i.test(charset)) {
let encoding = charset.split("=")[1]; let encoding = charset.split("=")[1];
if (encoding) { if (encoding) {
return encoding.toLowerCase() return encoding.toLowerCase();
}
} }
return 'utf-8' }
return 'utf-8';
}
// Parsing base64 strings with Unicode characters
function decodeBase64(base64String) {
const binString = atob(base64String);
const len = binString.length;
const bytes = new Uint8Array(len);
const arr = new Uint32Array(bytes.buffer, 0, Math.floor(len / 4));
let i = 0;
for (; i < arr.length; i++) {
arr[i] = binString.charCodeAt(i * 4) |
(binString.charCodeAt(i * 4 + 1) << 8) |
(binString.charCodeAt(i * 4 + 2) << 16) |
(binString.charCodeAt(i * 4 + 3) << 24);
}
for (i = i * 4; i < len; i++) {
bytes[i] = binString.charCodeAt(i);
}
return new TextDecoder().decode(bytes);
} }

View File

@@ -146,7 +146,15 @@ pub fn build_cli() -> Command {
.hide_env(true) .hide_env(true)
.long("allow-archive") .long("allow-archive")
.action(ArgAction::SetTrue) .action(ArgAction::SetTrue)
.help("Allow zip archive generation"), .help("Allow download folders as archive file"),
)
.arg(
Arg::new("allow-hash")
.env("DUFS_ALLOW_HASH")
.hide_env(true)
.long("allow-hash")
.action(ArgAction::SetTrue)
.help("Allow ?hash query to get file sha256 hash"),
) )
.arg( .arg(
Arg::new("enable-cors") Arg::new("enable-cors")
@@ -197,6 +205,15 @@ pub fn build_cli() -> Command {
.value_name("format") .value_name("format")
.help("Customize http log format"), .help("Customize http log format"),
) )
.arg(
Arg::new("log-file")
.env("DUFS_LOG_FILE")
.hide_env(true)
.long("log-file")
.value_name("file")
.value_parser(value_parser!(PathBuf))
.help("Specify the file to save logs to, other than stdout/stderr"),
)
.arg( .arg(
Arg::new("compress") Arg::new("compress")
.env("DUFS_COMPRESS") .env("DUFS_COMPRESS")
@@ -272,6 +289,7 @@ pub struct Args {
pub allow_search: bool, pub allow_search: bool,
pub allow_symlink: bool, pub allow_symlink: bool,
pub allow_archive: bool, pub allow_archive: bool,
pub allow_hash: bool,
pub render_index: bool, pub render_index: bool,
pub render_spa: bool, pub render_spa: bool,
pub render_try_index: bool, pub render_try_index: bool,
@@ -280,6 +298,7 @@ pub struct Args {
#[serde(deserialize_with = "deserialize_log_http")] #[serde(deserialize_with = "deserialize_log_http")]
#[serde(rename = "log-format")] #[serde(rename = "log-format")]
pub http_logger: HttpLogger, pub http_logger: HttpLogger,
pub log_file: Option<PathBuf>,
pub compress: Compress, pub compress: Compress,
pub tls_cert: Option<PathBuf>, pub tls_cert: Option<PathBuf>,
pub tls_key: Option<PathBuf>, pub tls_key: Option<PathBuf>,
@@ -301,7 +320,7 @@ impl Args {
} }
if let Some(path) = matches.get_one::<PathBuf>("serve-path") { if let Some(path) = matches.get_one::<PathBuf>("serve-path") {
args.serve_path = path.clone() args.serve_path.clone_from(path)
} }
args.serve_path = Self::sanitize_path(args.serve_path)?; args.serve_path = Self::sanitize_path(args.serve_path)?;
@@ -317,7 +336,7 @@ impl Args {
args.path_is_file = args.serve_path.metadata()?.is_file(); args.path_is_file = args.serve_path.metadata()?.is_file();
if let Some(path_prefix) = matches.get_one::<String>("path-prefix") { if let Some(path_prefix) = matches.get_one::<String>("path-prefix") {
args.path_prefix = path_prefix.clone(); args.path_prefix.clone_from(path_prefix)
} }
args.path_prefix = args.path_prefix.trim_matches('/').to_string(); args.path_prefix = args.path_prefix.trim_matches('/').to_string();
@@ -365,6 +384,9 @@ impl Args {
if !args.allow_symlink { if !args.allow_symlink {
args.allow_symlink = allow_all || matches.get_flag("allow-symlink"); args.allow_symlink = allow_all || matches.get_flag("allow-symlink");
} }
if !args.allow_hash {
args.allow_hash = allow_all || matches.get_flag("allow-hash");
}
if !args.allow_archive { if !args.allow_archive {
args.allow_archive = allow_all || matches.get_flag("allow-archive"); args.allow_archive = allow_all || matches.get_flag("allow-archive");
} }
@@ -392,6 +414,10 @@ impl Args {
args.http_logger = log_format.parse()?; args.http_logger = log_format.parse()?;
} }
if let Some(log_file) = matches.get_one::<PathBuf>("log-file") {
args.log_file = Some(log_file.clone());
}
if let Some(compress) = matches.get_one::<Compress>("compress") { if let Some(compress) = matches.get_one::<Compress>("compress") {
args.compress = *compress; args.compress = *compress;
} }
@@ -447,28 +473,30 @@ impl Args {
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum BindAddr { pub enum BindAddr {
Address(IpAddr), IpAddr(IpAddr),
Path(PathBuf), #[cfg(unix)]
SocketPath(String),
} }
impl BindAddr { impl BindAddr {
fn parse_addrs(addrs: &[&str]) -> Result<Vec<Self>> { fn parse_addrs(addrs: &[&str]) -> Result<Vec<Self>> {
let mut bind_addrs = vec![]; let mut bind_addrs = vec![];
#[cfg(not(unix))]
let mut invalid_addrs = vec![]; let mut invalid_addrs = vec![];
for addr in addrs { for addr in addrs {
match addr.parse::<IpAddr>() { match addr.parse::<IpAddr>() {
Ok(v) => { Ok(v) => {
bind_addrs.push(BindAddr::Address(v)); bind_addrs.push(BindAddr::IpAddr(v));
} }
Err(_) => { Err(_) => {
if cfg!(unix) { #[cfg(unix)]
bind_addrs.push(BindAddr::Path(PathBuf::from(addr))); bind_addrs.push(BindAddr::SocketPath(addr.to_string()));
} else { #[cfg(not(unix))]
invalid_addrs.push(*addr); invalid_addrs.push(*addr);
}
} }
} }
} }
#[cfg(not(unix))]
if !invalid_addrs.is_empty() { if !invalid_addrs.is_empty() {
bail!("Invalid bind address `{}`", invalid_addrs.join(",")); bail!("Invalid bind address `{}`", invalid_addrs.join(","));
} }
@@ -476,21 +504,16 @@ impl BindAddr {
} }
} }
#[derive(Debug, Clone, Copy, PartialEq, Deserialize)] #[derive(Debug, Clone, Copy, PartialEq, Deserialize, Default)]
#[serde(rename_all = "lowercase")] #[serde(rename_all = "lowercase")]
pub enum Compress { pub enum Compress {
None, None,
#[default]
Low, Low,
Medium, Medium,
High, High,
} }
impl Default for Compress {
fn default() -> Self {
Self::Low
}
}
impl ValueEnum for Compress { impl ValueEnum for Compress {
fn value_variants<'a>() -> &'a [Self] { fn value_variants<'a>() -> &'a [Self] {
&[Self::None, Self::Low, Self::Medium, Self::High] &[Self::None, Self::Low, Self::Medium, Self::High]
@@ -696,7 +719,7 @@ hidden: tmp,*.log,*.lock
assert_eq!(args.serve_path, Args::sanitize_path(&tmpdir).unwrap()); assert_eq!(args.serve_path, Args::sanitize_path(&tmpdir).unwrap());
assert_eq!( assert_eq!(
args.addrs, args.addrs,
vec![BindAddr::Address("0.0.0.0".parse().unwrap())] vec![BindAddr::IpAddr("0.0.0.0".parse().unwrap())]
); );
assert_eq!(args.hidden, ["tmp", "*.log", "*.lock"]); assert_eq!(args.hidden, ["tmp", "*.log", "*.lock"]);
assert_eq!(args.port, 3000); assert_eq!(args.port, 3000);
@@ -726,8 +749,8 @@ hidden:
assert_eq!( assert_eq!(
args.addrs, args.addrs,
vec![ vec![
BindAddr::Address("127.0.0.1".parse().unwrap()), BindAddr::IpAddr("127.0.0.1".parse().unwrap()),
BindAddr::Address("192.168.8.10".parse().unwrap()) BindAddr::IpAddr("192.168.8.10".parse().unwrap())
] ]
); );
assert_eq!(args.hidden, ["tmp", "*.log", "*.lock"]); assert_eq!(args.hidden, ["tmp", "*.log", "*.lock"]);

View File

@@ -1,12 +1,14 @@
use crate::{args::Args, server::Response, utils::unix_now}; use crate::{args::Args, server::Response, utils::unix_now};
use anyhow::{anyhow, bail, Result}; use anyhow::{anyhow, bail, Result};
use base64::{engine::general_purpose, Engine as _}; use base64::{engine::general_purpose::STANDARD, Engine as _};
use ed25519_dalek::{ed25519::signature::SignerMut, Signature, SigningKey};
use headers::HeaderValue; use headers::HeaderValue;
use hyper::{header::WWW_AUTHENTICATE, Method}; use hyper::{header::WWW_AUTHENTICATE, Method};
use indexmap::IndexMap; use indexmap::IndexMap;
use lazy_static::lazy_static; use lazy_static::lazy_static;
use md5::Context; use md5::Context;
use sha2::{Digest, Sha256};
use std::{ use std::{
collections::HashMap, collections::HashMap,
path::{Path, PathBuf}, path::{Path, PathBuf},
@@ -14,7 +16,8 @@ use std::{
use uuid::Uuid; use uuid::Uuid;
const REALM: &str = "DUFS"; const REALM: &str = "DUFS";
const DIGEST_AUTH_TIMEOUT: u32 = 604800; // 7 days const DIGEST_AUTH_TIMEOUT: u32 = 60 * 60 * 24 * 7; // 7 days
const TOKEN_EXPIRATION: u64 = 1000 * 60 * 60 * 24 * 3; // 3 days
lazy_static! { lazy_static! {
static ref NONCESTARTHASH: Context = { static ref NONCESTARTHASH: Context = {
@@ -27,17 +30,19 @@ lazy_static! {
#[derive(Debug, Clone, PartialEq)] #[derive(Debug, Clone, PartialEq)]
pub struct AccessControl { pub struct AccessControl {
empty: bool,
use_hashed_password: bool, use_hashed_password: bool,
users: IndexMap<String, (String, AccessPaths)>, users: IndexMap<String, (String, AccessPaths)>,
anony: Option<AccessPaths>, anonymous: Option<AccessPaths>,
} }
impl Default for AccessControl { impl Default for AccessControl {
fn default() -> Self { fn default() -> Self {
AccessControl { AccessControl {
empty: true,
use_hashed_password: false, use_hashed_password: false,
anony: Some(AccessPaths::new(AccessPerm::ReadWrite)),
users: IndexMap::new(), users: IndexMap::new(),
anonymous: Some(AccessPaths::new(AccessPerm::ReadWrite)),
} }
} }
} }
@@ -45,7 +50,7 @@ impl Default for AccessControl {
impl AccessControl { impl AccessControl {
pub fn new(raw_rules: &[&str]) -> Result<Self> { pub fn new(raw_rules: &[&str]) -> Result<Self> {
if raw_rules.is_empty() { if raw_rules.is_empty() {
return Ok(Default::default()); return Ok(Self::default());
} }
let new_raw_rules = split_rules(raw_rules); let new_raw_rules = split_rules(raw_rules);
let mut use_hashed_password = false; let mut use_hashed_password = false;
@@ -66,18 +71,23 @@ impl AccessControl {
account_paths_pairs.push((user, pass, paths)); account_paths_pairs.push((user, pass, paths));
} }
} }
let mut anony = None; let mut anonymous = None;
if let Some(paths) = annoy_paths { if let Some(paths) = annoy_paths {
let mut access_paths = AccessPaths::default(); let mut access_paths = AccessPaths::default();
access_paths.merge(paths); access_paths
anony = Some(access_paths); .merge(paths)
.ok_or_else(|| anyhow!("Invalid auth value `@{paths}"))?;
anonymous = Some(access_paths);
} }
let mut users = IndexMap::new(); let mut users = IndexMap::new();
for (user, pass, paths) in account_paths_pairs.into_iter() { for (user, pass, paths) in account_paths_pairs.into_iter() {
let mut access_paths = anony.clone().unwrap_or_default(); let mut access_paths = AccessPaths::default();
access_paths access_paths
.merge(paths) .merge(paths)
.ok_or_else(|| anyhow!("Invalid auth `{user}:{pass}@{paths}"))?; .ok_or_else(|| anyhow!("Invalid auth value `{user}:{pass}@{paths}"))?;
if let Some(paths) = annoy_paths {
access_paths.merge(paths);
}
if pass.starts_with("$6$") { if pass.starts_with("$6$") {
use_hashed_password = true; use_hashed_password = true;
} }
@@ -85,13 +95,14 @@ impl AccessControl {
} }
Ok(Self { Ok(Self {
empty: false,
use_hashed_password, use_hashed_password,
users, users,
anony, anonymous,
}) })
} }
pub fn exist(&self) -> bool { pub fn has_users(&self) -> bool {
!self.users.is_empty() !self.users.is_empty()
} }
@@ -100,32 +111,93 @@ impl AccessControl {
path: &str, path: &str,
method: &Method, method: &Method,
authorization: Option<&HeaderValue>, authorization: Option<&HeaderValue>,
token: Option<&String>,
guard_options: bool,
) -> (Option<String>, Option<AccessPaths>) { ) -> (Option<String>, Option<AccessPaths>) {
if let Some(authorization) = authorization { if self.empty {
if let Some(user) = get_auth_user(authorization) { return (None, Some(AccessPaths::new(AccessPerm::ReadWrite)));
if let Some((pass, paths)) = self.users.get(&user) { }
if method == Method::OPTIONS {
return (Some(user), Some(AccessPaths::new(AccessPerm::ReadOnly))); if method == Method::GET {
} if let Some(token) = token {
if check_auth(authorization, method.as_str(), &user, pass).is_some() { if let Ok((user, ap)) = self.verify_token(token, path) {
return (Some(user), paths.find(path, !is_readonly_method(method))); return (Some(user), ap.guard(path, method));
} else {
return (None, None);
}
} }
} }
} }
if method == Method::OPTIONS { if let Some(authorization) = authorization {
if let Some(user) = get_auth_user(authorization) {
if let Some((pass, ap)) = self.users.get(&user) {
if method == Method::OPTIONS {
return (Some(user), Some(AccessPaths::new(AccessPerm::ReadOnly)));
}
if check_auth(authorization, method.as_str(), &user, pass).is_some() {
return (Some(user), ap.guard(path, method));
}
}
}
return (None, None);
}
if !guard_options && method == Method::OPTIONS {
return (None, Some(AccessPaths::new(AccessPerm::ReadOnly))); return (None, Some(AccessPaths::new(AccessPerm::ReadOnly)));
} }
if let Some(paths) = self.anony.as_ref() { if let Some(ap) = self.anonymous.as_ref() {
return (None, paths.find(path, !is_readonly_method(method))); return (None, ap.guard(path, method));
} }
(None, None) (None, None)
} }
pub fn generate_token(&self, path: &str, user: &str) -> Result<String> {
let (pass, _) = self
.users
.get(user)
.ok_or_else(|| anyhow!("Not found user '{user}'"))?;
let exp = unix_now().as_millis() as u64 + TOKEN_EXPIRATION;
let message = format!("{path}:{exp}");
let mut signing_key = derive_secret_key(user, pass);
let sig = signing_key.sign(message.as_bytes()).to_bytes();
let mut raw = Vec::with_capacity(64 + 8 + user.len());
raw.extend_from_slice(&sig);
raw.extend_from_slice(&exp.to_be_bytes());
raw.extend_from_slice(user.as_bytes());
Ok(hex::encode(raw))
}
fn verify_token<'a>(&'a self, token: &str, path: &str) -> Result<(String, &'a AccessPaths)> {
let raw = hex::decode(token)?;
if raw.len() < 72 {
bail!("Invalid token");
}
let sig_bytes = &raw[..64];
let exp_bytes = &raw[64..72];
let user_bytes = &raw[72..];
let exp = u64::from_be_bytes(exp_bytes.try_into()?);
if unix_now().as_millis() as u64 > exp {
bail!("Token expired");
}
let user = std::str::from_utf8(user_bytes)?;
let (pass, ap) = self
.users
.get(user)
.ok_or_else(|| anyhow!("Not found user '{user}'"))?;
let sig = Signature::from_bytes(&<[u8; 64]>::try_from(sig_bytes)?);
let message = format!("{path}:{exp}");
derive_secret_key(user, pass).verify(message.as_bytes(), &sig)?;
Ok((user.to_string(), ap))
}
} }
#[derive(Debug, Default, Clone, PartialEq, Eq)] #[derive(Debug, Default, Clone, PartialEq, Eq)]
@@ -147,8 +219,9 @@ impl AccessPaths {
} }
pub fn set_perm(&mut self, perm: AccessPerm) { pub fn set_perm(&mut self, perm: AccessPerm) {
if !perm.inherit() { if self.perm < perm {
self.perm = perm; self.perm = perm;
self.recursively_purge_children(perm);
} }
} }
@@ -158,7 +231,6 @@ impl AccessPaths {
None => (item, AccessPerm::ReadOnly), None => (item, AccessPerm::ReadOnly),
Some((path, "ro")) => (path, AccessPerm::ReadOnly), Some((path, "ro")) => (path, AccessPerm::ReadOnly),
Some((path, "rw")) => (path, AccessPerm::ReadWrite), Some((path, "rw")) => (path, AccessPerm::ReadWrite),
Some((path, "-")) => (path, AccessPerm::Forbidden),
_ => return None, _ => return None,
}; };
self.add(path, perm); self.add(path, perm);
@@ -166,6 +238,25 @@ impl AccessPaths {
Some(()) Some(())
} }
pub fn guard(&self, path: &str, method: &Method) -> Option<Self> {
let target = self.find(path)?;
if !is_readonly_method(method) && !target.perm().readwrite() {
return None;
}
Some(target)
}
fn recursively_purge_children(&mut self, perm: AccessPerm) {
self.children.retain(|_, child| {
if child.perm <= perm {
false
} else {
child.recursively_purge_children(perm);
true
}
});
}
fn add(&mut self, path: &str, perm: AccessPerm) { fn add(&mut self, path: &str, perm: AccessPerm) {
let path = path.trim_matches('/'); let path = path.trim_matches('/');
if path.is_empty() { if path.is_empty() {
@@ -182,34 +273,30 @@ impl AccessPaths {
self.set_perm(perm); self.set_perm(perm);
return; return;
} }
if self.perm >= perm {
return;
}
let child = self.children.entry(parts[0].to_string()).or_default(); let child = self.children.entry(parts[0].to_string()).or_default();
child.add_impl(&parts[1..], perm) child.add_impl(&parts[1..], perm)
} }
pub fn find(&self, path: &str, writable: bool) -> Option<AccessPaths> { pub fn find(&self, path: &str) -> Option<AccessPaths> {
let parts: Vec<&str> = path let parts: Vec<&str> = path
.trim_matches('/') .trim_matches('/')
.split('/') .split('/')
.filter(|v| !v.is_empty()) .filter(|v| !v.is_empty())
.collect(); .collect();
let target = self.find_impl(&parts, self.perm)?; self.find_impl(&parts, self.perm)
if target.perm().forbidden() {
return None;
}
if writable && !target.perm().readwrite() {
return None;
}
Some(target)
} }
fn find_impl(&self, parts: &[&str], perm: AccessPerm) -> Option<AccessPaths> { fn find_impl(&self, parts: &[&str], perm: AccessPerm) -> Option<AccessPaths> {
let perm = if !self.perm.inherit() { let perm = if !self.perm.indexonly() {
self.perm self.perm
} else { } else {
perm perm
}; };
if parts.is_empty() { if parts.is_empty() {
if perm.inherit() { if perm.indexonly() {
return Some(self.clone()); return Some(self.clone());
} else { } else {
return Some(AccessPaths::new(perm)); return Some(AccessPaths::new(perm));
@@ -218,7 +305,7 @@ impl AccessPaths {
let child = match self.children.get(parts[0]) { let child = match self.children.get(parts[0]) {
Some(v) => v, Some(v) => v,
None => { None => {
if perm.inherit() { if perm.indexonly() {
return None; return None;
} else { } else {
return Some(AccessPaths::new(perm)); return Some(AccessPaths::new(perm));
@@ -232,20 +319,20 @@ impl AccessPaths {
self.children.keys().collect() self.children.keys().collect()
} }
pub fn child_paths(&self, base: &Path) -> Vec<PathBuf> { pub fn entry_paths(&self, base: &Path) -> Vec<PathBuf> {
if !self.perm().inherit() { if !self.perm().indexonly() {
return vec![base.to_path_buf()]; return vec![base.to_path_buf()];
} }
let mut output = vec![]; let mut output = vec![];
self.child_paths_impl(&mut output, base); self.entry_paths_impl(&mut output, base);
output output
} }
fn child_paths_impl(&self, output: &mut Vec<PathBuf>, base: &Path) { fn entry_paths_impl(&self, output: &mut Vec<PathBuf>, base: &Path) {
for (name, child) in self.children.iter() { for (name, child) in self.children.iter() {
let base = base.join(name); let base = base.join(name);
if child.perm().inherit() { if child.perm().indexonly() {
child.child_paths_impl(output, &base); child.entry_paths_impl(output, &base);
} else { } else {
output.push(base) output.push(base)
} }
@@ -256,37 +343,31 @@ impl AccessPaths {
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default)] #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default)]
pub enum AccessPerm { pub enum AccessPerm {
#[default] #[default]
Inherit, IndexOnly,
ReadOnly, ReadOnly,
ReadWrite, ReadWrite,
Forbidden,
} }
impl AccessPerm { impl AccessPerm {
pub fn inherit(&self) -> bool { pub fn indexonly(&self) -> bool {
self == &AccessPerm::Inherit self == &AccessPerm::IndexOnly
} }
pub fn readwrite(&self) -> bool { pub fn readwrite(&self) -> bool {
self == &AccessPerm::ReadWrite self == &AccessPerm::ReadWrite
} }
pub fn forbidden(&self) -> bool {
self == &AccessPerm::Forbidden
}
} }
pub fn www_authenticate(res: &mut Response, args: &Args) -> Result<()> { pub fn www_authenticate(res: &mut Response, args: &Args) -> Result<()> {
if args.auth.use_hashed_password { if args.auth.use_hashed_password {
let basic = HeaderValue::from_str(&format!("Basic realm=\"{}\"", REALM))?; let basic = HeaderValue::from_str(&format!("Basic realm=\"{REALM}\""))?;
res.headers_mut().insert(WWW_AUTHENTICATE, basic); res.headers_mut().insert(WWW_AUTHENTICATE, basic);
} else { } else {
let nonce = create_nonce()?; let nonce = create_nonce()?;
let digest = HeaderValue::from_str(&format!( let digest = HeaderValue::from_str(&format!(
"Digest realm=\"{}\", nonce=\"{}\", qop=\"auth\"", "Digest realm=\"{REALM}\", nonce=\"{nonce}\", qop=\"auth\""
REALM, nonce
))?; ))?;
let basic = HeaderValue::from_str(&format!("Basic realm=\"{}\"", REALM))?; let basic = HeaderValue::from_str(&format!("Basic realm=\"{REALM}\""))?;
res.headers_mut().append(WWW_AUTHENTICATE, digest); res.headers_mut().append(WWW_AUTHENTICATE, digest);
res.headers_mut().append(WWW_AUTHENTICATE, basic); res.headers_mut().append(WWW_AUTHENTICATE, basic);
} }
@@ -295,7 +376,7 @@ pub fn www_authenticate(res: &mut Response, args: &Args) -> Result<()> {
pub fn get_auth_user(authorization: &HeaderValue) -> Option<String> { pub fn get_auth_user(authorization: &HeaderValue) -> Option<String> {
if let Some(value) = strip_prefix(authorization.as_bytes(), b"Basic ") { if let Some(value) = strip_prefix(authorization.as_bytes(), b"Basic ") {
let value: Vec<u8> = general_purpose::STANDARD.decode(value).ok()?; let value: Vec<u8> = STANDARD.decode(value).ok()?;
let parts: Vec<&str> = std::str::from_utf8(&value).ok()?.split(':').collect(); let parts: Vec<&str> = std::str::from_utf8(&value).ok()?.split(':').collect();
Some(parts[0].to_string()) Some(parts[0].to_string())
} else if let Some(value) = strip_prefix(authorization.as_bytes(), b"Digest ") { } else if let Some(value) = strip_prefix(authorization.as_bytes(), b"Digest ") {
@@ -314,18 +395,18 @@ pub fn check_auth(
auth_pass: &str, auth_pass: &str,
) -> Option<()> { ) -> Option<()> {
if let Some(value) = strip_prefix(authorization.as_bytes(), b"Basic ") { if let Some(value) = strip_prefix(authorization.as_bytes(), b"Basic ") {
let value: Vec<u8> = general_purpose::STANDARD.decode(value).ok()?; let value: Vec<u8> = STANDARD.decode(value).ok()?;
let parts: Vec<&str> = std::str::from_utf8(&value).ok()?.split(':').collect(); let (user, pass) = std::str::from_utf8(&value).ok()?.split_once(':')?;
if parts[0] != auth_user { if user != auth_user {
return None; return None;
} }
if auth_pass.starts_with("$6$") { if auth_pass.starts_with("$6$") {
if let Ok(()) = sha_crypt::sha512_check(parts[1], auth_pass) { if let Ok(()) = sha_crypt::sha512_check(pass, auth_pass) {
return Some(()); return Some(());
} }
} else if parts[1] == auth_pass { } else if pass == auth_pass {
return Some(()); return Some(());
} }
@@ -348,8 +429,8 @@ pub fn check_auth(
} }
let mut h = Context::new(); let mut h = Context::new();
h.consume(format!("{}:{}:{}", auth_user, REALM, auth_pass).as_bytes()); h.consume(format!("{auth_user}:{REALM}:{auth_pass}").as_bytes());
let auth_pass = format!("{:x}", h.compute()); let auth_pass = format!("{:x}", h.finalize());
let mut ha = Context::new(); let mut ha = Context::new();
ha.consume(method); ha.consume(method);
@@ -357,7 +438,7 @@ pub fn check_auth(
if let Some(uri) = digest_map.get(b"uri".as_ref()) { if let Some(uri) = digest_map.get(b"uri".as_ref()) {
ha.consume(uri); ha.consume(uri);
} }
let ha = format!("{:x}", ha.compute()); let ha = format!("{:x}", ha.finalize());
let mut correct_response = None; let mut correct_response = None;
if let Some(qop) = digest_map.get(b"qop".as_ref()) { if let Some(qop) = digest_map.get(b"qop".as_ref()) {
if qop == &b"auth".as_ref() || qop == &b"auth-int".as_ref() { if qop == &b"auth".as_ref() || qop == &b"auth-int".as_ref() {
@@ -378,7 +459,7 @@ pub fn check_auth(
c.consume(qop); c.consume(qop);
c.consume(b":"); c.consume(b":");
c.consume(&*ha); c.consume(&*ha);
format!("{:x}", c.compute()) format!("{:x}", c.finalize())
}); });
} }
} }
@@ -391,7 +472,7 @@ pub fn check_auth(
c.consume(nonce); c.consume(nonce);
c.consume(b":"); c.consume(b":");
c.consume(&*ha); c.consume(&*ha);
format!("{:x}", c.compute()) format!("{:x}", c.finalize())
} }
}; };
if correct_response.as_bytes() == *user_response { if correct_response.as_bytes() == *user_response {
@@ -404,6 +485,13 @@ pub fn check_auth(
} }
} }
fn derive_secret_key(user: &str, pass: &str) -> SigningKey {
let mut hasher = Sha256::new();
hasher.update(format!("{user}:{pass}").as_bytes());
let hash = hasher.finalize();
SigningKey::from_bytes(&hash.into())
}
/// Check if a nonce is still valid. /// Check if a nonce is still valid.
/// Return an error if it was never valid /// Return an error if it was never valid
fn validate_nonce(nonce: &[u8]) -> Result<bool> { fn validate_nonce(nonce: &[u8]) -> Result<bool> {
@@ -415,14 +503,14 @@ fn validate_nonce(nonce: &[u8]) -> Result<bool> {
//get time //get time
if let Ok(secs_nonce) = u32::from_str_radix(&n[..8], 16) { if let Ok(secs_nonce) = u32::from_str_radix(&n[..8], 16) {
//check time //check time
let now = unix_now()?; let now = unix_now();
let secs_now = now.as_secs() as u32; let secs_now = now.as_secs() as u32;
if let Some(dur) = secs_now.checked_sub(secs_nonce) { if let Some(dur) = secs_now.checked_sub(secs_nonce) {
//check hash //check hash
let mut h = NONCESTARTHASH.clone(); let mut h = NONCESTARTHASH.clone();
h.consume(secs_nonce.to_be_bytes()); h.consume(secs_nonce.to_be_bytes());
let h = format!("{:x}", h.compute()); let h = format!("{:x}", h.finalize());
if h[..26] == n[8..34] { if h[..26] == n[8..34] {
return Ok(dur < DIGEST_AUTH_TIMEOUT); return Ok(dur < DIGEST_AUTH_TIMEOUT);
} }
@@ -437,6 +525,8 @@ fn is_readonly_method(method: &Method) -> bool {
|| method == Method::OPTIONS || method == Method::OPTIONS
|| method == Method::HEAD || method == Method::HEAD
|| method.as_str() == "PROPFIND" || method.as_str() == "PROPFIND"
|| method.as_str() == "CHECKAUTH"
|| method.as_str() == "LOGOUT"
} }
fn strip_prefix<'a>(search: &'a [u8], prefix: &[u8]) -> Option<&'a [u8]> { fn strip_prefix<'a>(search: &'a [u8], prefix: &[u8]) -> Option<&'a [u8]> {
@@ -493,12 +583,12 @@ fn to_headermap(header: &[u8]) -> Result<HashMap<&[u8], &[u8]>, ()> {
} }
fn create_nonce() -> Result<String> { fn create_nonce() -> Result<String> {
let now = unix_now()?; let now = unix_now();
let secs = now.as_secs() as u32; let secs = now.as_secs() as u32;
let mut h = NONCESTARTHASH.clone(); let mut h = NONCESTARTHASH.clone();
h.consume(secs.to_be_bytes()); h.consume(secs.to_be_bytes());
let n = format!("{:08x}{:032x}", secs, h.compute()); let n = format!("{:08x}{:032x}", secs, h.finalize());
Ok(n[..34].to_string()) Ok(n[..34].to_string())
} }
@@ -576,12 +666,11 @@ mod tests {
paths.add("/dir1", AccessPerm::ReadWrite); paths.add("/dir1", AccessPerm::ReadWrite);
paths.add("/dir2/dir21", AccessPerm::ReadWrite); paths.add("/dir2/dir21", AccessPerm::ReadWrite);
paths.add("/dir2/dir21/dir211", AccessPerm::ReadOnly); paths.add("/dir2/dir21/dir211", AccessPerm::ReadOnly);
paths.add("/dir2/dir21/dir212", AccessPerm::Forbidden);
paths.add("/dir2/dir22", AccessPerm::ReadOnly); paths.add("/dir2/dir22", AccessPerm::ReadOnly);
paths.add("/dir2/dir22/dir221", AccessPerm::ReadWrite); paths.add("/dir2/dir22/dir221", AccessPerm::ReadWrite);
paths.add("/dir2/dir23/dir231", AccessPerm::ReadWrite); paths.add("/dir2/dir23/dir231", AccessPerm::ReadWrite);
assert_eq!( assert_eq!(
paths.child_paths(Path::new("/tmp")), paths.entry_paths(Path::new("/tmp")),
[ [
"/tmp/dir1", "/tmp/dir1",
"/tmp/dir2/dir21", "/tmp/dir2/dir21",
@@ -594,8 +683,8 @@ mod tests {
); );
assert_eq!( assert_eq!(
paths paths
.find("dir2", false) .find("dir2")
.map(|v| v.child_paths(Path::new("/tmp/dir2"))), .map(|v| v.entry_paths(Path::new("/tmp/dir2"))),
Some( Some(
[ [
"/tmp/dir2/dir21", "/tmp/dir2/dir21",
@@ -607,20 +696,30 @@ mod tests {
.collect::<Vec<_>>() .collect::<Vec<_>>()
) )
); );
assert_eq!(paths.find("dir2", true), None);
assert_eq!( assert_eq!(
paths.find("dir1/file", true), paths.find("dir1/file"),
Some(AccessPaths::new(AccessPerm::ReadWrite)) Some(AccessPaths::new(AccessPerm::ReadWrite))
); );
assert_eq!( assert_eq!(
paths.find("dir2/dir21/file", true), paths.find("dir2/dir21/file"),
Some(AccessPaths::new(AccessPerm::ReadWrite)) Some(AccessPaths::new(AccessPerm::ReadWrite))
); );
assert_eq!( assert_eq!(
paths.find("dir2/dir21/dir211/file", false), paths.find("dir2/dir21/dir211/file"),
Some(AccessPaths::new(AccessPerm::ReadWrite))
);
assert_eq!(
paths.find("dir2/dir22/file"),
Some(AccessPaths::new(AccessPerm::ReadOnly)) Some(AccessPaths::new(AccessPerm::ReadOnly))
); );
assert_eq!(paths.find("dir2/dir21/dir211/file", true), None); assert_eq!(
assert_eq!(paths.find("dir2/dir21/dir212", false), None); paths.find("dir2/dir22/dir221/file"),
Some(AccessPaths::new(AccessPerm::ReadWrite))
);
assert_eq!(paths.find("dir2/dir23/file"), None);
assert_eq!(
paths.find("dir2/dir23//dir231/file"),
Some(AccessPaths::new(AccessPerm::ReadWrite))
);
} }
} }

View File

@@ -1,6 +1,6 @@
use std::{collections::HashMap, str::FromStr}; use std::{collections::HashMap, str::FromStr};
use crate::{auth::get_auth_user, server::Request}; use crate::{auth::get_auth_user, server::Request, utils::decode_uri};
pub const DEFAULT_LOG_FORMAT: &str = r#"$remote_addr "$request" $status"#; pub const DEFAULT_LOG_FORMAT: &str = r#"$remote_addr "$request" $status"#;
@@ -29,7 +29,9 @@ impl HttpLogger {
match element { match element {
LogElement::Variable(name) => match name.as_str() { LogElement::Variable(name) => match name.as_str() {
"request" => { "request" => {
data.insert(name.to_string(), format!("{} {}", req.method(), req.uri())); let uri = req.uri().to_string();
let uri = decode_uri(&uri).map(|s| s.to_string()).unwrap_or(uri);
data.insert(name.to_string(), format!("{} {uri}", req.method()));
} }
"remote_user" => { "remote_user" => {
if let Some(user) = if let Some(user) =
@@ -50,6 +52,7 @@ impl HttpLogger {
} }
data data
} }
pub fn log(&self, data: &HashMap<String, String>, err: Option<String>) { pub fn log(&self, data: &HashMap<String, String>, err: Option<String>) {
if self.elements.is_empty() { if self.elements.is_empty() {
return; return;
@@ -64,8 +67,8 @@ impl HttpLogger {
} }
} }
match err { match err {
Some(err) => error!("{} {}", output, err), Some(err) => error!("{output} {err}"),
None => info!("{}", output), None => info!("{output}"),
} }
} }
} }

View File

@@ -1,8 +1,14 @@
use anyhow::{Context, Result};
use chrono::{Local, SecondsFormat}; use chrono::{Local, SecondsFormat};
use log::{Level, Metadata, Record}; use log::{Level, LevelFilter, Metadata, Record};
use log::{LevelFilter, SetLoggerError}; use std::fs::{File, OpenOptions};
use std::io::Write;
use std::path::PathBuf;
use std::sync::Mutex;
struct SimpleLogger; struct SimpleLogger {
file: Option<Mutex<File>>,
}
impl log::Log for SimpleLogger { impl log::Log for SimpleLogger {
fn enabled(&self, metadata: &Metadata) -> bool { fn enabled(&self, metadata: &Metadata) -> bool {
@@ -12,10 +18,20 @@ impl log::Log for SimpleLogger {
fn log(&self, record: &Record) { fn log(&self, record: &Record) {
if self.enabled(record.metadata()) { if self.enabled(record.metadata()) {
let timestamp = Local::now().to_rfc3339_opts(SecondsFormat::Secs, true); let timestamp = Local::now().to_rfc3339_opts(SecondsFormat::Secs, true);
if record.level() < Level::Info { let text = format!("{} {} - {}", timestamp, record.level(), record.args());
eprintln!("{} {} - {}", timestamp, record.level(), record.args()); match &self.file {
} else { Some(file) => {
println!("{} {} - {}", timestamp, record.level(), record.args()); if let Ok(mut file) = file.lock() {
let _ = writeln!(file, "{text}");
}
}
None => {
if record.level() < Level::Info {
eprintln!("{text}");
} else {
println!("{text}");
}
}
} }
} }
} }
@@ -23,8 +39,23 @@ impl log::Log for SimpleLogger {
fn flush(&self) {} fn flush(&self) {}
} }
static LOGGER: SimpleLogger = SimpleLogger; pub fn init(log_file: Option<PathBuf>) -> Result<()> {
let file = match log_file {
pub fn init() -> Result<(), SetLoggerError> { None => None,
log::set_logger(&LOGGER).map(|()| log::set_max_level(LevelFilter::Info)) Some(log_file) => {
let file = OpenOptions::new()
.create(true)
.append(true)
.open(&log_file)
.with_context(|| {
format!("Failed to open the log file at '{}'", log_file.display())
})?;
Some(Mutex::new(file))
}
};
let logger = SimpleLogger { file };
log::set_boxed_logger(Box::new(logger))
.map(|_| log::set_max_level(LevelFilter::Info))
.with_context(|| "Failed to init logger")?;
Ok(())
} }

View File

@@ -3,6 +3,7 @@ mod auth;
mod http_logger; mod http_logger;
mod http_utils; mod http_utils;
mod logger; mod logger;
mod noscript;
mod server; mod server;
mod utils; mod utils;
@@ -29,13 +30,14 @@ use std::sync::{
atomic::{AtomicBool, Ordering}, atomic::{AtomicBool, Ordering},
Arc, Arc,
}; };
use std::time::Duration;
use tokio::time::timeout;
use tokio::{net::TcpListener, task::JoinHandle}; use tokio::{net::TcpListener, task::JoinHandle};
#[cfg(feature = "tls")] #[cfg(feature = "tls")]
use tokio_rustls::{rustls::ServerConfig, TlsAcceptor}; use tokio_rustls::{rustls::ServerConfig, TlsAcceptor};
#[tokio::main] #[tokio::main]
async fn main() -> Result<()> { async fn main() -> Result<()> {
logger::init().map_err(|e| anyhow!("Failed to init logger, {e}"))?;
let cmd = build_cli(); let cmd = build_cli();
let matches = cmd.get_matches(); let matches = cmd.get_matches();
if let Some(generator) = matches.get_one::<Shell>("completions") { if let Some(generator) = matches.get_one::<Shell>("completions") {
@@ -43,9 +45,12 @@ async fn main() -> Result<()> {
print_completions(*generator, &mut cmd); print_completions(*generator, &mut cmd);
return Ok(()); return Ok(());
} }
let args = Args::parse(matches)?; let mut args = Args::parse(matches)?;
logger::init(args.log_file.clone()).map_err(|e| anyhow!("Failed to init logger, {e}"))?;
let (new_addrs, print_addrs) = check_addrs(&args)?;
args.addrs = new_addrs;
let running = Arc::new(AtomicBool::new(true)); let running = Arc::new(AtomicBool::new(true));
let listening = print_listening(&args)?; let listening = print_listening(&args, &print_addrs)?;
let handles = serve(args, running.clone())?; let handles = serve(args, running.clone())?;
println!("{listening}"); println!("{listening}");
@@ -53,7 +58,7 @@ async fn main() -> Result<()> {
ret = join_all(handles) => { ret = join_all(handles) => {
for r in ret { for r in ret {
if let Err(e) = r { if let Err(e) = r {
error!("{}", e); error!("{e}");
} }
} }
Ok(()) Ok(())
@@ -74,7 +79,7 @@ fn serve(args: Args, running: Arc<AtomicBool>) -> Result<Vec<JoinHandle<()>>> {
for bind_addr in addrs.iter() { for bind_addr in addrs.iter() {
let server_handle = server_handle.clone(); let server_handle = server_handle.clone();
match bind_addr { match bind_addr {
BindAddr::Address(ip) => { BindAddr::IpAddr(ip) => {
let listener = create_listener(SocketAddr::new(*ip, port)) let listener = create_listener(SocketAddr::new(*ip, port))
.with_context(|| format!("Failed to bind `{ip}:{port}`"))?; .with_context(|| format!("Failed to bind `{ip}:{port}`"))?;
@@ -89,12 +94,19 @@ fn serve(args: Args, running: Arc<AtomicBool>) -> Result<Vec<JoinHandle<()>>> {
config.alpn_protocols = vec![b"h2".to_vec(), b"http/1.1".to_vec()]; config.alpn_protocols = vec![b"h2".to_vec(), b"http/1.1".to_vec()];
let config = Arc::new(config); let config = Arc::new(config);
let tls_accepter = TlsAcceptor::from(config); let tls_accepter = TlsAcceptor::from(config);
let handshake_timeout = Duration::from_secs(10);
let handle = tokio::spawn(async move { let handle = tokio::spawn(async move {
loop { loop {
let (cnx, addr) = listener.accept().await.unwrap(); let Ok((stream, addr)) = listener.accept().await else {
let Ok(stream) = tls_accepter.accept(cnx).await else { continue;
warn!("During cls handshake connection from {}", addr); };
let Some(stream) =
timeout(handshake_timeout, tls_accepter.accept(stream))
.await
.ok()
.and_then(|v| v.ok())
else {
continue; continue;
}; };
let stream = TokioIo::new(stream); let stream = TokioIo::new(stream);
@@ -111,8 +123,10 @@ fn serve(args: Args, running: Arc<AtomicBool>) -> Result<Vec<JoinHandle<()>>> {
(None, None) => { (None, None) => {
let handle = tokio::spawn(async move { let handle = tokio::spawn(async move {
loop { loop {
let (cnx, addr) = listener.accept().await.unwrap(); let Ok((stream, addr)) = listener.accept().await else {
let stream = TokioIo::new(cnx); continue;
};
let stream = TokioIo::new(stream);
tokio::spawn(handle_stream( tokio::spawn(handle_stream(
server_handle.clone(), server_handle.clone(),
stream, stream,
@@ -127,24 +141,32 @@ fn serve(args: Args, running: Arc<AtomicBool>) -> Result<Vec<JoinHandle<()>>> {
} }
}; };
} }
BindAddr::Path(path) => { #[cfg(unix)]
if path.exists() { BindAddr::SocketPath(path) => {
std::fs::remove_file(path)?; let socket_path = if path.starts_with("@")
} && cfg!(any(target_os = "linux", target_os = "android"))
#[cfg(unix)]
{ {
let listener = tokio::net::UnixListener::bind(path) let mut path_buf = path.as_bytes().to_vec();
.with_context(|| format!("Failed to bind `{}`", path.display()))?; path_buf[0] = b'\0';
let handle = tokio::spawn(async move { unsafe { std::ffi::OsStr::from_encoded_bytes_unchecked(&path_buf) }
loop { .to_os_string()
let (cnx, _) = listener.accept().await.unwrap(); } else {
let stream = TokioIo::new(cnx); let _ = std::fs::remove_file(path);
tokio::spawn(handle_stream(server_handle.clone(), stream, None)); path.into()
} };
}); let listener = tokio::net::UnixListener::bind(socket_path)
.with_context(|| format!("Failed to bind `{path}`"))?;
let handle = tokio::spawn(async move {
loop {
let Ok((stream, _addr)) = listener.accept().await else {
continue;
};
let stream = TokioIo::new(stream);
tokio::spawn(handle_stream(server_handle.clone(), stream, None));
}
});
handles.push(handle); handles.push(handle);
}
} }
} }
} }
@@ -158,18 +180,15 @@ where
let hyper_service = let hyper_service =
service_fn(move |request: Request<Incoming>| handle.clone().call(request, addr)); service_fn(move |request: Request<Incoming>| handle.clone().call(request, addr));
let ret = Builder::new(TokioExecutor::new()) match Builder::new(TokioExecutor::new())
.serve_connection_with_upgrades(stream, hyper_service) .serve_connection_with_upgrades(stream, hyper_service)
.await; .await
{
if let Err(err) = ret { Ok(()) => {}
let scope = match addr { Err(_err) => {
Some(addr) => format!(" from {}", addr), // This error only appears when the client doesn't send a request and terminate the connection.
None => String::new(), //
}; // If client sends one request then terminate connection whenever, it doesn't appear.
match err.downcast_ref::<std::io::Error>() {
Some(err) if err.kind() == std::io::ErrorKind::UnexpectedEof => {}
_ => warn!("Serving connection{}: {}", scope, err),
} }
} }
} }
@@ -189,44 +208,67 @@ fn create_listener(addr: SocketAddr) -> Result<TcpListener> {
Ok(listener) Ok(listener)
} }
fn print_listening(args: &Args) -> Result<String> { fn check_addrs(args: &Args) -> Result<(Vec<BindAddr>, Vec<BindAddr>)> {
let mut output = String::new(); let mut new_addrs = vec![];
let mut bind_addrs = vec![]; let mut print_addrs = vec![];
let (mut ipv4, mut ipv6) = (false, false); let (ipv4_addrs, ipv6_addrs) = interface_addrs()?;
for bind_addr in args.addrs.iter() { for bind_addr in args.addrs.iter() {
match bind_addr { match bind_addr {
BindAddr::Address(ip) => { BindAddr::IpAddr(ip) => match &ip {
if ip.is_unspecified() { IpAddr::V4(_) => {
if ip.is_ipv6() { if !ipv4_addrs.is_empty() {
ipv6 = true; new_addrs.push(bind_addr.clone());
} else { if ip.is_unspecified() {
ipv4 = true; print_addrs.extend(ipv4_addrs.clone());
} else {
print_addrs.push(bind_addr.clone());
}
} }
} else {
bind_addrs.push(bind_addr.clone());
} }
} IpAddr::V6(_) => {
_ => bind_addrs.push(bind_addr.clone()), if !ipv6_addrs.is_empty() {
} new_addrs.push(bind_addr.clone());
} if ip.is_unspecified() {
if ipv4 || ipv6 { print_addrs.extend(ipv6_addrs.clone());
let ifaces = } else {
if_addrs::get_if_addrs().with_context(|| "Failed to get local interface addresses")?; print_addrs.push(bind_addr.clone())
for iface in ifaces.into_iter() { }
let local_ip = iface.ip(); }
if ipv4 && local_ip.is_ipv4() { }
bind_addrs.push(BindAddr::Address(local_ip)) },
} #[cfg(unix)]
if ipv6 && local_ip.is_ipv6() { _ => {
bind_addrs.push(BindAddr::Address(local_ip)) new_addrs.push(bind_addr.clone());
print_addrs.push(bind_addr.clone())
} }
} }
} }
bind_addrs.sort_unstable(); print_addrs.sort_unstable();
let urls = bind_addrs Ok((new_addrs, print_addrs))
.into_iter() }
fn interface_addrs() -> Result<(Vec<BindAddr>, Vec<BindAddr>)> {
let (mut ipv4_addrs, mut ipv6_addrs) = (vec![], vec![]);
let ifaces =
if_addrs::get_if_addrs().with_context(|| "Failed to get local interface addresses")?;
for iface in ifaces.into_iter() {
let ip = iface.ip();
if ip.is_ipv4() {
ipv4_addrs.push(BindAddr::IpAddr(ip))
}
if ip.is_ipv6() {
ipv6_addrs.push(BindAddr::IpAddr(ip))
}
}
Ok((ipv4_addrs, ipv6_addrs))
}
fn print_listening(args: &Args, print_addrs: &[BindAddr]) -> Result<String> {
let mut output = String::new();
let urls = print_addrs
.iter()
.map(|bind_addr| match bind_addr { .map(|bind_addr| match bind_addr {
BindAddr::Address(addr) => { BindAddr::IpAddr(addr) => {
let addr = match addr { let addr = match addr {
IpAddr::V4(_) => format!("{}:{}", addr, args.port), IpAddr::V4(_) => format!("{}:{}", addr, args.port),
IpAddr::V6(_) => format!("[{}]:{}", addr, args.port), IpAddr::V6(_) => format!("[{}]:{}", addr, args.port),
@@ -238,7 +280,8 @@ fn print_listening(args: &Args) -> Result<String> {
}; };
format!("{}://{}{}", protocol, addr, args.uri_prefix) format!("{}://{}{}", protocol, addr, args.uri_prefix)
} }
BindAddr::Path(path) => path.display().to_string(), #[cfg(unix)]
BindAddr::SocketPath(path) => path.to_string(),
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();

103
src/noscript.rs Normal file
View File

@@ -0,0 +1,103 @@
use crate::{
server::{IndexData, PathItem, PathType, MAX_SUBPATHS_COUNT},
utils::encode_uri,
};
use anyhow::Result;
use chrono::{DateTime, Utc};
use xml::escape::escape_str_pcdata;
pub fn detect_noscript(user_agent: &str) -> bool {
[
"lynx/", "w3m/", "links ", "elinks/", "curl/", "wget/", "httpie/", "aria2/",
]
.iter()
.any(|v| user_agent.starts_with(v))
}
pub fn generate_noscript_html(data: &IndexData) -> Result<String> {
let mut html = String::new();
let title = format!("Index of {}", escape_str_pcdata(&data.href));
html.push_str("<html>\n");
html.push_str("<head>\n");
html.push_str(&format!("<title>{title}</title>\n"));
html.push_str(
r#"<style>
td {
padding: 0.2rem;
text-align: left;
}
td:nth-child(3) {
text-align: right;
}
</style>
"#,
);
html.push_str("</head>\n");
html.push_str("<body>\n");
html.push_str(&format!("<h1>{title}</h1>\n"));
html.push_str("<table>\n");
html.push_str(" <tbody>\n");
html.push_str(&format!(" {}\n", render_parent()));
for path in &data.paths {
html.push_str(&format!(" {}\n", render_path_item(path)));
}
html.push_str(" </tbody>\n");
html.push_str("</table>\n");
html.push_str("</body>\n");
Ok(html)
}
fn render_parent() -> String {
let value = "../";
format!("<tr><td><a href=\"{value}?noscript\">{value}</a></td><td></td><td></td></tr>")
}
fn render_path_item(path: &PathItem) -> String {
let mut href = encode_uri(&path.name);
let mut name = escape_str_pcdata(&path.name).to_string();
if path.path_type.is_dir() {
href.push_str("/?noscript");
name.push('/');
};
let mtime = format_mtime(path.mtime).unwrap_or_default();
let size = format_size(path.size, path.path_type);
format!("<tr><td><a href=\"{href}\">{name}</a></td><td>{mtime}</td><td>{size}</td></tr>")
}
fn format_mtime(mtime: u64) -> Option<String> {
let datetime = DateTime::<Utc>::from_timestamp_millis(mtime as _)?;
Some(datetime.format("%Y-%m-%dT%H:%M:%S.%3fZ").to_string())
}
fn format_size(size: u64, path_type: PathType) -> String {
if path_type.is_dir() {
let unit = if size == 1 { "item" } else { "items" };
let num = match size >= MAX_SUBPATHS_COUNT {
true => format!(">{}", MAX_SUBPATHS_COUNT - 1),
false => size.to_string(),
};
format!("{num} {unit}")
} else {
if size == 0 {
return "0 B".to_string();
}
const UNITS: [&str; 5] = ["B", "KB", "MB", "GB", "TB"];
let i = (size as f64).log2() / 10.0;
let i = i.floor() as usize;
if i >= UNITS.len() {
// Handle extremely large numbers beyond Terabytes
return format!("{:.2} PB", size as f64 / 1024.0f64.powi(5));
}
let size = size as f64 / 1024.0f64.powi(i as i32);
format!("{:.2} {}", size, UNITS[i])
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,17 +1,17 @@
use anyhow::{anyhow, Context, Result}; use anyhow::{anyhow, Context, Result};
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
#[cfg(feature = "tls")] #[cfg(feature = "tls")]
use rustls_pki_types::{CertificateDer, PrivateKeyDer}; use rustls_pki_types::{pem::PemObject, CertificateDer, PrivateKeyDer};
use std::{ use std::{
borrow::Cow, borrow::Cow,
path::Path, path::Path,
time::{Duration, SystemTime, UNIX_EPOCH}, time::{Duration, SystemTime, UNIX_EPOCH},
}; };
pub fn unix_now() -> Result<Duration> { pub fn unix_now() -> Duration {
SystemTime::now() SystemTime::now()
.duration_since(UNIX_EPOCH) .duration_since(UNIX_EPOCH)
.with_context(|| "Invalid system time") .expect("Unable to get unix epoch time")
} }
pub fn encode_uri(v: &str) -> String { pub fn encode_uri(v: &str) -> String {
@@ -19,7 +19,7 @@ pub fn encode_uri(v: &str) -> String {
parts.join("/") parts.join("/")
} }
pub fn decode_uri(v: &str) -> Option<Cow<str>> { pub fn decode_uri(v: &str) -> Option<Cow<'_, str>> {
percent_encoding::percent_decode(v.as_bytes()) percent_encoding::percent_decode(v.as_bytes())
.decode_utf8() .decode_utf8()
.ok() .ok()
@@ -62,74 +62,78 @@ pub fn glob(pattern: &str, target: &str) -> bool {
// Load public certificate from file. // Load public certificate from file.
#[cfg(feature = "tls")] #[cfg(feature = "tls")]
pub fn load_certs<T: AsRef<Path>>(filename: T) -> Result<Vec<CertificateDer<'static>>> { pub fn load_certs<T: AsRef<Path>>(file_name: T) -> Result<Vec<CertificateDer<'static>>> {
// Open certificate file.
let cert_file = std::fs::File::open(filename.as_ref())
.with_context(|| format!("Failed to access `{}`", filename.as_ref().display()))?;
let mut reader = std::io::BufReader::new(cert_file);
// Load and return certificate.
let mut certs = vec![]; let mut certs = vec![];
for cert in rustls_pemfile::certs(&mut reader) { for cert in CertificateDer::pem_file_iter(file_name.as_ref()).with_context(|| {
let cert = cert.with_context(|| "Failed to load certificate")?; format!(
"Failed to load cert file at `{}`",
file_name.as_ref().display()
)
})? {
let cert = cert.with_context(|| {
format!(
"Invalid certificate data in file `{}`",
file_name.as_ref().display()
)
})?;
certs.push(cert) certs.push(cert)
} }
if certs.is_empty() { if certs.is_empty() {
anyhow::bail!("No supported certificate in file"); anyhow::bail!(
"No supported certificate in file `{}`",
file_name.as_ref().display()
);
} }
Ok(certs) Ok(certs)
} }
// Load private key from file. // Load private key from file.
#[cfg(feature = "tls")] #[cfg(feature = "tls")]
pub fn load_private_key<T: AsRef<Path>>(filename: T) -> Result<PrivateKeyDer<'static>> { pub fn load_private_key<T: AsRef<Path>>(file_name: T) -> Result<PrivateKeyDer<'static>> {
let key_file = std::fs::File::open(filename.as_ref()) PrivateKeyDer::from_pem_file(file_name.as_ref()).with_context(|| {
.with_context(|| format!("Failed to access `{}`", filename.as_ref().display()))?; format!(
let mut reader = std::io::BufReader::new(key_file); "Failed to load key file at `{}`",
file_name.as_ref().display()
// Load and return a single private key. )
for key in rustls_pemfile::read_all(&mut reader) { })
let key = key.with_context(|| "There was a problem with reading private key")?;
match key {
rustls_pemfile::Item::Pkcs1Key(key) => return Ok(PrivateKeyDer::Pkcs1(key)),
rustls_pemfile::Item::Pkcs8Key(key) => return Ok(PrivateKeyDer::Pkcs8(key)),
rustls_pemfile::Item::Sec1Key(key) => return Ok(PrivateKeyDer::Sec1(key)),
_ => {}
}
}
anyhow::bail!("No supported private key in file");
} }
pub fn parse_range(range: &str, size: u64) -> Option<(u64, u64)> { pub fn parse_range(range: &str, size: u64) -> Option<Vec<(u64, u64)>> {
let (unit, range) = range.split_once('=')?; let (unit, ranges) = range.split_once('=')?;
if unit != "bytes" || range.contains(',') { if unit != "bytes" {
return None; return None;
} }
let (start, end) = range.split_once('-')?;
if start.is_empty() { let mut result = Vec::new();
let offset = end.parse::<u64>().ok()?; for range in ranges.split(',') {
if offset <= size { let (start, end) = range.trim().split_once('-')?;
Some((size - offset, size - 1)) if start.is_empty() {
} else { let offset = end.parse::<u64>().ok()?;
None if offset <= size {
} result.push((size - offset, size - 1));
} else {
let start = start.parse::<u64>().ok()?;
if start < size {
if end.is_empty() {
Some((start, size - 1))
} else { } else {
let end = end.parse::<u64>().ok()?; return None;
if end < size {
Some((start, end))
} else {
None
}
} }
} else { } else {
None let start = start.parse::<u64>().ok()?;
if start < size {
if end.is_empty() {
result.push((start, size - 1));
} else {
let end = end.parse::<u64>().ok()?;
if end < size {
result.push((start, end));
} else {
return None;
}
}
} else {
return None;
}
} }
} }
Some(result)
} }
#[cfg(test)] #[cfg(test)]
@@ -162,13 +166,19 @@ mod tests {
#[test] #[test]
fn test_parse_range() { fn test_parse_range() {
assert_eq!(parse_range("bytes=0-499", 500), Some((0, 499))); assert_eq!(parse_range("bytes=0-499", 500), Some(vec![(0, 499)]));
assert_eq!(parse_range("bytes=0-", 500), Some((0, 499))); assert_eq!(parse_range("bytes=0-", 500), Some(vec![(0, 499)]));
assert_eq!(parse_range("bytes=299-", 500), Some((299, 499))); assert_eq!(parse_range("bytes=299-", 500), Some(vec![(299, 499)]));
assert_eq!(parse_range("bytes=-500", 500), Some((0, 499))); assert_eq!(parse_range("bytes=-500", 500), Some(vec![(0, 499)]));
assert_eq!(parse_range("bytes=-300", 500), Some((200, 499))); assert_eq!(parse_range("bytes=-300", 500), Some(vec![(200, 499)]));
assert_eq!(
parse_range("bytes=0-199, 100-399, 400-, -200", 500),
Some(vec![(0, 199), (100, 399), (400, 499), (300, 499)])
);
assert_eq!(parse_range("bytes=500-", 500), None); assert_eq!(parse_range("bytes=500-", 500), None);
assert_eq!(parse_range("bytes=-501", 500), None); assert_eq!(parse_range("bytes=-501", 500), None);
assert_eq!(parse_range("bytes=0-500", 500), None); assert_eq!(parse_range("bytes=0-500", 500), None);
assert_eq!(parse_range("bytes=0-199,", 500), None);
assert_eq!(parse_range("bytes=0-199, 500-", 500), None);
} }
} }

View File

@@ -1,7 +1,6 @@
mod fixtures; mod fixtures;
mod utils; mod utils;
use assert_cmd::prelude::*;
use assert_fs::fixture::TempDir; use assert_fs::fixture::TempDir;
use fixtures::{port, server, tmpdir, wait_for_port, Error, TestServer, DIR_ASSETS}; use fixtures::{port, server, tmpdir, wait_for_port, Error, TestServer, DIR_ASSETS};
use rstest::rstest; use rstest::rstest;
@@ -11,10 +10,11 @@ use std::process::{Command, Stdio};
fn assets(server: TestServer) -> Result<(), Error> { fn assets(server: TestServer) -> Result<(), Error> {
let ver = env!("CARGO_PKG_VERSION"); let ver = env!("CARGO_PKG_VERSION");
let resp = reqwest::blocking::get(server.url())?; let resp = reqwest::blocking::get(server.url())?;
let index_js = format!("/__dufs_v{ver}_index.js"); let index_js = format!("/__dufs_v{ver}__/index.js");
let index_css = format!("/__dufs_v{ver}_index.css"); let index_css = format!("/__dufs_v{ver}__/index.css");
let favicon_ico = format!("/__dufs_v{ver}_favicon.ico"); let favicon_ico = format!("/__dufs_v{ver}__/favicon.ico");
let text = resp.text()?; let text = resp.text()?;
println!("{text}");
assert!(text.contains(&format!(r#"href="{index_css}""#))); assert!(text.contains(&format!(r#"href="{index_css}""#)));
assert!(text.contains(&format!(r#"href="{favicon_ico}""#))); assert!(text.contains(&format!(r#"href="{favicon_ico}""#)));
assert!(text.contains(&format!(r#"src="{index_js}""#))); assert!(text.contains(&format!(r#"src="{index_js}""#)));
@@ -24,7 +24,7 @@ fn assets(server: TestServer) -> Result<(), Error> {
#[rstest] #[rstest]
fn asset_js(server: TestServer) -> Result<(), Error> { fn asset_js(server: TestServer) -> Result<(), Error> {
let url = format!( let url = format!(
"{}__dufs_v{}_index.js", "{}__dufs_v{}__/index.js",
server.url(), server.url(),
env!("CARGO_PKG_VERSION") env!("CARGO_PKG_VERSION")
); );
@@ -40,7 +40,7 @@ fn asset_js(server: TestServer) -> Result<(), Error> {
#[rstest] #[rstest]
fn asset_css(server: TestServer) -> Result<(), Error> { fn asset_css(server: TestServer) -> Result<(), Error> {
let url = format!( let url = format!(
"{}__dufs_v{}_index.css", "{}__dufs_v{}__/index.css",
server.url(), server.url(),
env!("CARGO_PKG_VERSION") env!("CARGO_PKG_VERSION")
); );
@@ -56,7 +56,7 @@ fn asset_css(server: TestServer) -> Result<(), Error> {
#[rstest] #[rstest]
fn asset_ico(server: TestServer) -> Result<(), Error> { fn asset_ico(server: TestServer) -> Result<(), Error> {
let url = format!( let url = format!(
"{}__dufs_v{}_favicon.ico", "{}__dufs_v{}__/favicon.ico",
server.url(), server.url(),
env!("CARGO_PKG_VERSION") env!("CARGO_PKG_VERSION")
); );
@@ -70,9 +70,9 @@ fn asset_ico(server: TestServer) -> Result<(), Error> {
fn assets_with_prefix(#[with(&["--path-prefix", "xyz"])] server: TestServer) -> Result<(), Error> { fn assets_with_prefix(#[with(&["--path-prefix", "xyz"])] server: TestServer) -> Result<(), Error> {
let ver = env!("CARGO_PKG_VERSION"); let ver = env!("CARGO_PKG_VERSION");
let resp = reqwest::blocking::get(format!("{}xyz/", server.url()))?; let resp = reqwest::blocking::get(format!("{}xyz/", server.url()))?;
let index_js = format!("/xyz/__dufs_v{ver}_index.js"); let index_js = format!("/xyz/__dufs_v{ver}__/index.js");
let index_css = format!("/xyz/__dufs_v{ver}_index.css"); let index_css = format!("/xyz/__dufs_v{ver}__/index.css");
let favicon_ico = format!("/xyz/__dufs_v{ver}_favicon.ico"); let favicon_ico = format!("/xyz/__dufs_v{ver}__/favicon.ico");
let text = resp.text()?; let text = resp.text()?;
assert!(text.contains(&format!(r#"href="{index_css}""#))); assert!(text.contains(&format!(r#"href="{index_css}""#)));
assert!(text.contains(&format!(r#"href="{favicon_ico}""#))); assert!(text.contains(&format!(r#"href="{favicon_ico}""#)));
@@ -85,7 +85,7 @@ fn asset_js_with_prefix(
#[with(&["--path-prefix", "xyz"])] server: TestServer, #[with(&["--path-prefix", "xyz"])] server: TestServer,
) -> Result<(), Error> { ) -> Result<(), Error> {
let url = format!( let url = format!(
"{}xyz/__dufs_v{}_index.js", "{}xyz/__dufs_v{}__/index.js",
server.url(), server.url(),
env!("CARGO_PKG_VERSION") env!("CARGO_PKG_VERSION")
); );
@@ -100,7 +100,7 @@ fn asset_js_with_prefix(
#[rstest] #[rstest]
fn assets_override(tmpdir: TempDir, port: u16) -> Result<(), Error> { fn assets_override(tmpdir: TempDir, port: u16) -> Result<(), Error> {
let mut child = Command::cargo_bin("dufs")? let mut child = Command::new(assert_cmd::cargo::cargo_bin!())
.arg(tmpdir.path()) .arg(tmpdir.path())
.arg("-p") .arg("-p")
.arg(port.to_string()) .arg(port.to_string())
@@ -114,7 +114,7 @@ fn assets_override(tmpdir: TempDir, port: u16) -> Result<(), Error> {
let url = format!("http://localhost:{port}"); let url = format!("http://localhost:{port}");
let resp = reqwest::blocking::get(&url)?; let resp = reqwest::blocking::get(&url)?;
assert!(resp.text()?.starts_with(&format!( assert!(resp.text()?.starts_with(&format!(
"/__dufs_v{}_index.js;DATA", "/__dufs_v{}__/index.js;<template id=\"index-data\">",
env!("CARGO_PKG_VERSION") env!("CARGO_PKG_VERSION")
))); )));
let resp = reqwest::blocking::get(&url)?; let resp = reqwest::blocking::get(&url)?;

View File

@@ -1,7 +1,8 @@
mod digest_auth_util;
mod fixtures; mod fixtures;
mod utils; mod utils;
use diqwest::blocking::WithDigestAuth; use digest_auth_util::send_with_digest_auth;
use fixtures::{server, Error, TestServer}; use fixtures::{server, Error, TestServer};
use indexmap::IndexSet; use indexmap::IndexSet;
use rstest::rstest; use rstest::rstest;
@@ -32,34 +33,51 @@ fn auth(#[case] server: TestServer, #[case] user: &str, #[case] pass: &str) -> R
let url = format!("{}file1", server.url()); let url = format!("{}file1", server.url());
let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?; let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
assert_eq!(resp.status(), 401); assert_eq!(resp.status(), 401);
let resp = fetch!(b"PUT", &url) let resp = send_with_digest_auth(fetch!(b"PUT", &url).body(b"abc".to_vec()), user, pass)?;
.body(b"abc".to_vec())
.send_with_digest_auth(user, pass)?;
assert_eq!(resp.status(), 201); assert_eq!(resp.status(), 201);
Ok(()) Ok(())
} }
const HASHED_PASSWORD_AUTH: &str = "user:$6$gQxZwKyWn/ZmWEA2$4uV7KKMnSUnET2BtWTj/9T5.Jq3h/MdkOlnIl5hdlTxDZ4MZKmJ.kl6C.NL9xnNPqC4lVHC1vuI0E5cLpTJX81@/:rw"; // user:pass #[rstest]
fn invalid_auth(
#[with(&["-a", "user:pass@/:rw", "-a", "@/", "-A"])] server: TestServer,
) -> Result<(), Error> {
let resp = fetch!(b"GET", server.url())
.basic_auth("user", Some("-"))
.send()?;
assert_eq!(resp.status(), 401);
let resp = fetch!(b"GET", server.url())
.basic_auth("-", Some("pass"))
.send()?;
assert_eq!(resp.status(), 401);
let resp = fetch!(b"GET", server.url())
.header("Authorization", "Basic Og==")
.send()?;
assert_eq!(resp.status(), 401);
Ok(())
}
#[rstest] #[rstest]
#[case(server(&["--auth", "user:$6$gQxZwKyWn/ZmWEA2$4uV7KKMnSUnET2BtWTj/9T5.Jq3h/MdkOlnIl5hdlTxDZ4MZKmJ.kl6C.NL9xnNPqC4lVHC1vuI0E5cLpTJX81@/:rw", "-A"]), "user", "pass")]
#[case(server(&["--auth", "user:$6$YV1J6OHZAAgbzCbS$V55ZEgvJ6JFdz1nLO4AD696PRHAJYhfQf.Gy2HafrCz5itnbgNTtTgfUSqZrt4BJ7FcpRfSt/QZzAan68pido0@/:rw", "-A"]), "user", "pa:ss@1")]
fn auth_hashed_password( fn auth_hashed_password(
#[with(&["--auth", HASHED_PASSWORD_AUTH, "-A"])] server: TestServer, #[case] server: TestServer,
#[case] user: &str,
#[case] pass: &str,
) -> Result<(), Error> { ) -> Result<(), Error> {
let url = format!("{}file1", server.url()); let url = format!("{}file1", server.url());
let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?; let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
assert_eq!(resp.status(), 401); assert_eq!(resp.status(), 401);
if let Err(err) = fetch!(b"PUT", &url) if let Err(err) = send_with_digest_auth(fetch!(b"PUT", &url).body(b"abc".to_vec()), user, pass)
.body(b"abc".to_vec())
.send_with_digest_auth("user", "pass")
{ {
assert_eq!( assert_eq!(
format!("{err:?}"), err.to_string(),
r#"DigestAuth(MissingRequired("realm", "Basic realm=\"DUFS\""))"# r#"Missing "realm" in header: Basic realm="DUFS""#
); );
} }
let resp = fetch!(b"PUT", &url) let resp = fetch!(b"PUT", &url)
.body(b"abc".to_vec()) .body(b"abc".to_vec())
.basic_auth("user", Some("pass")) .basic_auth(user, Some(pass))
.send()?; .send()?;
assert_eq!(resp.status(), 201); assert_eq!(resp.status(), 201);
Ok(()) Ok(())
@@ -72,9 +90,7 @@ fn auth_and_public(
let url = format!("{}file1", server.url()); let url = format!("{}file1", server.url());
let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?; let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
assert_eq!(resp.status(), 401); assert_eq!(resp.status(), 401);
let resp = fetch!(b"PUT", &url) let resp = send_with_digest_auth(fetch!(b"PUT", &url).body(b"abc".to_vec()), "user", "pass")?;
.body(b"abc".to_vec())
.send_with_digest_auth("user", "pass")?;
assert_eq!(resp.status(), 201); assert_eq!(resp.status(), 201);
let resp = fetch!(b"GET", &url).send()?; let resp = fetch!(b"GET", &url).send()?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
@@ -100,30 +116,82 @@ fn auth_skip_on_options_method(
} }
#[rstest] #[rstest]
fn auth_check( fn auth_skip_if_no_auth_user(server: TestServer) -> Result<(), Error> {
#[with(&["--auth", "user:pass@/:rw", "--auth", "user2:pass2@/", "-A"])] server: TestServer,
) -> Result<(), Error> {
let url = format!("{}index.html", server.url()); let url = format!("{}index.html", server.url());
let resp = fetch!(b"WRITEABLE", &url).send()?; let resp = fetch!(b"GET", &url)
assert_eq!(resp.status(), 401); .basic_auth("user", Some("pass"))
let resp = fetch!(b"WRITEABLE", &url).send_with_digest_auth("user2", "pass2")?; .send()?;
assert_eq!(resp.status(), 403);
let resp = fetch!(b"WRITEABLE", &url).send_with_digest_auth("user", "pass")?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
Ok(()) Ok(())
} }
#[rstest] #[rstest]
fn auth_compact_rules( fn auth_no_skip_if_anonymous(
#[with(&["--auth", "user:pass@/:rw|user2:pass2@/", "-A"])] server: TestServer, #[with(&["--auth", "@/:ro"])] server: TestServer,
) -> Result<(), Error> { ) -> Result<(), Error> {
let url = format!("{}index.html", server.url()); let url = format!("{}index.html", server.url());
let resp = fetch!(b"WRITEABLE", &url).send()?; let resp = fetch!(b"GET", &url)
.basic_auth("user", Some("pass"))
.send()?;
assert_eq!(resp.status(), 401); assert_eq!(resp.status(), 401);
let resp = fetch!(b"WRITEABLE", &url).send_with_digest_auth("user2", "pass2")?; let resp = fetch!(b"GET", &url).send()?;
assert_eq!(resp.status(), 403);
let resp = fetch!(b"WRITEABLE", &url).send_with_digest_auth("user", "pass")?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
let resp = fetch!(b"DELETE", &url)
.basic_auth("user", Some("pass"))
.send()?;
assert_eq!(resp.status(), 401);
Ok(())
}
/// CHECKAUTH must reject anonymous requests and accept every configured account.
#[rstest]
fn auth_check(
    #[with(&["--auth", "user:pass@/:rw", "--auth", "user2:pass2@/", "-A"])] server: TestServer,
) -> Result<(), Error> {
    let url = server.url().to_string();
    // Anonymous probe is rejected.
    let resp = fetch!(b"CHECKAUTH", &url).send()?;
    assert_eq!(resp.status(), 401);
    // Both configured accounts pass the check.
    for (user, pass) in [("user", "pass"), ("user2", "pass2")] {
        let resp = send_with_digest_auth(fetch!(b"CHECKAUTH", &url), user, pass)?;
        assert_eq!(resp.status(), 200);
    }
    Ok(())
}
/// Same as `auth_check`, but with both accounts declared in one compact `|`-separated rule.
#[rstest]
fn auth_check2(
    #[with(&["--auth", "user:pass@/:rw|user2:pass2@/", "-A"])] server: TestServer,
) -> Result<(), Error> {
    let url = server.url().to_string();
    // Anonymous probe is rejected.
    let resp = fetch!(b"CHECKAUTH", &url).send()?;
    assert_eq!(resp.status(), 401);
    // Both accounts from the compact rule pass the check.
    for (user, pass) in [("user", "pass"), ("user2", "pass2")] {
        let resp = send_with_digest_auth(fetch!(b"CHECKAUTH", &url), user, pass)?;
        assert_eq!(resp.status(), 200);
    }
    Ok(())
}
/// A dir granted to anonymous passes CHECKAUTH, but `?login` still forces a challenge.
#[rstest]
fn auth_check3(
    #[with(&["--auth", "user:pass@/:rw", "--auth", "@/dir1:rw", "-A"])] server: TestServer,
) -> Result<(), Error> {
    let base = format!("{}dir1/", server.url());
    let login_url = format!("{base}?login");
    // Anonymous access to dir1 is allowed by the `@/dir1:rw` rule.
    assert_eq!(fetch!(b"CHECKAUTH", &base).send()?.status(), 200);
    // Explicit `?login` must still request credentials.
    assert_eq!(fetch!(b"CHECKAUTH", &login_url).send()?.status(), 401);
    Ok(())
}
#[rstest]
fn auth_logout(
#[with(&["--auth", "user:pass@/:rw", "-A"])] server: TestServer,
) -> Result<(), Error> {
let url = format!("{}index.html", server.url());
let resp = fetch!(b"LOGOUT", &url).send()?;
assert_eq!(resp.status(), 401);
let resp = send_with_digest_auth(fetch!(b"LOGOUT", &url), "user", "pass")?;
assert_eq!(resp.status(), 401);
Ok(()) Ok(())
} }
@@ -134,29 +202,10 @@ fn auth_readonly(
let url = format!("{}index.html", server.url()); let url = format!("{}index.html", server.url());
let resp = fetch!(b"GET", &url).send()?; let resp = fetch!(b"GET", &url).send()?;
assert_eq!(resp.status(), 401); assert_eq!(resp.status(), 401);
let resp = fetch!(b"GET", &url).send_with_digest_auth("user2", "pass2")?; let resp = send_with_digest_auth(fetch!(b"GET", &url), "user2", "pass2")?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
let url = format!("{}file1", server.url()); let url = format!("{}file1", server.url());
let resp = fetch!(b"PUT", &url) let resp = send_with_digest_auth(fetch!(b"PUT", &url).body(b"abc".to_vec()), "user2", "pass2")?;
.body(b"abc".to_vec())
.send_with_digest_auth("user2", "pass2")?;
assert_eq!(resp.status(), 403);
Ok(())
}
#[rstest]
fn auth_forbidden(
#[with(&["--auth", "user:pass@/:rw,/dir1:-", "-A"])] server: TestServer,
) -> Result<(), Error> {
let url = format!("{}file1", server.url());
let resp = fetch!(b"PUT", &url)
.body(b"abc".to_vec())
.send_with_digest_auth("user", "pass")?;
assert_eq!(resp.status(), 201);
let url = format!("{}dir1/file1", server.url());
let resp = fetch!(b"PUT", &url)
.body(b"abc".to_vec())
.send_with_digest_auth("user", "pass")?;
assert_eq!(resp.status(), 403); assert_eq!(resp.status(), 403);
Ok(()) Ok(())
} }
@@ -169,13 +218,9 @@ fn auth_nest(
let url = format!("{}dir1/file1", server.url()); let url = format!("{}dir1/file1", server.url());
let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?; let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
assert_eq!(resp.status(), 401); assert_eq!(resp.status(), 401);
let resp = fetch!(b"PUT", &url) let resp = send_with_digest_auth(fetch!(b"PUT", &url).body(b"abc".to_vec()), "user3", "pass3")?;
.body(b"abc".to_vec())
.send_with_digest_auth("user3", "pass3")?;
assert_eq!(resp.status(), 201); assert_eq!(resp.status(), 201);
let resp = fetch!(b"PUT", &url) let resp = send_with_digest_auth(fetch!(b"PUT", &url).body(b"abc".to_vec()), "user", "pass")?;
.body(b"abc".to_vec())
.send_with_digest_auth("user", "pass")?;
assert_eq!(resp.status(), 201); assert_eq!(resp.status(), 201);
Ok(()) Ok(())
} }
@@ -217,9 +262,11 @@ fn auth_webdav_move(
) -> Result<(), Error> { ) -> Result<(), Error> {
let origin_url = format!("{}dir1/test.html", server.url()); let origin_url = format!("{}dir1/test.html", server.url());
let new_url = format!("{}test2.html", server.url()); let new_url = format!("{}test2.html", server.url());
let resp = fetch!(b"MOVE", &origin_url) let resp = send_with_digest_auth(
.header("Destination", &new_url) fetch!(b"MOVE", &origin_url).header("Destination", &new_url),
.send_with_digest_auth("user3", "pass3")?; "user3",
"pass3",
)?;
assert_eq!(resp.status(), 403); assert_eq!(resp.status(), 403);
Ok(()) Ok(())
} }
@@ -231,9 +278,11 @@ fn auth_webdav_copy(
) -> Result<(), Error> { ) -> Result<(), Error> {
let origin_url = format!("{}dir1/test.html", server.url()); let origin_url = format!("{}dir1/test.html", server.url());
let new_url = format!("{}test2.html", server.url()); let new_url = format!("{}test2.html", server.url());
let resp = fetch!(b"COPY", &origin_url) let resp = send_with_digest_auth(
.header("Destination", &new_url) fetch!(b"COPY", &origin_url).header("Destination", &new_url),
.send_with_digest_auth("user3", "pass3")?; "user3",
"pass3",
)?;
assert_eq!(resp.status(), 403); assert_eq!(resp.status(), 403);
Ok(()) Ok(())
} }
@@ -245,7 +294,7 @@ fn auth_path_prefix(
let url = format!("{}xyz/index.html", server.url()); let url = format!("{}xyz/index.html", server.url());
let resp = fetch!(b"GET", &url).send()?; let resp = fetch!(b"GET", &url).send()?;
assert_eq!(resp.status(), 401); assert_eq!(resp.status(), 401);
let resp = fetch!(b"GET", &url).send_with_digest_auth("user", "pass")?; let resp = send_with_digest_auth(fetch!(b"GET", &url), "user", "pass")?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
Ok(()) Ok(())
} }
@@ -254,12 +303,15 @@ fn auth_path_prefix(
fn auth_partial_index( fn auth_partial_index(
#[with(&["--auth", "user:pass@/dir1:rw,/dir2:rw", "-A"])] server: TestServer, #[with(&["--auth", "user:pass@/dir1:rw,/dir2:rw", "-A"])] server: TestServer,
) -> Result<(), Error> { ) -> Result<(), Error> {
let resp = fetch!(b"GET", server.url()).send_with_digest_auth("user", "pass")?; let resp = send_with_digest_auth(fetch!(b"GET", server.url()), "user", "pass")?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
let paths = utils::retrieve_index_paths(&resp.text()?); let paths = utils::retrieve_index_paths(&resp.text()?);
assert_eq!(paths, IndexSet::from(["dir1/".into(), "dir2/".into()])); assert_eq!(paths, IndexSet::from(["dir1/".into(), "dir2/".into()]));
let resp = fetch!(b"GET", format!("{}?q={}", server.url(), "test.html")) let resp = send_with_digest_auth(
.send_with_digest_auth("user", "pass")?; fetch!(b"GET", format!("{}?q={}", server.url(), "test.html")),
"user",
"pass",
)?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
let paths = utils::retrieve_index_paths(&resp.text()?); let paths = utils::retrieve_index_paths(&resp.text()?);
assert_eq!( assert_eq!(
@@ -286,7 +338,7 @@ fn auth_propfind_dir(
#[with(&["--auth", "admin:admin@/:rw", "--auth", "user:pass@/dir-assets", "-A"])] #[with(&["--auth", "admin:admin@/:rw", "--auth", "user:pass@/dir-assets", "-A"])]
server: TestServer, server: TestServer,
) -> Result<(), Error> { ) -> Result<(), Error> {
let resp = fetch!(b"PROPFIND", server.url()).send_with_digest_auth("user", "pass")?; let resp = send_with_digest_auth(fetch!(b"PROPFIND", server.url()), "user", "pass")?;
assert_eq!(resp.status(), 207); assert_eq!(resp.status(), 207);
let body = resp.text()?; let body = resp.text()?;
assert!(body.contains("<D:href>/dir-assets/</D:href>")); assert!(body.contains("<D:href>/dir-assets/</D:href>"));
@@ -300,34 +352,45 @@ fn auth_data(
) -> Result<(), Error> { ) -> Result<(), Error> {
let resp = reqwest::blocking::get(server.url())?; let resp = reqwest::blocking::get(server.url())?;
let content = resp.text()?; let content = resp.text()?;
let json = utils::retrive_json(&content).unwrap(); let json = utils::retrieve_json(&content).unwrap();
assert_eq!(json["allow_delete"], serde_json::Value::Bool(false)); assert_eq!(json["allow_delete"], serde_json::Value::Bool(false));
assert_eq!(json["allow_upload"], serde_json::Value::Bool(false)); assert_eq!(json["allow_upload"], serde_json::Value::Bool(false));
let resp = fetch!(b"GET", server.url()) let resp = fetch!(b"GET", server.url())
.basic_auth("user", Some("pass")) .basic_auth("user", Some("pass"))
.send()?; .send()?;
let content = resp.text()?; let content = resp.text()?;
let json = utils::retrive_json(&content).unwrap(); let json = utils::retrieve_json(&content).unwrap();
assert_eq!(json["allow_delete"], serde_json::Value::Bool(true)); assert_eq!(json["allow_delete"], serde_json::Value::Bool(true));
assert_eq!(json["allow_upload"], serde_json::Value::Bool(true)); assert_eq!(json["allow_upload"], serde_json::Value::Bool(true));
Ok(()) Ok(())
} }
#[rstest] #[rstest]
fn auth_precedence( fn auth_shadow(
#[with(&["--auth", "user:pass@/dir1:rw,/dir1/test.txt", "-A"])] server: TestServer, #[with(&["--auth", "user:pass@/:rw", "-a", "@/dir1", "-A"])] server: TestServer,
) -> Result<(), Error> { ) -> Result<(), Error> {
let url = format!("{}dir1/test.txt", server.url()); let url = format!("{}dir1/test.txt", server.url());
let resp = fetch!(b"PUT", &url) let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
.body(b"abc".to_vec()) assert_eq!(resp.status(), 401);
.send_with_digest_auth("user", "pass")?;
assert_eq!(resp.status(), 403);
let url = format!("{}dir1/file1", server.url()); let resp = send_with_digest_auth(fetch!(b"PUT", &url).body(b"abc".to_vec()), "user", "pass")?;
let resp = fetch!(b"PUT", &url)
.body(b"abc".to_vec())
.send_with_digest_auth("user", "pass")?;
assert_eq!(resp.status(), 201); assert_eq!(resp.status(), 201);
Ok(()) Ok(())
} }
/// A token generated via `?tokengen` (with basic auth) grants access through
/// the `?token=` query parameter without further credentials.
#[rstest]
fn token_auth(#[with(&["-a", "user:pass@/"])] server: TestServer) -> Result<(), Error> {
    // Plain request is rejected.
    let plain_url = format!("{}index.html", server.url());
    assert_eq!(fetch!(b"GET", &plain_url).send()?.status(), 401);
    // Generate a token using basic auth.
    let tokengen_url = format!("{}index.html?tokengen", server.url());
    let token = fetch!(b"GET", &tokengen_url)
        .basic_auth("user", Some("pass"))
        .send()?
        .text()?;
    // The token alone authorizes the download.
    let token_url = format!("{}index.html?token={token}", server.url());
    assert_eq!(fetch!(b"GET", &token_url).send()?.status(), 200);
    Ok(())
}

View File

@@ -12,7 +12,7 @@ use std::process::{Command, Stdio};
#[rstest] #[rstest]
#[case(&["-b", "20.205.243.166"])] #[case(&["-b", "20.205.243.166"])]
fn bind_fails(tmpdir: TempDir, port: u16, #[case] args: &[&str]) -> Result<(), Error> { fn bind_fails(tmpdir: TempDir, port: u16, #[case] args: &[&str]) -> Result<(), Error> {
Command::cargo_bin("dufs")? Command::new(assert_cmd::cargo::cargo_bin!())
.arg(tmpdir.path()) .arg(tmpdir.path())
.arg("-p") .arg("-p")
.arg(port.to_string()) .arg(port.to_string())
@@ -49,7 +49,7 @@ fn bind_ipv4_ipv6(
#[case(&[] as &[&str])] #[case(&[] as &[&str])]
#[case(&["--path-prefix", "/prefix"])] #[case(&["--path-prefix", "/prefix"])]
fn validate_printed_urls(tmpdir: TempDir, port: u16, #[case] args: &[&str]) -> Result<(), Error> { fn validate_printed_urls(tmpdir: TempDir, port: u16, #[case] args: &[&str]) -> Result<(), Error> {
let mut child = Command::cargo_bin("dufs")? let mut child = Command::new(assert_cmd::cargo::cargo_bin!())
.arg(tmpdir.path()) .arg(tmpdir.path())
.arg("-p") .arg("-p")
.arg(port.to_string()) .arg(port.to_string())

80
tests/cache.rs Normal file
View File

@@ -0,0 +1,80 @@
mod fixtures;
mod utils;
use chrono::{DateTime, Duration};
use fixtures::{server, Error, TestServer};
use reqwest::header::{
HeaderName, ETAG, IF_MATCH, IF_MODIFIED_SINCE, IF_NONE_MATCH, IF_UNMODIFIED_SINCE,
LAST_MODIFIED,
};
use reqwest::StatusCode;
use rstest::rstest;
/// Conditional GET with `If-Modified-Since` / `If-Unmodified-Since`:
/// the request date is derived from the file's real `Last-Modified` header
/// plus a per-case offset, and must yield the cased status code.
#[rstest]
#[case(IF_UNMODIFIED_SINCE, Duration::days(1), StatusCode::OK)]
#[case(IF_UNMODIFIED_SINCE, Duration::days(0), StatusCode::OK)]
#[case(IF_UNMODIFIED_SINCE, Duration::days(-1), StatusCode::PRECONDITION_FAILED)]
#[case(IF_MODIFIED_SINCE, Duration::days(1), StatusCode::NOT_MODIFIED)]
#[case(IF_MODIFIED_SINCE, Duration::days(0), StatusCode::NOT_MODIFIED)]
#[case(IF_MODIFIED_SINCE, Duration::days(-1), StatusCode::OK)]
fn get_file_with_if_modified_since_condition(
    #[case] header_condition: HeaderName,
    #[case] duration_after_file_modified: Duration,
    #[case] expected_code: StatusCode,
    server: TestServer,
) -> Result<(), Error> {
    // Learn the file's actual modification time via a HEAD request.
    let resp = fetch!(b"HEAD", format!("{}index.html", server.url())).send()?;
    let last_modified = resp
        .headers()
        .get(LAST_MODIFIED)
        .and_then(|h| h.to_str().ok())
        .and_then(|s| DateTime::parse_from_rfc2822(s).ok())
        .expect("Received no valid last modified header");
    // Shift by the case's offset and render as an HTTP-date (fixed GMT suffix).
    let req_modified_time = (last_modified + duration_after_file_modified)
        .format("%a, %d %b %Y %T GMT")
        .to_string();
    let resp = fetch!(b"GET", format!("{}index.html", server.url()))
        .header(header_condition, req_modified_time)
        .send()?;
    assert_eq!(resp.status(), expected_code);
    Ok(())
}
/// Identity etag modifier: returns the tag unchanged (as an owned `String`).
fn same_etag(etag: &str) -> String {
    String::from(etag)
}
/// Etag modifier that appends a suffix, producing a tag that cannot match.
fn different_etag(etag: &str) -> String {
    let mut tag = etag.to_owned();
    tag.push_str("1234");
    tag
}
/// Conditional GET with `If-Match` / `If-None-Match`: the request etag is
/// derived from the file's real `ETag` (identity or mutated by the cased
/// modifier) and must yield the cased status code.
#[rstest]
#[case(IF_MATCH, same_etag, StatusCode::OK)]
#[case(IF_MATCH, different_etag, StatusCode::PRECONDITION_FAILED)]
#[case(IF_NONE_MATCH, same_etag, StatusCode::NOT_MODIFIED)]
#[case(IF_NONE_MATCH, different_etag, StatusCode::OK)]
fn get_file_with_etag_match(
    #[case] header_condition: HeaderName,
    #[case] etag_modifier: fn(&str) -> String,
    #[case] expected_code: StatusCode,
    server: TestServer,
) -> Result<(), Error> {
    // Learn the file's actual etag via a HEAD request.
    let resp = fetch!(b"HEAD", format!("{}index.html", server.url())).send()?;
    let etag = resp
        .headers()
        .get(ETAG)
        .and_then(|h| h.to_str().ok())
        .expect("Received no valid etag header");
    let resp = fetch!(b"GET", format!("{}index.html", server.url()))
        .header(header_condition, etag_modifier(etag))
        .send()?;
    assert_eq!(resp.status(), expected_code);
    Ok(())
}

View File

@@ -11,7 +11,10 @@ use std::process::Command;
#[test] #[test]
/// Show help and exit. /// Show help and exit.
fn help_shows() -> Result<(), Error> { fn help_shows() -> Result<(), Error> {
Command::cargo_bin("dufs")?.arg("-h").assert().success(); Command::new(assert_cmd::cargo::cargo_bin!())
.arg("-h")
.assert()
.success();
Ok(()) Ok(())
} }
@@ -21,7 +24,7 @@ fn help_shows() -> Result<(), Error> {
fn print_completions() -> Result<(), Error> { fn print_completions() -> Result<(), Error> {
// let shell_enums = EnumValueParser::<Shell>::new(); // let shell_enums = EnumValueParser::<Shell>::new();
for shell in Shell::value_variants() { for shell in Shell::value_variants() {
Command::cargo_bin("dufs")? Command::new(assert_cmd::cargo::cargo_bin!())
.arg("--completions") .arg("--completions")
.arg(shell.to_string()) .arg(shell.to_string())
.assert() .assert()

View File

@@ -1,9 +1,9 @@
mod digest_auth_util;
mod fixtures; mod fixtures;
mod utils; mod utils;
use assert_cmd::prelude::*;
use assert_fs::TempDir; use assert_fs::TempDir;
use diqwest::blocking::WithDigestAuth; use digest_auth_util::send_with_digest_auth;
use fixtures::{port, tmpdir, wait_for_port, Error}; use fixtures::{port, tmpdir, wait_for_port, Error};
use rstest::rstest; use rstest::rstest;
use std::path::PathBuf; use std::path::PathBuf;
@@ -12,7 +12,7 @@ use std::process::{Command, Stdio};
#[rstest] #[rstest]
fn use_config_file(tmpdir: TempDir, port: u16) -> Result<(), Error> { fn use_config_file(tmpdir: TempDir, port: u16) -> Result<(), Error> {
let config_path = get_config_path().display().to_string(); let config_path = get_config_path().display().to_string();
let mut child = Command::cargo_bin("dufs")? let mut child = Command::new(assert_cmd::cargo::cargo_bin!())
.arg(tmpdir.path()) .arg(tmpdir.path())
.arg("-p") .arg("-p")
.arg(port.to_string()) .arg(port.to_string())
@@ -27,20 +27,18 @@ fn use_config_file(tmpdir: TempDir, port: u16) -> Result<(), Error> {
assert_eq!(resp.status(), 401); assert_eq!(resp.status(), 401);
let url = format!("http://localhost:{port}/dufs/index.html"); let url = format!("http://localhost:{port}/dufs/index.html");
let resp = fetch!(b"GET", &url).send_with_digest_auth("user", "pass")?; let resp = send_with_digest_auth(fetch!(b"GET", &url), "user", "pass")?;
assert_eq!(resp.text()?, "This is index.html"); assert_eq!(resp.text()?, "This is index.html");
let url = format!("http://localhost:{port}/dufs?simple"); let url = format!("http://localhost:{port}/dufs?simple");
let resp = fetch!(b"GET", &url).send_with_digest_auth("user", "pass")?; let resp = send_with_digest_auth(fetch!(b"GET", &url), "user", "pass")?;
let text: String = resp.text().unwrap(); let text: String = resp.text().unwrap();
assert!(text.split('\n').any(|c| c == "dir1/")); assert!(text.split('\n').any(|c| c == "dir1/"));
assert!(!text.split('\n').any(|c| c == "dir3/")); assert!(!text.split('\n').any(|c| c == "dir3/"));
assert!(!text.split('\n').any(|c| c == "test.txt")); assert!(!text.split('\n').any(|c| c == "test.txt"));
let url = format!("http://localhost:{port}/dufs/dir1/upload.txt"); let url = format!("http://localhost:{port}/dufs/dir1/upload.txt");
let resp = fetch!(b"PUT", &url) let resp = send_with_digest_auth(fetch!(b"PUT", &url).body("Hello"), "user", "pass")?;
.body("Hello")
.send_with_digest_auth("user", "pass")?;
assert_eq!(resp.status(), 201); assert_eq!(resp.status(), 201);
child.kill()?; child.kill()?;

91
tests/digest_auth_util.rs Normal file
View File

@@ -0,0 +1,91 @@
/// Refs https://github.dev/maoertel/diqwest/blob/main/src/blocking.rs
use anyhow::{anyhow, Result};
use digest_auth::{AuthContext, AuthorizationHeader, HttpMethod};
use hyper::{header::AUTHORIZATION, HeaderMap, StatusCode};
use reqwest::blocking::{RequestBuilder, Response};
use url::Position;
/// Sends the request; on a 401 challenge, retries once with a computed
/// digest `Authorization` header. Any other status is returned as-is.
pub fn send_with_digest_auth(
    request_builder: RequestBuilder,
    username: &str,
    password: &str,
) -> Result<Response> {
    // First attempt without credentials, keeping the builder for a retry.
    let initial = try_clone_request_builder(&request_builder)?.send()?;
    if initial.status() == StatusCode::UNAUTHORIZED {
        try_digest_auth(request_builder, initial, username, password)
    } else {
        Ok(initial)
    }
}
/// Answers a 401 challenge: computes the digest response from the
/// `www-authenticate` header of `first_response` and resends the request.
fn try_digest_auth(
    request_builder: RequestBuilder,
    first_response: Response,
    username: &str,
    password: &str,
) -> Result<Response> {
    let answer = get_answer(&request_builder, first_response.headers(), username, password)?;
    match answer {
        Some(header) => Ok(request_builder
            .header(AUTHORIZATION, header.to_header_string())
            .send()?),
        // No answer computed: hand back the original 401 response.
        None => Ok(first_response),
    }
}
/// Clones a `RequestBuilder`; fails when the body is a stream, which
/// reqwest cannot clone.
fn try_clone_request_builder(request_builder: &RequestBuilder) -> Result<RequestBuilder> {
    match request_builder.try_clone() {
        Some(cloned) => Ok(cloned),
        None => Err(anyhow!("Request body must not be a stream")),
    }
}
/// Computes the digest `Authorization` answer for the request.
///
/// NOTE(review): this never yields `Ok(None)` — the `Option` is kept only to
/// preserve the interface relied on by `try_digest_auth`. The previous
/// `match answer { Ok(a) => Ok(Some(a)), Err(e) => Err(e) }` was a no-op
/// re-wrap and is replaced by `.map(Some)`.
fn get_answer(
    request_builder: &RequestBuilder,
    first_response: &HeaderMap,
    username: &str,
    password: &str,
) -> Result<Option<AuthorizationHeader>> {
    calculate_answer(request_builder, first_response, username, password).map(Some)
}
/// Builds the digest authorization header for the request described by
/// `request_builder`, using the server challenge found in `headers`.
fn calculate_answer(
    request_builder: &RequestBuilder,
    headers: &HeaderMap,
    username: &str,
    password: &str,
) -> Result<AuthorizationHeader> {
    // Clone so the caller's builder remains usable for the actual resend.
    let request = try_clone_request_builder(request_builder)?.build()?;
    // Digest "uri" field: everything after host:port (path + query).
    let path = &request.url()[Position::AfterPort..];
    let method = HttpMethod::from(request.method().as_str());
    // Body bytes are only available for non-streaming bodies.
    let body = request.body().and_then(|b| b.as_bytes());
    parse_digest_auth_header(headers, path, method, body, username, password)
}
/// Parses the server's `www-authenticate` challenge and computes the
/// matching digest response for the given credentials.
fn parse_digest_auth_header(
    header: &HeaderMap,
    path: &str,
    method: HttpMethod,
    body: Option<&[u8]>,
    username: &str,
    password: &str,
) -> Result<AuthorizationHeader> {
    let www_auth = header
        .get("www-authenticate")
        .ok_or_else(|| anyhow!("The header 'www-authenticate' is missing."))?
        .to_str()?;
    // Body participates in the digest only when the challenge demands it
    // (auth-int qop) — presumably handled inside `digest_auth`; see its docs.
    let context = AuthContext::new_with_method(username, password, path, body, method);
    let mut prompt = digest_auth::parse(www_auth)?;
    Ok(prompt.respond(&context)?)
}

View File

@@ -1,4 +1,3 @@
use assert_cmd::prelude::*;
use assert_fs::fixture::TempDir; use assert_fs::fixture::TempDir;
use assert_fs::prelude::*; use assert_fs::prelude::*;
use port_check::free_local_port; use port_check::free_local_port;
@@ -65,7 +64,7 @@ pub fn tmpdir() -> TempDir {
if *directory == DIR_ASSETS { if *directory == DIR_ASSETS {
tmpdir tmpdir
.child(format!("{}{}", directory, "index.html")) .child(format!("{}{}", directory, "index.html"))
.write_str("__ASSETS_PREFIX__index.js;DATA = __INDEX_DATA__") .write_str("__ASSETS_PREFIX__index.js;<template id=\"index-data\">__INDEX_DATA__</template>")
.unwrap(); .unwrap();
} else { } else {
for file in FILES { for file in FILES {
@@ -129,8 +128,7 @@ where
{ {
let port = port(); let port = port();
let tmpdir = tmpdir(); let tmpdir = tmpdir();
let child = Command::cargo_bin("dufs") let child = Command::new(assert_cmd::cargo::cargo_bin!())
.expect("Couldn't find test binary")
.arg(tmpdir.path()) .arg(tmpdir.path())
.arg("-p") .arg("-p")
.arg(port.to_string()) .arg(port.to_string())
@@ -146,14 +144,14 @@ where
TestServer::new(port, tmpdir, child, is_tls) TestServer::new(port, tmpdir, child, is_tls)
} }
/// Wait a max of 1s for the port to become available. /// Wait a max of 2s for the port to become available.
pub fn wait_for_port(port: u16) { pub fn wait_for_port(port: u16) {
let start_wait = Instant::now(); let start_wait = Instant::now();
while !port_check::is_port_reachable(format!("localhost:{port}")) { while !port_check::is_port_reachable(format!("localhost:{port}")) {
sleep(Duration::from_millis(100)); sleep(Duration::from_millis(250));
if start_wait.elapsed().as_secs() > 1 { if start_wait.elapsed().as_secs() > 2 {
panic!("timeout waiting for port {port}"); panic!("timeout waiting for port {port}");
} }
} }

31
tests/health.rs Normal file
View File

@@ -0,0 +1,31 @@
mod fixtures;
mod utils;
use fixtures::{server, Error, TestServer};
use rstest::rstest;
// Path of dufs's built-in health-check endpoint (relative to the serve root).
const HEALTH_CHECK_PATH: &str = "__dufs__/health";
// Exact JSON body the health endpoint is expected to return.
const HEALTH_CHECK_RESPONSE: &str = r#"{"status":"OK"}"#;
/// The health endpoint returns the expected JSON on a default server.
#[rstest]
fn normal_health(server: TestServer) -> Result<(), Error> {
    let health_url = format!("{}{HEALTH_CHECK_PATH}", server.url());
    let body = reqwest::blocking::get(health_url)?.text()?;
    assert_eq!(body, HEALTH_CHECK_RESPONSE);
    Ok(())
}
/// The health endpoint stays reachable without credentials even when the
/// whole tree requires auth.
#[rstest]
fn auth_health(
    #[with(&["--auth", "user:pass@/:rw", "-A"])] server: TestServer,
) -> Result<(), Error> {
    let health_url = format!("{}{HEALTH_CHECK_PATH}", server.url());
    let body = reqwest::blocking::get(health_url)?.text()?;
    assert_eq!(body, HEALTH_CHECK_RESPONSE);
    Ok(())
}
/// The health endpoint is served under the configured path prefix.
#[rstest]
fn path_prefix_health(#[with(&["--path-prefix", "xyz"])] server: TestServer) -> Result<(), Error> {
    let health_url = format!("{}xyz/{HEALTH_CHECK_PATH}", server.url());
    let body = reqwest::blocking::get(health_url)?.text()?;
    assert_eq!(body, HEALTH_CHECK_RESPONSE);
    Ok(())
}

View File

@@ -59,7 +59,7 @@ fn hidden_search_dir(#[case] server: TestServer, #[case] exist: bool) -> Result<
#[rstest] #[rstest]
#[case(server(&["--hidden", "hidden/"]), "dir4/", 1)] #[case(server(&["--hidden", "hidden/"]), "dir4/", 1)]
#[case(server(&["--hidden", "hidden"]), "dir4/", 0)] #[case(server(&["--hidden", "hidden"]), "dir4/", 0)]
fn hidden_dir_noly( fn hidden_dir_only(
#[case] server: TestServer, #[case] server: TestServer,
#[case] dir: &str, #[case] dir: &str,
#[case] count: usize, #[case] count: usize,

View File

@@ -4,7 +4,7 @@ mod utils;
use fixtures::{server, Error, TestServer, BIN_FILE}; use fixtures::{server, Error, TestServer, BIN_FILE};
use rstest::rstest; use rstest::rstest;
use serde_json::Value; use serde_json::Value;
use utils::retrive_edit_file; use utils::retrieve_edit_file;
#[rstest] #[rstest]
fn get_dir(server: TestServer) -> Result<(), Error> { fn get_dir(server: TestServer) -> Result<(), Error> {
@@ -82,6 +82,19 @@ fn get_dir_simple(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
Ok(()) Ok(())
} }
/// `?noscript` renders the directory listing as a plain HTML table.
#[rstest]
fn get_dir_noscript(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
    let noscript_url = format!("{}?noscript", server.url());
    let resp = reqwest::blocking::get(noscript_url)?;
    assert_eq!(resp.status(), 200);
    let content_type = resp.headers().get("content-type").unwrap();
    assert_eq!(content_type, "text/html; charset=utf-8");
    // The listing should contain a table row linking to index.html.
    let text = resp.text().unwrap();
    assert!(text.contains(r#"<td><a href="index.html">index.html</a></td>"#));
    Ok(())
}
#[rstest] #[rstest]
fn head_dir_zip(#[with(&["-A"])] server: TestServer) -> Result<(), Error> { fn head_dir_zip(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
let resp = fetch!(b"HEAD", format!("{}?zip", server.url())).send()?; let resp = fetch!(b"HEAD", format!("{}?zip", server.url())).send()?;
@@ -189,6 +202,28 @@ fn head_file(server: TestServer) -> Result<(), Error> {
Ok(()) Ok(())
} }
/// With `--allow-hash`, `?hash` returns the file's hash as plain text.
#[rstest]
fn hash_file(#[with(&["--allow-hash"])] server: TestServer) -> Result<(), Error> {
    let hash_url = format!("{}index.html?hash", server.url());
    let resp = reqwest::blocking::get(hash_url)?;
    let content_type = resp.headers().get("content-type").unwrap();
    assert_eq!(content_type, "text/html; charset=utf-8");
    assert_eq!(resp.status(), 200);
    // Expected digest of the fixture's index.html content.
    let expected = "c8dd395e3202674b9512f7b7f956e0d96a8ba8f572e785b0d5413ab83766dbc4";
    assert_eq!(resp.text()?, expected);
    Ok(())
}
/// Without `--allow-hash`, the `?hash` query is forbidden.
#[rstest]
fn no_hash_file(server: TestServer) -> Result<(), Error> {
    let hash_url = format!("{}index.html?hash", server.url());
    let resp = reqwest::blocking::get(hash_url)?;
    assert_eq!(resp.status(), 403);
    Ok(())
}
#[rstest] #[rstest]
fn get_file_404(server: TestServer) -> Result<(), Error> { fn get_file_404(server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(format!("{}404", server.url()))?; let resp = reqwest::blocking::get(format!("{}404", server.url()))?;
@@ -223,7 +258,7 @@ fn get_file_newline_path(server: TestServer) -> Result<(), Error> {
fn get_file_edit(server: TestServer) -> Result<(), Error> { fn get_file_edit(server: TestServer) -> Result<(), Error> {
let resp = fetch!(b"GET", format!("{}index.html?edit", server.url())).send()?; let resp = fetch!(b"GET", format!("{}index.html?edit", server.url())).send()?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
let editable = retrive_edit_file(&resp.text().unwrap()).unwrap(); let editable = retrieve_edit_file(&resp.text().unwrap()).unwrap();
assert!(editable); assert!(editable);
Ok(()) Ok(())
} }
@@ -232,7 +267,7 @@ fn get_file_edit(server: TestServer) -> Result<(), Error> {
fn get_file_edit_bin(server: TestServer) -> Result<(), Error> { fn get_file_edit_bin(server: TestServer) -> Result<(), Error> {
let resp = fetch!(b"GET", format!("{}{BIN_FILE}?edit", server.url())).send()?; let resp = fetch!(b"GET", format!("{}{BIN_FILE}?edit", server.url())).send()?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
let editable = retrive_edit_file(&resp.text().unwrap()).unwrap(); let editable = retrieve_edit_file(&resp.text().unwrap()).unwrap();
assert!(!editable); assert!(!editable);
Ok(()) Ok(())
} }
@@ -250,12 +285,9 @@ fn options_dir(server: TestServer) -> Result<(), Error> {
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
assert_eq!( assert_eq!(
resp.headers().get("allow").unwrap(), resp.headers().get("allow").unwrap(),
"GET,HEAD,PUT,OPTIONS,DELETE,PATCH,PROPFIND,COPY,MOVE" "GET,HEAD,PUT,OPTIONS,DELETE,PATCH,PROPFIND,COPY,MOVE,CHECKAUTH,LOGOUT"
);
assert_eq!(
resp.headers().get("dav").unwrap(),
"1, 2, 3, sabredav-partialupdate"
); );
assert_eq!(resp.headers().get("dav").unwrap(), "1, 2, 3");
Ok(()) Ok(())
} }

View File

@@ -1,10 +1,10 @@
mod digest_auth_util;
mod fixtures; mod fixtures;
mod utils; mod utils;
use diqwest::blocking::WithDigestAuth; use digest_auth_util::send_with_digest_auth;
use fixtures::{port, tmpdir, wait_for_port, Error}; use fixtures::{port, tmpdir, wait_for_port, Error};
use assert_cmd::prelude::*;
use assert_fs::fixture::TempDir; use assert_fs::fixture::TempDir;
use rstest::rstest; use rstest::rstest;
use std::io::Read; use std::io::Read;
@@ -19,7 +19,7 @@ fn log_remote_user(
#[case] args: &[&str], #[case] args: &[&str],
#[case] is_basic: bool, #[case] is_basic: bool,
) -> Result<(), Error> { ) -> Result<(), Error> {
let mut child = Command::cargo_bin("dufs")? let mut child = Command::new(assert_cmd::cargo::cargo_bin!())
.arg(tmpdir.path()) .arg(tmpdir.path())
.arg("-p") .arg("-p")
.arg(port.to_string()) .arg(port.to_string())
@@ -31,12 +31,12 @@ fn log_remote_user(
let stdout = child.stdout.as_mut().expect("Failed to get stdout"); let stdout = child.stdout.as_mut().expect("Failed to get stdout");
let req = fetch!(b"GET", &format!("http://localhost:{port}")); let req_builder = fetch!(b"GET", &format!("http://localhost:{port}"));
let resp = if is_basic { let resp = if is_basic {
req.basic_auth("user", Some("pass")).send()? req_builder.basic_auth("user", Some("pass")).send()?
} else { } else {
req.send_with_digest_auth("user", "pass")? send_with_digest_auth(req_builder, "user", "pass")?
}; };
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
@@ -54,7 +54,7 @@ fn log_remote_user(
#[rstest] #[rstest]
#[case(&["--log-format", ""])] #[case(&["--log-format", ""])]
fn no_log(tmpdir: TempDir, port: u16, #[case] args: &[&str]) -> Result<(), Error> { fn no_log(tmpdir: TempDir, port: u16, #[case] args: &[&str]) -> Result<(), Error> {
let mut child = Command::cargo_bin("dufs")? let mut child = Command::new(assert_cmd::cargo::cargo_bin!())
.arg(tmpdir.path()) .arg(tmpdir.path())
.arg("-p") .arg("-p")
.arg(port.to_string()) .arg(port.to_string())

View File

@@ -2,7 +2,7 @@ mod fixtures;
mod utils; mod utils;
use fixtures::{server, Error, TestServer}; use fixtures::{server, Error, TestServer};
use reqwest::header::HeaderValue; use reqwest::header::{HeaderMap, HeaderName, HeaderValue};
use rstest::rstest; use rstest::rstest;
#[rstest] #[rstest]
@@ -39,3 +39,68 @@ fn get_file_range_invalid(server: TestServer) -> Result<(), Error> {
assert_eq!(resp.headers().get("content-range").unwrap(), "bytes */18"); assert_eq!(resp.headers().get("content-range").unwrap(), "bytes */18");
Ok(()) Ok(())
} }
/// Splits a `multipart/byteranges` body at `--{boundary}` and returns each
/// part's headers plus its raw payload slice. Part heads and bodies are
/// separated by the first blank line (`\r\n\r\n`).
fn parse_multipart_body<'a>(body: &'a str, boundary: &str) -> Vec<(HeaderMap, &'a str)> {
    let delimiter = format!("--{boundary}");
    let mut parts = Vec::new();
    for segment in body.split(&delimiter) {
        // Skip the empty preamble and the final "--\r\n" terminator.
        if segment.is_empty() || segment == "--\r\n" {
            continue;
        }
        let (raw_headers, payload) = segment.trim_ascii().split_once("\r\n\r\n").unwrap();
        let mut headers = HeaderMap::new();
        for line in raw_headers.split("\r\n") {
            let (name, value) = line.split_once(":").unwrap();
            headers.insert(
                HeaderName::from_bytes(name.as_bytes()).unwrap(),
                HeaderValue::from_str(value.trim_ascii_start()).unwrap(),
            );
        }
        parts.push((headers, payload));
    }
    parts
}
/// A GET with two byte ranges must return 206 with a `multipart/byteranges`
/// body whose parts carry the requested slices of the 18-byte index.html.
#[rstest]
fn get_file_multipart_range(server: TestServer) -> Result<(), Error> {
    let resp = fetch!(b"GET", format!("{}index.html", server.url()))
        .header("range", HeaderValue::from_static("bytes=0-11, 6-17"))
        .send()?;
    assert_eq!(resp.status(), 206);
    assert_eq!(resp.headers().get("accept-ranges").unwrap(), "bytes");
    let content_type = resp
        .headers()
        .get("content-type")
        .unwrap()
        .to_str()?
        .to_string();
    assert!(content_type.starts_with("multipart/byteranges; boundary="));
    // The boundary is whatever follows "boundary=" in the content type.
    let boundary = content_type.split_once('=').unwrap().1.trim_ascii_start();
    assert!(!boundary.is_empty());
    let body = resp.text()?;
    let parts = parse_multipart_body(&body, boundary);
    assert_eq!(parts.len(), 2);
    // First part: bytes 0-11 of "This is index.html".
    let (headers, body) = &parts[0];
    assert_eq!(headers.get("content-range").unwrap(), "bytes 0-11/18");
    assert_eq!(*body, "This is inde");
    // Second (overlapping) part: bytes 6-17.
    let (headers, body) = &parts[1];
    assert_eq!(headers.get("content-range").unwrap(), "bytes 6-17/18");
    assert_eq!(*body, "s index.html");
    Ok(())
}
/// When any requested range is unsatisfiable, the whole request fails with
/// 416 and an empty body advertising the real file size.
#[rstest]
fn get_file_multipart_range_invalid(server: TestServer) -> Result<(), Error> {
    let resp = fetch!(b"GET", format!("{}index.html", server.url()))
        .header("range", HeaderValue::from_static("bytes=0-6, 20-30"))
        .send()?;
    assert_eq!(resp.status(), 416);
    let headers = resp.headers();
    assert_eq!(headers.get("content-range").unwrap(), "bytes */18");
    assert_eq!(headers.get("accept-ranges").unwrap(), "bytes");
    assert_eq!(headers.get("content-length").unwrap(), "0");
    Ok(())
}

View File

@@ -3,7 +3,6 @@
mod fixtures; mod fixtures;
mod utils; mod utils;
use assert_cmd::prelude::*;
use assert_fs::fixture::TempDir; use assert_fs::fixture::TempDir;
use fixtures::{port, tmpdir, wait_for_port, Error}; use fixtures::{port, tmpdir, wait_for_port, Error};
use rstest::rstest; use rstest::rstest;
@@ -12,7 +11,7 @@ use std::process::{Command, Stdio};
#[rstest] #[rstest]
#[case("index.html")] #[case("index.html")]
fn single_file(tmpdir: TempDir, port: u16, #[case] file: &str) -> Result<(), Error> { fn single_file(tmpdir: TempDir, port: u16, #[case] file: &str) -> Result<(), Error> {
let mut child = Command::cargo_bin("dufs")? let mut child = Command::new(assert_cmd::cargo::cargo_bin!())
.arg(tmpdir.path().join(file)) .arg(tmpdir.path().join(file))
.arg("-p") .arg("-p")
.arg(port.to_string()) .arg(port.to_string())
@@ -35,7 +34,7 @@ fn single_file(tmpdir: TempDir, port: u16, #[case] file: &str) -> Result<(), Err
#[rstest] #[rstest]
#[case("index.html")] #[case("index.html")]
fn path_prefix_single_file(tmpdir: TempDir, port: u16, #[case] file: &str) -> Result<(), Error> { fn path_prefix_single_file(tmpdir: TempDir, port: u16, #[case] file: &str) -> Result<(), Error> {
let mut child = Command::cargo_bin("dufs")? let mut child = Command::new(assert_cmd::cargo::cargo_bin!())
.arg(tmpdir.path().join(file)) .arg(tmpdir.path().join(file))
.arg("-p") .arg("-p")
.arg(port.to_string()) .arg(port.to_string())
@@ -53,7 +52,7 @@ fn path_prefix_single_file(tmpdir: TempDir, port: u16, #[case] file: &str) -> Re
let resp = reqwest::blocking::get(format!("http://localhost:{port}/xyz/index.html"))?; let resp = reqwest::blocking::get(format!("http://localhost:{port}/xyz/index.html"))?;
assert_eq!(resp.text()?, "This is index.html"); assert_eq!(resp.text()?, "This is index.html");
let resp = reqwest::blocking::get(format!("http://localhost:{port}"))?; let resp = reqwest::blocking::get(format!("http://localhost:{port}"))?;
assert_eq!(resp.status(), 403); assert_eq!(resp.status(), 400);
child.kill()?; child.kill()?;
Ok(()) Ok(())

View File

@@ -1,7 +1,6 @@
mod fixtures; mod fixtures;
mod utils; mod utils;
use assert_cmd::Command;
use fixtures::{server, Error, TestServer}; use fixtures::{server, Error, TestServer};
use predicates::str::contains; use predicates::str::contains;
use reqwest::blocking::ClientBuilder; use reqwest::blocking::ClientBuilder;
@@ -25,7 +24,7 @@ use crate::fixtures::port;
]))] ]))]
fn tls_works(#[case] server: TestServer) -> Result<(), Error> { fn tls_works(#[case] server: TestServer) -> Result<(), Error> {
let client = ClientBuilder::new() let client = ClientBuilder::new()
.danger_accept_invalid_certs(true) .tls_danger_accept_invalid_certs(true)
.build()?; .build()?;
let resp = client.get(server.url()).send()?.error_for_status()?; let resp = client.get(server.url()).send()?.error_for_status()?;
assert_resp_paths!(resp); assert_resp_paths!(resp);
@@ -36,7 +35,7 @@ fn tls_works(#[case] server: TestServer) -> Result<(), Error> {
#[rstest] #[rstest]
fn wrong_path_cert() -> Result<(), Error> { fn wrong_path_cert() -> Result<(), Error> {
let port = port().to_string(); let port = port().to_string();
Command::cargo_bin("dufs")? assert_cmd::cargo::cargo_bin_cmd!()
.args([ .args([
"--tls-cert", "--tls-cert",
"wrong", "wrong",
@@ -47,7 +46,7 @@ fn wrong_path_cert() -> Result<(), Error> {
]) ])
.assert() .assert()
.failure() .failure()
.stderr(contains("Failed to access `wrong`")); .stderr(contains("Failed to load cert file at `wrong`"));
Ok(()) Ok(())
} }
@@ -56,7 +55,7 @@ fn wrong_path_cert() -> Result<(), Error> {
#[rstest] #[rstest]
fn wrong_path_key() -> Result<(), Error> { fn wrong_path_key() -> Result<(), Error> {
let port = port().to_string(); let port = port().to_string();
Command::cargo_bin("dufs")? assert_cmd::cargo::cargo_bin_cmd!()
.args([ .args([
"--tls-cert", "--tls-cert",
"tests/data/cert.pem", "tests/data/cert.pem",
@@ -67,7 +66,7 @@ fn wrong_path_key() -> Result<(), Error> {
]) ])
.assert() .assert()
.failure() .failure()
.stderr(contains("Failed to access `wrong`")); .stderr(contains("Failed to load key file at `wrong`"));
Ok(()) Ok(())
} }

View File

@@ -1,3 +1,4 @@
use base64::{engine::general_purpose::STANDARD, Engine as _};
use indexmap::IndexSet; use indexmap::IndexSet;
use serde_json::Value; use serde_json::Value;
@@ -26,7 +27,7 @@ macro_rules! fetch {
#[allow(dead_code)] #[allow(dead_code)]
pub fn retrieve_index_paths(content: &str) -> IndexSet<String> { pub fn retrieve_index_paths(content: &str) -> IndexSet<String> {
let value = retrive_json(content).unwrap(); let value = retrieve_json(content).unwrap();
let paths = value let paths = value
.get("paths") .get("paths")
.unwrap() .unwrap()
@@ -47,8 +48,8 @@ pub fn retrieve_index_paths(content: &str) -> IndexSet<String> {
} }
#[allow(dead_code)] #[allow(dead_code)]
pub fn retrive_edit_file(content: &str) -> Option<bool> { pub fn retrieve_edit_file(content: &str) -> Option<bool> {
let value = retrive_json(content)?; let value = retrieve_json(content).unwrap();
let value = value.get("editable").unwrap(); let value = value.get("editable").unwrap();
Some(value.as_bool().unwrap()) Some(value.as_bool().unwrap())
} }
@@ -60,10 +61,22 @@ pub fn encode_uri(v: &str) -> String {
} }
#[allow(dead_code)] #[allow(dead_code)]
pub fn retrive_json(content: &str) -> Option<Value> { pub fn retrieve_json(content: &str) -> Option<Value> {
let lines: Vec<&str> = content.lines().collect(); let lines: Vec<&str> = content.lines().collect();
let line = lines.iter().find(|v| v.contains("DATA ="))?; let start_tag = "<template id=\"index-data\">";
let line_col = line.find("DATA =").unwrap() + 6; let end_tag = "</template>";
let value: Value = line[line_col..].parse().unwrap();
let line = lines.iter().find(|v| v.contains(start_tag))?;
let start_index = line.find(start_tag)?;
let start_content_index = start_index + start_tag.len();
let end_index = line[start_content_index..].find(end_tag)?;
let end_content_index = start_content_index + end_index;
let value = &line[start_content_index..end_content_index];
let value = STANDARD.decode(value).ok()?;
let value = serde_json::from_slice(&value).ok()?;
Some(value) Some(value)
} }

View File

@@ -40,6 +40,17 @@ fn propfind_dir_depth0(server: TestServer) -> Result<(), Error> {
Ok(()) Ok(())
} }
#[rstest]
fn propfind_dir_depth2(server: TestServer) -> Result<(), Error> {
let resp = fetch!(b"PROPFIND", format!("{}dir1", server.url()))
.header("depth", "2")
.send()?;
assert_eq!(resp.status(), 400);
let body = resp.text()?;
assert_eq!(body, "Invalid depth: only 0 and 1 are allowed.");
Ok(())
}
#[rstest] #[rstest]
fn propfind_404(server: TestServer) -> Result<(), Error> { fn propfind_404(server: TestServer) -> Result<(), Error> {
let resp = fetch!(b"PROPFIND", format!("{}404", server.url())).send()?; let resp = fetch!(b"PROPFIND", format!("{}404", server.url())).send()?;
@@ -49,7 +60,7 @@ fn propfind_404(server: TestServer) -> Result<(), Error> {
#[rstest] #[rstest]
fn propfind_double_slash(server: TestServer) -> Result<(), Error> { fn propfind_double_slash(server: TestServer) -> Result<(), Error> {
let resp = fetch!(b"PROPFIND", format!("{}/", server.url())).send()?; let resp = fetch!(b"PROPFIND", server.url()).send()?;
assert_eq!(resp.status(), 207); assert_eq!(resp.status(), 207);
Ok(()) Ok(())
} }