Compare commits

..

102 Commits

Author SHA1 Message Date
sigoden
10ec34872d chore(release): version 0.33.0 2023-03-17 09:06:01 +08:00
sigoden
3ff16d254b chore: update deps 2023-03-17 08:54:38 +08:00
sigoden
29a04c8d74 refactor: improve error handle (#195) 2023-03-12 15:20:40 +08:00
sigoden
c92e45f2da fix: basic auth sometimes does not work (#194) 2023-03-12 12:58:36 +08:00
sigoden
8d7a9053e2 chore: update deps 2023-03-06 10:09:24 +08:00
sigoden
0e12b285cd fix: hidden don't works on some files (#188)
like --hidden '*.abc-cba' matches xyz.abc-cba but do not matches 123.xyz.abc-cba
2023-03-03 07:15:46 +08:00
sigoden
45f4f5fc58 feat: guess plain text encoding then set content-type charset (#186) 2023-03-01 09:36:59 +08:00
horizon
6dcb4dcd76 fix: cors allow-request-header add content-type (#184)
* fix: cors allow-request-header add content-type

* add content-type test
2023-02-27 07:28:33 +08:00
sigoden
65da9bedee chore(release): version 0.32.0 (#183) 2023-02-24 08:21:57 +08:00
sigoden
e468d823cc chore: update readme 2023-02-22 11:26:17 +08:00
sigoden
902a60563d chore: ui change edit icon 2023-02-22 10:37:54 +08:00
sigoden
f6c2ed2974 chore: optimize ui 2023-02-22 10:09:34 +08:00
sigoden
8f4cbb4826 chore: use anyhow to handle error 2023-02-21 17:23:24 +08:00
sigoden
2064d7803a chore: bump deps 2023-02-21 16:39:57 +08:00
sigoden
ad0be71557 chore: optimize for test auth 2023-02-21 16:16:49 +08:00
sigoden
6d9758c71d feat: ui improves the login experience (#182)
close #157 #158
2023-02-21 12:42:40 +08:00
sigoden
a61fda6e80 feat: support new file (#180) 2023-02-21 08:45:52 +08:00
sigoden
6625c4d3d0 chore: optimize ui 2023-02-21 08:14:03 +08:00
sigoden
dd6973468c feat: support edit files (#179)
close #172
2023-02-20 22:50:24 +08:00
sigoden
c6c78a16c5 chore: optimize ui 2023-02-20 17:23:31 +08:00
sigoden
111103f26b fix: clear search input also clear query (#178)
close #161
2023-02-20 12:07:40 +08:00
sigoden
7d6d7d49ca feat: API to search and list directories (#177)
use `?simple` to output path name only.
use `?json` to output paths in json format.
By default, output html page.

close #166
2023-02-20 11:05:53 +08:00
sigoden
c6dcaf95d4 chore: hide env keys from help text (#176) 2023-02-19 22:48:41 +08:00
sigoden
b7c5119c2e feat: hiding only directories instead of files (#175)
A `--hidden` pattern with `/` suffix means hiding only directories not files.
A `--hidden` pattern without `/` will hide matching files and directories.
2023-02-19 22:03:59 +08:00
horizon
0000bd27f5 fix: remove Method::Options auth check (#168)
* fix: remove Method::Options auth check

* add tests

---------

Co-authored-by: sigoden <sigoden@gmail.com>
2023-02-19 12:30:14 +08:00
sigoden
47883376c1 chore: fix cargo clippy (#174) 2023-02-19 12:24:42 +08:00
MuXiu1997
fea9bf988a feat: use env var for args (#170)
closed #160
2023-02-19 11:40:14 +08:00
MoonFruit
b6d555158c chore: add install instruction for Homebrew (#163) 2022-12-27 10:16:23 +08:00
sigoden
628d863d2e chore: improve code quanity 2022-12-11 15:18:44 +08:00
sigoden
8d9705caa4 feat: add option --allow-archive (#152)
BREAKING CHANGE: explicitly allow download folder as zip file
2022-12-10 11:09:42 +08:00
Kian-Meng Ang
7eef4407fc docs: fix typos (#147)
Found via `codespell -S target -L crate,nd`
2022-12-10 09:18:54 +08:00
Sylvain Prat
f061365587 fix: set the STOPSIGNAL to SIGINT for Dockerfile 2022-12-10 08:31:46 +08:00
sigoden
d35cea4c36 chore(release): version 0.31.0 2022-11-12 08:43:13 +08:00
sigoden
1329e42b9a chore: upgrade clap to v4 (#146) 2022-11-11 21:46:07 +08:00
sigoden
6ebf619430 feat: support unix sockets (#145) 2022-11-11 08:57:44 +08:00
sigoden
8b4727c3a4 fix: panic on PROPFIND // (#144) 2022-11-10 19:28:01 +08:00
Aneesh Agrawal
604ccc6556 fix: status code for MKCOL on existing resource (#142)
* Fix status code for MKCOL on existing resource

Per https://datatracker.ietf.org/doc/html/rfc4918#section-9.3.1,
MKCOL should return a 405 if the resource already exists.

Impetus for this change:
I am using dufs as a webdav server for [Joplin](https://joplinapp.org/)
which interpreted the previous behavior of returning a 403 as an error,
preventing syncing from working.

* add test

Co-authored-by: sigoden <sigoden@gmail.com>
2022-11-10 18:41:10 +08:00
David Politis
1a9990f04e fix: don't search on empty query string (#140)
* fix: don't search on empty query string

* refactor

Co-authored-by: sigoden <sigoden@gmail.com>
2022-11-10 18:02:55 +08:00
sigoden
bd07783cde chore: cargo clippy 2022-11-10 15:38:35 +08:00
sigoden
dbf2de9cb9 fix: auth not works with --path-prefix (#138)
close #137
2022-10-08 09:14:42 +08:00
sigoden
3b3ea718d9 chore: improve readme 2022-09-09 21:43:40 +08:00
sigoden
3debf88da1 chore: improve readme 2022-09-09 21:37:07 +08:00
sigoden
7eaa6f2484 chore: undo hidden arg changes 2022-09-09 21:30:27 +08:00
sigoden
68def1c1d9 chore: update screenshot.png in readme 2022-09-09 21:22:03 +08:00
sigoden
868f4158f5 chore(release): version 0.30.0 2022-09-09 21:04:05 +08:00
sigoden
3063dca0a6 chore: update readme 2022-09-05 10:34:18 +08:00
sigoden
a74e40aee5 feat: add --assets options to override assets (#134)
* feat: add --assets options to override assets

* update readme
2022-09-05 10:30:45 +08:00
sigoden
bde06fef94 chore: refactor clap multiple_occurrences and multiple_values (#130) 2022-08-27 10:30:08 +08:00
sigoden
31c832a742 feat: support sort by name, mtime, size (#128) 2022-08-23 14:24:42 +08:00
Daniel Flannery
9f8171a22f chore: Corrected type in README (#127) 2022-08-17 07:41:02 +08:00
sigoden
0fb9f3b2c8 chore: update readme 2022-08-06 08:30:19 +08:00
sigoden
3ae75d3558 fix: hide path by ext name (#126) 2022-08-06 07:48:34 +08:00
sigoden
dff489398e chore(release): version v0.29.0 2022-08-03 09:05:39 +08:00
sigoden
64e397d18a chore: update --hidden help message 2022-08-03 08:58:52 +08:00
sigoden
cc0014c183 chore: fix typo 2022-08-03 08:51:12 +08:00
sigoden
a489c5647a fix: table row hover highlighting in dark mode (#122) 2022-08-03 07:02:58 +08:00
sigoden
0918fb3fe4 feat: support ecdsa tls cert (#119) 2022-08-02 09:32:11 +08:00
sigoden
14efeb6360 chore: update readme 2022-08-02 07:07:53 +08:00
sigoden
30b8f75bba chore: update deps and remove dependabot 2022-08-02 07:07:33 +08:00
sigoden
a39065beff chore: update readme 2022-08-01 15:12:25 +08:00
sigoden
a493c13734 chore(release): version v0.28.0 2022-08-01 08:47:18 +08:00
sigoden
ae2f878e62 feat: support customize http log format (#116) 2022-07-31 08:27:09 +08:00
sigoden
277d9d22d4 feat(ui): add table row hover (#115) 2022-07-30 08:04:31 +08:00
sigoden
c62926d19c fix(ui): file path contains special charactors (#114) 2022-07-30 07:53:27 +08:00
sigoden
cccbbe9ea4 chore: update deps 2022-07-29 08:54:46 +08:00
sigoden
147048690f chore(release): version v0.27.0 2022-07-25 09:59:32 +08:00
sigoden
9cfd66dab9 feat: adjust digest auth timeout to 1day (#110) 2022-07-21 11:47:47 +08:00
sigoden
b791549ec7 feat: improve hidden to support glob (#108) 2022-07-19 20:37:14 +08:00
sigoden
f148817c52 chore(release): version v0.26.0 2022-07-11 08:54:29 +08:00
sigoden
00ae36d486 chore: improve readme 2022-07-08 22:36:16 +08:00
sigoden
4e823e8bba feat: make --path-prefix works on serving single file (#102) 2022-07-08 19:30:05 +08:00
sigoden
4e84e6c532 fix: cors headers (#100) 2022-07-08 16:18:10 +08:00
sigoden
f49b590a56 chore: update description of --path-prefix 2022-07-07 15:44:25 +08:00
sigoden
cb1f3cddea chore(release): version v0.25.0 2022-07-07 07:51:51 +08:00
sigoden
05dbcfb2df feat: limit the number of concurrent uploads (#98) 2022-07-06 19:17:30 +08:00
sigoden
76e967fa59 feat: add completions (#97) 2022-07-06 12:11:00 +08:00
sigoden
140a360e37 chore: optimize move path default value 2022-07-05 09:16:21 +08:00
sigoden
604cbb7412 feat: check permission on move/copy destination (#93) 2022-07-04 23:25:05 +08:00
sigoden
c6541b1c36 feat: ui supports move folder/file to new path (#92) 2022-07-04 21:20:00 +08:00
sigoden
b6729a3d64 feat: ui supports creating folder (#91) 2022-07-04 20:12:35 +08:00
sigoden
4f1a35de5d chore(release): version v0.24.0 2022-07-03 06:47:49 +08:00
sigoden
2ffdcdf106 feat: allow search with --render-try-index (#88) 2022-07-02 23:25:57 +08:00
sigoden
1e0cdafbcf fix: unexpect stack overflow when searching a lot (#87) 2022-07-02 22:55:22 +08:00
sigoden
0a03941e05 chore: update deps 2022-07-02 11:48:47 +08:00
sigoden
07a7322748 chore: update readme 2022-07-01 21:37:56 +08:00
sigoden
936d08545b chore(release): version v0.23.1 2022-07-01 06:47:34 +08:00
sigoden
2e6af671ca fix: permissions of unzipped files (#84) 2022-06-30 19:29:47 +08:00
sigoden
583117c01f fix: safari layout and compatibility (#83) 2022-06-30 10:00:42 +08:00
sigoden
6e1df040b4 chore: update deps 2022-06-29 20:36:18 +08:00
sigoden
f5aa3354e1 chore: add github issule templates 2022-06-29 15:16:04 +08:00
sigoden
3ed0d885fe chore(release): version v0.23.0 2022-06-29 11:01:40 +08:00
sigoden
542e9a4ec5 chore: remove aarch64-linux-android platform 2022-06-29 10:58:43 +08:00
sigoden
5ee2c5504c ci: support more platforms (#76) 2022-06-29 10:51:59 +08:00
sigoden
fd02a53823 chore: replace old get-if-addrs with new if-addrs (#78) 2022-06-29 10:01:01 +08:00
sigoden
6554c1c308 feat: use feature to conditional support tls (#77) 2022-06-29 09:19:09 +08:00
sigoden
fe71600bd2 chore(release): version v0.22.0 2022-06-26 12:43:20 +08:00
sigoden
9cfeee0df0 chore: update args help message and readme 2022-06-25 09:58:39 +08:00
sigoden
eb7a536a3f feat: support hiding folders with --hidden (#73) 2022-06-25 08:15:16 +08:00
sigoden
c1c6dbc356 chore(release): version v0.21.0 2022-06-23 19:34:38 +08:00
sigoden
e29cf4c752 refactor: split css/js from index.html (#68) 2022-06-21 23:01:00 +08:00
sigoden
7f062b6705 feat: use custom logger with timestamp in rfc3339 (#67) 2022-06-21 21:19:51 +08:00
sigoden
ea8b9e9cce fix: escape name contains html escape code (#65) 2022-06-21 19:23:34 +08:00
42 changed files with 3846 additions and 2020 deletions

17
.github/ISSUE_TEMPLATE/bug_report.md vendored Normal file
View File

@@ -0,0 +1,17 @@
---
name: Bug report
about: Create a report to help us improve
---
**Problem**
<!-- A clear and concise description of what the bug is. -->
**Log**
If applicable, add logs to help explain your problem.
**Environment:**
- Dufs version:
- Browser/Webdav Info:
- OS Info:

View File

@@ -0,0 +1,16 @@
---
name: Feature Request
about: If you have any interesting advice, you can tell us.
---
## Specific Demand
<!--
What feature do you need, please describe it in detail.
-->
## Implement Suggestion
<!--
If you have any suggestion for complete this feature, you can tell us.
-->

View File

@@ -7,33 +7,69 @@ on:
jobs: jobs:
release: release:
name: Publish to Github Reelases name: Publish to Github Releases
permissions:
contents: write
outputs: outputs:
rc: ${{ steps.check-tag.outputs.rc }} rc: ${{ steps.check-tag.outputs.rc }}
strategy: strategy:
matrix: matrix:
target:
- aarch64-unknown-linux-musl
- aarch64-apple-darwin
- x86_64-apple-darwin
- x86_64-pc-windows-msvc
- x86_64-unknown-linux-musl
include: include:
- target: aarch64-unknown-linux-musl - target: aarch64-unknown-linux-musl
os: ubuntu-latest os: ubuntu-latest
use-cross: true use-cross: true
cargo-flags: ""
- target: aarch64-apple-darwin - target: aarch64-apple-darwin
os: macos-latest os: macos-latest
use-cross: true use-cross: true
cargo-flags: ""
- target: aarch64-pc-windows-msvc
os: windows-latest
use-cross: true
cargo-flags: "--no-default-features"
- target: x86_64-apple-darwin - target: x86_64-apple-darwin
os: macos-latest os: macos-latest
cargo-flags: ""
- target: x86_64-pc-windows-msvc - target: x86_64-pc-windows-msvc
os: windows-latest os: windows-latest
cargo-flags: ""
- target: x86_64-unknown-linux-musl - target: x86_64-unknown-linux-musl
os: ubuntu-latest os: ubuntu-latest
use-cross: true use-cross: true
cargo-flags: ""
- target: i686-unknown-linux-musl
os: ubuntu-latest
use-cross: true
cargo-flags: ""
- target: i686-pc-windows-msvc
os: windows-latest
use-cross: true
cargo-flags: ""
- target: armv7-unknown-linux-musleabihf
os: ubuntu-latest
use-cross: true
cargo-flags: ""
- target: arm-unknown-linux-musleabihf
os: ubuntu-latest
use-cross: true
cargo-flags: ""
- target: mips-unknown-linux-musl
os: ubuntu-latest
use-cross: true
cargo-flags: "--no-default-features"
- target: mipsel-unknown-linux-musl
os: ubuntu-latest
use-cross: true
cargo-flags: "--no-default-features"
- target: mips64-unknown-linux-gnuabi64
os: ubuntu-latest
use-cross: true
cargo-flags: "--no-default-features"
- target: mips64el-unknown-linux-gnuabi64
os: ubuntu-latest
use-cross: true
cargo-flags: "--no-default-features"
runs-on: ${{matrix.os}} runs-on: ${{matrix.os}}
steps: steps:
@@ -60,13 +96,6 @@ jobs:
toolchain: stable toolchain: stable
profile: minimal # minimal component installation (ie, no documentation) profile: minimal # minimal component installation (ie, no documentation)
- name: Install prerequisites
shell: bash
run: |
case ${{ matrix.target }} in
aarch64-unknown-linux-musl) sudo apt-get -y update ; sudo apt-get -y install gcc-aarch64-linux-gnu ;;
esac
- name: Show Version Information (Rust, cargo, GCC) - name: Show Version Information (Rust, cargo, GCC)
shell: bash shell: bash
run: | run: |
@@ -82,7 +111,7 @@ jobs:
with: with:
use-cross: ${{ matrix.use-cross }} use-cross: ${{ matrix.use-cross }}
command: build command: build
args: --locked --release --target=${{ matrix.target }} args: --locked --release --target=${{ matrix.target }} ${{ matrix.cargo-flags }}
- name: Build Archive - name: Build Archive
shell: bash shell: bash
@@ -133,6 +162,8 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
needs: release needs: release
steps: steps:
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx - name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1 uses: docker/setup-buildx-action@v1
- name: Login to DockerHub - name: Login to DockerHub
@@ -141,9 +172,16 @@ jobs:
username: ${{ secrets.DOCKERHUB_USERNAME }} username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }} password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push - name: Build and push
id: docker_build
uses: docker/build-push-action@v2 uses: docker/build-push-action@v2
with: with:
build-args: |
REPO=${{ github.repository }}
VER=${{ github.ref_name }}
platforms: |
linux/amd64
linux/arm64
linux/386
linux/arm/v7
push: ${{ needs.release.outputs.rc == 'false' }} push: ${{ needs.release.outputs.rc == 'false' }}
tags: ${{ github.repository }}:latest, ${{ github.repository }}:${{ github.ref_name }} tags: ${{ github.repository }}:latest, ${{ github.repository }}:${{ github.ref_name }}

View File

@@ -2,6 +2,159 @@
All notable changes to this project will be documented in this file. All notable changes to this project will be documented in this file.
## [0.33.0] - 2023-03-17
### Bug Fixes
- Cors allow-request-header add content-type ([#184](https://github.com/sigoden/dufs/issues/184))
- Hidden don't works on some files ([#188](https://github.com/sigoden/dufs/issues/188))
- Basic auth sometimes does not work ([#194](https://github.com/sigoden/dufs/issues/194))
### Features
- Guess plain text encoding then set content-type charset ([#186](https://github.com/sigoden/dufs/issues/186))
### Refactor
- Improve error handle ([#195](https://github.com/sigoden/dufs/issues/195))
## [0.32.0] - 2023-02-22
### Bug Fixes
- Set the STOPSIGNAL to SIGINT for Dockerfile
- Remove Method::Options auth check ([#168](https://github.com/sigoden/dufs/issues/168))
- Clear search input also clear query ([#178](https://github.com/sigoden/dufs/issues/178))
### Features
- [**breaking**] Add option --allow-archive ([#152](https://github.com/sigoden/dufs/issues/152))
- Use env var for args ([#170](https://github.com/sigoden/dufs/issues/170))
- Hiding only directories instead of files ([#175](https://github.com/sigoden/dufs/issues/175))
- API to search and list directories ([#177](https://github.com/sigoden/dufs/issues/177))
- Support edit files ([#179](https://github.com/sigoden/dufs/issues/179))
- Support new file ([#180](https://github.com/sigoden/dufs/issues/180))
- Ui improves the login experience ([#182](https://github.com/sigoden/dufs/issues/182))
## [0.31.0] - 2022-11-11
### Bug Fixes
- Auth not works with --path-prefix ([#138](https://github.com/sigoden/dufs/issues/138))
- Don't search on empty query string ([#140](https://github.com/sigoden/dufs/issues/140))
- Status code for MKCOL on existing resource ([#142](https://github.com/sigoden/dufs/issues/142))
- Panic on PROPFIND // ([#144](https://github.com/sigoden/dufs/issues/144))
### Features
- Support unix sockets ([#145](https://github.com/sigoden/dufs/issues/145))
## [0.30.0] - 2022-09-09
### Bug Fixes
- Hide path by ext name ([#126](https://github.com/sigoden/dufs/issues/126))
### Features
- Support sort by name, mtime, size ([#128](https://github.com/sigoden/dufs/issues/128))
- Add --assets options to override assets ([#134](https://github.com/sigoden/dufs/issues/134))
## [0.29.0] - 2022-08-03
### Bug Fixes
- Table row hover highlighting in dark mode ([#122](https://github.com/sigoden/dufs/issues/122))
### Features
- Support ecdsa tls cert ([#119](https://github.com/sigoden/dufs/issues/119))
## [0.28.0] - 2022-08-01
### Bug Fixes
- File path contains special characters ([#114](https://github.com/sigoden/dufs/issues/114))
### Features
- Add table row hover ([#115](https://github.com/sigoden/dufs/issues/115))
- Support customize http log format ([#116](https://github.com/sigoden/dufs/issues/116))
## [0.27.0] - 2022-07-25
### Features
- Improve hidden to support glob ([#108](https://github.com/sigoden/dufs/issues/108))
- Adjust digest auth timeout to 1day ([#110](https://github.com/sigoden/dufs/issues/110))
## [0.26.0] - 2022-07-11
### Bug Fixes
- Cors headers ([#100](https://github.com/sigoden/dufs/issues/100))
### Features
- Make --path-prefix works on serving single file ([#102](https://github.com/sigoden/dufs/issues/102))
## [0.25.0] - 2022-07-06
### Features
- Ui supports creating folder ([#91](https://github.com/sigoden/dufs/issues/91))
- Ui supports move folder/file to new path ([#92](https://github.com/sigoden/dufs/issues/92))
- Check permission on move/copy destination ([#93](https://github.com/sigoden/dufs/issues/93))
- Add completions ([#97](https://github.com/sigoden/dufs/issues/97))
- Limit the number of concurrent uploads ([#98](https://github.com/sigoden/dufs/issues/98))
## [0.24.0] - 2022-07-02
### Bug Fixes
- Unexpected stack overflow when searching a lot ([#87](https://github.com/sigoden/dufs/issues/87))
### Features
- Allow search with --render-try-index ([#88](https://github.com/sigoden/dufs/issues/88))
## [0.23.1] - 2022-06-30
### Bug Fixes
- Safari layout and compatibility ([#83](https://github.com/sigoden/dufs/issues/83))
- Permissions of unzipped files ([#84](https://github.com/sigoden/dufs/issues/84))
## [0.23.0] - 2022-06-29
### Features
- Use feature to conditional support tls ([#77](https://github.com/sigoden/dufs/issues/77))
### Ci
- Support more platforms ([#76](https://github.com/sigoden/dufs/issues/76))
## [0.22.0] - 2022-06-26
### Features
- Support hiding folders with --hidden ([#73](https://github.com/sigoden/dufs/issues/73))
## [0.21.0] - 2022-06-23
### Bug Fixes
- Escape name contains html escape code ([#65](https://github.com/sigoden/dufs/issues/65))
### Features
- Use custom logger with timestamp in rfc3339 ([#67](https://github.com/sigoden/dufs/issues/67))
### Refactor
- Split css/js from index.html ([#68](https://github.com/sigoden/dufs/issues/68))
## [0.20.0] - 2022-06-20 ## [0.20.0] - 2022-06-20
### Bug Fixes ### Bug Fixes
@@ -52,7 +205,7 @@ All notable changes to this project will be documented in this file.
### Refactor ### Refactor
- Trival changes ([#41](https://github.com/sigoden/dufs/issues/41)) - Trivial changes ([#41](https://github.com/sigoden/dufs/issues/41))
## [0.16.0] - 2022-06-12 ## [0.16.0] - 2022-06-12

1684
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,6 @@
[package] [package]
name = "dufs" name = "dufs"
version = "0.20.0" version = "0.33.0"
edition = "2021" edition = "2021"
authors = ["sigoden <sigoden@gmail.com>"] authors = ["sigoden <sigoden@gmail.com>"]
description = "Dufs is a distinctive utility file server" description = "Dufs is a distinctive utility file server"
@@ -11,46 +11,55 @@ categories = ["command-line-utilities", "web-programming::http-server"]
keywords = ["static", "file", "server", "webdav", "cli"] keywords = ["static", "file", "server", "webdav", "cli"]
[dependencies] [dependencies]
clap = { version = "3", default-features = false, features = ["std", "wrap_help"] } clap = { version = "4", features = ["wrap_help", "env"] }
clap_complete = "4"
chrono = "0.4" chrono = "0.4"
tokio = { version = "1", features = ["rt-multi-thread", "macros", "fs", "io-util", "signal"]} tokio = { version = "1", features = ["rt-multi-thread", "macros", "fs", "io-util", "signal"]}
tokio-rustls = "0.23"
tokio-util = { version = "0.7", features = ["io-util"] } tokio-util = { version = "0.7", features = ["io-util"] }
hyper = { version = "0.14", features = ["http1", "server", "tcp", "stream"] } hyper = { version = "0.14", features = ["http1", "server", "tcp", "stream"] }
percent-encoding = "2.1" percent-encoding = "2.1"
serde = { version = "1", features = ["derive"] } serde = { version = "1", features = ["derive"] }
serde_json = "1" serde_json = "1"
futures = "0.3" futures = "0.3"
base64 = "0.13" base64 = "0.21"
async_zip = "0.0.7" async_zip = { version = "0.0.12", default-features = false, features = ["deflate"] }
async-walkdir = "0.2"
headers = "0.3" headers = "0.3"
mime_guess = "2.0" mime_guess = "2.0"
get_if_addrs = "0.5" if-addrs = "0.10.1"
rustls = { version = "0.20", default-features = false, features = ["tls12"] } rustls = { version = "0.20", default-features = false, features = ["tls12"], optional = true }
rustls-pemfile = "1" rustls-pemfile = { version = "1", optional = true }
tokio-rustls = { version = "0.23", optional = true }
md5 = "0.7" md5 = "0.7"
lazy_static = "1.4" lazy_static = "1.4"
uuid = { version = "1.1", features = ["v4", "fast-rng"] } uuid = { version = "1.1", features = ["v4", "fast-rng"] }
urlencoding = "2.1" urlencoding = "2.1"
xml-rs = "0.8" xml-rs = "0.8"
env_logger = { version = "0.9", default-features = false, features = ["humantime"] }
log = "0.4" log = "0.4"
socket2 = "0.4" socket2 = "0.5"
async-stream = "0.3" async-stream = "0.3"
walkdir = "2.3"
form_urlencoded = "1.0"
alphanumeric-sort = "1.4"
content_inspector = "0.2"
anyhow = "1.0"
chardetng = "0.1"
glob = "0.3.1"
[features]
default = ["tls"]
tls = ["rustls", "rustls-pemfile", "tokio-rustls"]
[dev-dependencies] [dev-dependencies]
assert_cmd = "2" assert_cmd = "2"
reqwest = { version = "0.11", features = ["blocking", "multipart", "rustls-tls"], default-features = false } reqwest = { version = "0.11", features = ["blocking", "multipart", "rustls-tls"], default-features = false }
assert_fs = "1" assert_fs = "1"
select = "0.5"
port_check = "0.1" port_check = "0.1"
rstest = "0.13" rstest = "0.16"
regex = "1" regex = "1"
pretty_assertions = "1.2"
url = "2" url = "2"
diqwest = { version = "1", features = ["blocking"] } diqwest = { version = "1", features = ["blocking"] }
predicates = "2" predicates = "3"
indexmap = "1.9"
[profile.release] [profile.release]
lto = true lto = true

View File

@@ -1,10 +1,19 @@
FROM rust:1.61 as builder FROM alpine as builder
RUN rustup target add x86_64-unknown-linux-musl ARG REPO VER TARGETPLATFORM
RUN apt-get update && apt-get install --no-install-recommends -y musl-tools RUN if [ "$TARGETPLATFORM" = "linux/amd64" ]; then \
WORKDIR /app TARGET="x86_64-unknown-linux-musl"; \
COPY . . elif [ "$TARGETPLATFORM" = "linux/arm64" ]; then \
RUN cargo build --target x86_64-unknown-linux-musl --release TARGET="aarch64-unknown-linux-musl"; \
elif [ "$TARGETPLATFORM" = "linux/386" ]; then \
TARGET="i686-unknown-linux-musl"; \
elif [ "$TARGETPLATFORM" = "linux/arm/v7" ]; then \
TARGET="armv7-unknown-linux-musleabihf"; \
fi && \
wget https://github.com/${REPO}/releases/download/${VER}/dufs-${VER}-${TARGET}.tar.gz && \
tar -xf dufs-${VER}-${TARGET}.tar.gz && \
mv dufs /bin/
FROM scratch FROM scratch
COPY --from=builder /app/target/x86_64-unknown-linux-musl/release/dufs /bin/ COPY --from=builder /bin/dufs /bin/dufs
STOPSIGNAL SIGINT
ENTRYPOINT ["/bin/dufs"] ENTRYPOINT ["/bin/dufs"]

260
README.md
View File

@@ -1,18 +1,18 @@
# Dufs (Old Name: Duf) # Dufs
[![CI](https://github.com/sigoden/dufs/actions/workflows/ci.yaml/badge.svg)](https://github.com/sigoden/dufs/actions/workflows/ci.yaml) [![CI](https://github.com/sigoden/dufs/actions/workflows/ci.yaml/badge.svg)](https://github.com/sigoden/dufs/actions/workflows/ci.yaml)
[![Crates](https://img.shields.io/crates/v/dufs.svg)](https://crates.io/crates/dufs) [![Crates](https://img.shields.io/crates/v/dufs.svg)](https://crates.io/crates/dufs)
Dufs is a distinctive utility file server that supports static serving, uploading, searching, accessing control, webdav... Dufs is a distinctive utility file server that supports static serving, uploading, searching, accessing control, webdav...
![demo](https://user-images.githubusercontent.com/4012553/174486522-7af350e6-0195-4f4a-8480-d9464fc6452f.png) ![demo](https://user-images.githubusercontent.com/4012553/220513063-ff0f186b-ac54-4682-9af4-47a9781dee0d.png)
## Features ## Features
- Serve static files - Serve static files
- Download folder as zip file - Download folder as zip file
- Upload files and folders (Drag & Drop) - Upload files and folders (Drag & Drop)
- Search files - Create/Edit/Search files
- Partial responses (Parallel/Resume download) - Partial responses (Parallel/Resume download)
- Path level access control - Path level access control
- Support https - Support https
@@ -30,7 +30,13 @@ cargo install dufs
### With docker ### With docker
``` ```
docker run -v `pwd`:/data -p 5000:5000 --rm -it sigoden/dufs /data docker run -v `pwd`:/data -p 5000:5000 --rm -it sigoden/dufs /data -A
```
### With [Homebrew](https://brew.sh)
```
brew install dufs
``` ```
### Binaries on macOS, Linux, Windows ### Binaries on macOS, Linux, Windows
@@ -42,42 +48,46 @@ Download from [Github Releases](https://github.com/sigoden/dufs/releases), unzip
``` ```
Dufs is a distinctive utility file server - https://github.com/sigoden/dufs Dufs is a distinctive utility file server - https://github.com/sigoden/dufs
USAGE: Usage: dufs [OPTIONS] [root]
dufs [OPTIONS] [--] [path]
ARGS: Arguments:
<path> Specific path to serve [default: .] [root] Specific path to serve [default: .]
OPTIONS: Options:
-b, --bind <addr>... Specify bind address -b, --bind <addrs> Specify bind address or unix socket
-p, --port <port> Specify port to listen on [default: 5000] -p, --port <port> Specify port to listen on [default: 5000]
--path-prefix <path> Specify an path prefix --path-prefix <path> Specify a path prefix
-a, --auth <rule>... Add auth for path --hidden <value> Hide paths from directory listings, separated by `,`
--auth-method <value> Select auth method [default: digest] [possible values: basic, digest] -a, --auth <rules> Add auth for path
-A, --allow-all Allow all operations --auth-method <value> Select auth method [default: digest] [possible values: basic, digest]
--allow-upload Allow upload files/folders -A, --allow-all Allow all operations
--allow-delete Allow delete files/folders --allow-upload Allow upload files/folders
--allow-search Allow search files/folders --allow-delete Allow delete files/folders
--allow-symlink Allow symlink to files/folders outside root directory --allow-search Allow search files/folders
--enable-cors Enable CORS, sets `Access-Control-Allow-Origin: *` --allow-symlink Allow symlink to files/folders outside root directory
--render-index Serve index.html when requesting a directory, returns 404 if not found index.html --allow-archive Allow zip archive generation
--render-try-index Serve index.html when requesting a directory, returns file listing if not found index.html --enable-cors Enable CORS, sets `Access-Control-Allow-Origin: *`
--render-spa Serve SPA(Single Page Application) --render-index Serve index.html when requesting a directory, returns 404 if not found index.html
--tls-cert <path> Path to an SSL/TLS certificate to serve with HTTPS --render-try-index Serve index.html when requesting a directory, returns directory listing if not found index.html
--tls-key <path> Path to the SSL/TLS certificate's private key --render-spa Serve SPA(Single Page Application)
-h, --help Print help information --assets <path> Use custom assets to override builtin assets
-V, --version Print version information --tls-cert <path> Path to an SSL/TLS certificate to serve with HTTPS
--tls-key <path> Path to the SSL/TLS certificate's private key
--log-format <format> Customize http log format
--completions <shell> Print shell completion script for <shell> [possible values: bash, elvish, fish, powershell, zsh]
-h, --help Print help information
-V, --version Print version information
``` ```
## Examples ## Examples
Serve current working directory Serve current working directory in readonly mode
``` ```
dufs dufs
``` ```
Explicitly allow all operations including upload/delete Allow all operations like upload/delete/search/create/edit...
``` ```
dufs -A dufs -A
@@ -89,7 +99,7 @@ Only allow upload operation
dufs --allow-upload dufs --allow-upload
``` ```
Serve a directory Serve a specific directory
``` ```
dufs Downloads dufs Downloads
@@ -101,28 +111,33 @@ Serve a single file
dufs linux-distro.iso dufs linux-distro.iso
``` ```
Serve index.html when requesting a directory Serve a single-page application like react/vue
```
dufs --render-index
```
Serve SPA(Single Page Application)
``` ```
dufs --render-spa dufs --render-spa
``` ```
Serve a static website with index.html
```
dufs --render-index
```
Require username/password Require username/password
``` ```
dufs -a /@admin:123 dufs -a /@admin:123
``` ```
Listen on a specific port Listen on specific host:ip
``` ```
dufs -p 80 dufs -b 127.0.0.1 -p 80
```
Listen on unix socket
```
dufs -b /tmp/dufs.socket
``` ```
Use https Use https
@@ -133,6 +148,12 @@ dufs --tls-cert my.crt --tls-key my.key
## API ## API
Upload a file
```
curl -T path-to-file http://127.0.0.1:5000/new-path/path-to-file
```
Download a file Download a file
``` ```
curl http://127.0.0.1:5000/path-to-file curl http://127.0.0.1:5000/path-to-file
@@ -144,40 +165,161 @@ Download a folder as zip file
curl -o path-to-folder.zip http://127.0.0.1:5000/path-to-folder?zip curl -o path-to-folder.zip http://127.0.0.1:5000/path-to-folder?zip
``` ```
Upload a file
```
curl --upload-file path-to-file http://127.0.0.1:5000/path-to-file
```
Delete a file/folder Delete a file/folder
``` ```
curl -X DELETE http://127.0.0.1:5000/path-to-file curl -X DELETE http://127.0.0.1:5000/path-to-file-or-folder
``` ```
## Access Control Create a directory
```
curl -X MKCOL https://127.0.0.1:5000/path-to-folder
```
Move the file/folder to the new path
```
curl -X MOVE https://127.0.0.1:5000/path -H "Destination: https://127.0.0.1:5000/new-path"
```
List/search directory contents
```
curl http://127.0.0.1:5000?simple # output names only, just like `ls -1`
curl http://127.0.0.1:5000?json # output paths in json format
curl http://127.0.0.1:5000?q=Dockerfile&simple # search for files, just like `find -name Dockerfile`
```
With authorization
```
curl --user user:pass --digest http://192.168.8.10:5000/file # digest auth
curl --user user:pass http://192.168.8.10:5000/file # basic auth
```
<details>
<summary><h2>Advanced topics</h2></summary>
### Access Control
Dufs supports path level access control. You can control who can do what on which path with `--auth`/`-a`. Dufs supports path level access control. You can control who can do what on which path with `--auth`/`-a`.
``` ```
dufs -a <path>@<readwrite>[@<readonly>] dufs -a <path>@<readwrite>
dufs -a <path>@<readwrite>@<readonly>
dufs -a <path>@<readwrite>@*
``` ```
- `<path>`: Path to protected - `<path>`: Protected url path
- `<readwrite>`: Account with readwrite permission, required - `<readwrite>`: Account with readwrite permissions. If dufs is run with `dufs --allow-all`, the permissions are upload/delete/search/view/download. If dufs is run with `dufs --allow-upload`, the permissions are upload/view/download.
- `<readonly>`: Account with readonly permission, optional - `<readonly>`: Account with readonly permissions. The permissions are search/view/download if dufs allow search, otherwise view/download..
> `<readonly>` can be `*` means `<path>` is public, everyone can access/download it.
For example:
``` ```
dufs -a /@admin:pass@* -a /ui@designer:pass1 -A dufs -A -a /@admin:admin
``` ```
- All files/folders are public to access/download. `admin` has all permissions for all paths.
- Account `admin:pass` can upload/delete/download any files/folders.
- Account `designer:pass1` can upload/delete/download any files/folders in the `ui` folder. ```
dufs -A -a /@admin:admin@guest:guest
```
`guest` has readonly permissions for all paths.
```
dufs -A -a /@admin:admin@*
```
All paths is public, everyone can view/download it.
```
dufs -A -a /@admin:admin -a /user1@user1:pass1 -a /user2@pass2:user2
```
`user1` has all permissions for `/user1*` path.
`user2` has all permissions for `/user2*` path.
```
dufs -a /@admin:admin
```
Since dufs only allows viewing/downloading, `admin` can only view/download files.
### Hide Paths
Dufs supports hiding paths from directory listings via option `--hidden <glob>,...`.
```
dufs --hidden .git,.DS_Store,tmp
```
> The glob used in --hidden only matches file and directory names, not paths. So `--hidden dir1/file` is invalid.
```sh
dufs --hidden '.*' # hidden dotfiles
dufs --hidden '*/' # hidden all folders
dufs --hidden '*.log,*.lock' # hidden by exts
```
### Log Format
Dufs supports customize http log format with option `--log-format`.
The log format can use following variables.
| variable | description |
| ------------ | ------------------------------------------------------------------------- |
| $remote_addr | client address |
| $remote_user | user name supplied with authentication |
| $request | full original request line |
| $status | response status |
| $http_ | arbitrary request header field. examples: $http_user_agent, $http_referer |
The default log format is `'$remote_addr "$request" $status'`.
```
2022-08-06T06:59:31+08:00 INFO - 127.0.0.1 "GET /" 200
```
Disable http log
```
dufs --log-format=''
```
Log user-agent
```
dufs --log-format '$remote_addr "$request" $status $http_user_agent'
```
```
2022-08-06T06:53:55+08:00 INFO - 127.0.0.1 "GET /" 200 Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/104.0.0.0 Safari/537.36
```
Log remote-user
```
dufs --log-format '$remote_addr $remote_user "$request" $status' -a /@admin:admin -a /folder1@user1:pass1
```
```
2022-08-06T07:04:37+08:00 INFO - 127.0.0.1 admin "GET /" 200
```
## Environment variables
All options can be set using environment variables prefixed with `DUFS_`.
`dufs --port 8080 --allow-all` is equal to `DUFS_PORT=8080 DUFS_ALLOW_ALL=true dufs`.
### Customize UI
Dufs allows users to customize the UI with your own assets.
```
dufs --assets my-assets-dir/
```
Your assets folder must contains a `index.html` file.
`index.html` can use the following placeholder variables to retrieve internal data.
- `__INDEX_DATA__`: directory listing data
- `__ASSERTS_PREFIX__`: assets url prefix
</details>
## License ## License

View File

@@ -1,5 +1,5 @@
html { html {
font-family: -apple-system,BlinkMacSystemFont,Roboto,Helvetica,Arial,sans-serif; font-family: -apple-system, BlinkMacSystemFont, Roboto, Helvetica, Arial, sans-serif;
line-height: 1.5; line-height: 1.5;
color: #24292e; color: #24292e;
} }
@@ -7,6 +7,7 @@ html {
body { body {
/* prevent premature breadcrumb wrapping on mobile */ /* prevent premature breadcrumb wrapping on mobile */
min-width: 500px; min-width: 500px;
margin: 0;
} }
.hidden { .hidden {
@@ -17,7 +18,10 @@ body {
display: flex; display: flex;
flex-wrap: wrap; flex-wrap: wrap;
align-items: center; align-items: center;
padding: 1em 1em 0; padding: 0.6em 1em;
position: fixed;
width: 100%;
background-color: white;
} }
.breadcrumb { .breadcrumb {
@@ -25,28 +29,28 @@ body {
padding-right: 0.6em; padding-right: 0.6em;
} }
.breadcrumb > a { .breadcrumb>a {
color: #0366d6; color: #0366d6;
text-decoration: none; text-decoration: none;
} }
.breadcrumb > a:hover { .breadcrumb>a:hover {
text-decoration: underline; text-decoration: underline;
} }
/* final breadcrumb */ /* final breadcrumb */
.breadcrumb > b { .breadcrumb>b {
color: #24292e; color: #24292e;
} }
.breadcrumb > .separator { .breadcrumb>.separator {
color: #586069; color: #586069;
padding: 0 0.25em; padding: 0 0.25em;
} }
.breadcrumb svg { .breadcrumb svg {
height: 100%; height: 100%;
fill: rgba(3,47,98,0.5); fill: rgba(3, 47, 98, 0.5);
} }
.toolbox { .toolbox {
@@ -54,11 +58,21 @@ body {
margin-right: 10px; margin-right: 10px;
} }
.toolbox > div { .toolbox>a,
.toolbox>div {
/* vertically align with breadcrumb text */ /* vertically align with breadcrumb text */
height: 1.1rem; height: 1.1rem;
} }
.toolbox .control {
cursor: pointer;
padding-left: 0.25em;
}
.upload-file input {
display: none;
}
.searchbar { .searchbar {
display: flex; display: flex;
flex-wrap: nowrap; flex-wrap: nowrap;
@@ -78,7 +92,6 @@ body {
font-size: 16px; font-size: 16px;
line-height: 16px; line-height: 16px;
padding: 1px; padding: 1px;
font-family: helvetica neue,luxi sans,Tahoma,hiragino sans gb,STHeiti,sans-serif;
background-color: transparent; background-color: transparent;
border: none; border: none;
outline: none; outline: none;
@@ -90,26 +103,16 @@ body {
cursor: pointer; cursor: pointer;
} }
.upload-control {
cursor: pointer;
padding-left: 0.25em;
}
.upload-control input {
display: none;
}
.upload-status span { .upload-status span {
width: 70px; width: 70px;
display: inline-block; display: inline-block;
} }
.main { .main {
padding: 0 1em; padding: 3.3em 1em 0;
} }
.empty-folder { .empty-folder {
padding-top: 1rem;
font-style: italic; font-style: italic;
} }
@@ -131,8 +134,21 @@ body {
padding-left: 0.6em; padding-left: 0.6em;
} }
.paths-table thead a {
color: unset;
text-decoration: none;
}
.paths-table thead a>span {
padding-left: 2px;
}
.paths-table tbody tr:hover {
background-color: #fafafa;
}
.paths-table .cell-actions { .paths-table .cell-actions {
width: 60px; width: 75px;
display: flex; display: flex;
padding-left: 0.6em; padding-left: 0.6em;
} }
@@ -149,8 +165,8 @@ body {
} }
.path svg { .path svg {
height: 100%; height: 16px;
fill: rgba(3,47,98,0.5); fill: rgba(3, 47, 98, 0.5);
padding-right: 0.5em; padding-right: 0.5em;
vertical-align: text-top; vertical-align: text-top;
} }
@@ -175,7 +191,8 @@ body {
} }
.action-btn { .action-btn {
padding-left: 0.4em; padding-right: 0.3em;
cursor: pointer;
} }
.uploaders-table { .uploaders-table {
@@ -186,6 +203,27 @@ body {
padding-right: 1em; padding-right: 1em;
} }
.editor {
width: 100%;
height: calc(100vh - 5rem);
border: 1px solid #ced4da;
outline: none;
}
.toolbox2 {
margin-left: auto;
margin-right: 2em;
}
.save-btn {
cursor: pointer;
user-select: none;
}
.not-editable {
font-style: italic;
}
@media (min-width: 768px) { @media (min-width: 768px) {
.path a { .path a {
min-width: 400px; min-width: 400px;
@@ -215,6 +253,10 @@ body {
fill: #fff; fill: #fff;
} }
.head {
background-color: #111;
}
.searchbar { .searchbar {
background-color: #111; background-color: #111;
border-color: #fff6; border-color: #fff6;
@@ -227,4 +269,13 @@ body {
.path a { .path a {
color: #3191ff; color: #3191ff;
} }
.paths-table tbody tr:hover {
background-color: #1a1a1a;
}
.editor {
background: black;
color: white;
}
} }

View File

@@ -4,57 +4,129 @@
<head> <head>
<meta charset="utf-8" /> <meta charset="utf-8" />
<meta name="viewport" content="width=device-width" /> <meta name="viewport" content="width=device-width" />
__SLOT__ <link rel="icon" type="image/x-icon" href="__ASSERTS_PREFIX__favicon.ico">
<link rel="stylesheet" href="__ASSERTS_PREFIX__index.css">
<script>
DATA = __INDEX_DATA__
</script>
<script src="__ASSERTS_PREFIX__index.js"></script>
</head> </head>
<body> <body>
<div class="head"> <div class="head">
<div class="breadcrumb"></div> <div class="breadcrumb"></div>
<div class="toolbox"> <div class="toolbox">
<div> <a href="" class="control download hidden" title="Download file" download="">
<a href="?zip" title="Download folder as a .zip file"> <svg width="16" height="16" viewBox="0 0 16 16">
<svg width="16" height="16" viewBox="0 0 16 16"><path d="M.5 9.9a.5.5 0 0 1 .5.5v2.5a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-2.5a.5.5 0 0 1 1 0v2.5a2 2 0 0 1-2 2H2a2 2 0 0 1-2-2v-2.5a.5.5 0 0 1 .5-.5z"/><path d="M7.646 11.854a.5.5 0 0 0 .708 0l3-3a.5.5 0 0 0-.708-.708L8.5 10.293V1.5a.5.5 0 0 0-1 0v8.793L5.354 8.146a.5.5 0 1 0-.708.708l3 3z"/></svg> <path
</a> d="M.5 9.9a.5.5 0 0 1 .5.5v2.5a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-2.5a.5.5 0 0 1 1 0v2.5a2 2 0 0 1-2 2H2a2 2 0 0 1-2-2v-2.5a.5.5 0 0 1 .5-.5z" />
<path
d="M7.646 11.854a.5.5 0 0 0 .708 0l3-3a.5.5 0 0 0-.708-.708L8.5 10.293V1.5a.5.5 0 0 0-1 0v8.793L5.354 8.146a.5.5 0 1 0-.708.708l3 3z" />
</svg>
</a>
<div class="control move-file hidden" title="Move to new path">
<svg class="icon-move" width="16" height="16" viewBox="0 0 16 16">
<path fill-rule="evenodd"
d="M1.5 1.5A.5.5 0 0 0 1 2v4.8a2.5 2.5 0 0 0 2.5 2.5h9.793l-3.347 3.346a.5.5 0 0 0 .708.708l4.2-4.2a.5.5 0 0 0 0-.708l-4-4a.5.5 0 0 0-.708.708L13.293 8.3H3.5A1.5 1.5 0 0 1 2 6.8V2a.5.5 0 0 0-.5-.5z">
</path>
</svg>
</div> </div>
<div class="upload-control hidden" title="Upload files"> <div class="control delete-file hidden" title="Delete">
<svg class="icon-delete" width="16" height="16" fill="currentColor" viewBox="0 0 16 16">
<path
d="M6.854 7.146a.5.5 0 1 0-.708.708L7.293 9l-1.147 1.146a.5.5 0 0 0 .708.708L8 9.707l1.146 1.147a.5.5 0 0 0 .708-.708L8.707 9l1.147-1.146a.5.5 0 0 0-.708-.708L8 8.293 6.854 7.146z" />
<path
d="M14 14V4.5L9.5 0H4a2 2 0 0 0-2 2v12a2 2 0 0 0 2 2h8a2 2 0 0 0 2-2zM9.5 3A1.5 1.5 0 0 0 11 4.5h2V14a1 1 0 0 1-1 1H4a1 1 0 0 1-1-1V2a1 1 0 0 1 1-1h5.5v2z" />
</svg>
</div>
<div class="control upload-file hidden" title="Upload files">
<label for="file"> <label for="file">
<svg width="16" height="16" viewBox="0 0 16 16"><path d="M.5 9.9a.5.5 0 0 1 .5.5v2.5a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-2.5a.5.5 0 0 1 1 0v2.5a2 2 0 0 1-2 2H2a2 2 0 0 1-2-2v-2.5a.5.5 0 0 1 .5-.5z"/><path d="M7.646 1.146a.5.5 0 0 1 .708 0l3 3a.5.5 0 0 1-.708.708L8.5 2.707V11.5a.5.5 0 0 1-1 0V2.707L5.354 4.854a.5.5 0 1 1-.708-.708l3-3z"/></svg> <svg width="16" height="16" viewBox="0 0 16 16">
<path
d="M.5 9.9a.5.5 0 0 1 .5.5v2.5a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-2.5a.5.5 0 0 1 1 0v2.5a2 2 0 0 1-2 2H2a2 2 0 0 1-2-2v-2.5a.5.5 0 0 1 .5-.5z" />
<path
d="M7.646 1.146a.5.5 0 0 1 .708 0l3 3a.5.5 0 0 1-.708.708L8.5 2.707V11.5a.5.5 0 0 1-1 0V2.707L5.354 4.854a.5.5 0 1 1-.708-.708l3-3z" />
</svg>
</label> </label>
<input type="file" id="file" name="file" multiple> <input type="file" id="file" name="file" multiple>
</div> </div>
<div class="control new-folder hidden" title="New folder">
<svg width="16" height="16" viewBox="0 0 16 16">
<path
d="m.5 3 .04.87a1.99 1.99 0 0 0-.342 1.311l.637 7A2 2 0 0 0 2.826 14H9v-1H2.826a1 1 0 0 1-.995-.91l-.637-7A1 1 0 0 1 2.19 4h11.62a1 1 0 0 1 .996 1.09L14.54 8h1.005l.256-2.819A2 2 0 0 0 13.81 3H9.828a2 2 0 0 1-1.414-.586l-.828-.828A2 2 0 0 0 6.172 1H2.5a2 2 0 0 0-2 2zm5.672-1a1 1 0 0 1 .707.293L7.586 3H2.19c-.24 0-.47.042-.683.12L1.5 2.98a1 1 0 0 1 1-.98h3.672z" />
<path
d="M13.5 10a.5.5 0 0 1 .5.5V12h1.5a.5.5 0 1 1 0 1H14v1.5a.5.5 0 1 1-1 0V13h-1.5a.5.5 0 0 1 0-1H13v-1.5a.5.5 0 0 1 .5-.5z" />
</svg>
</div>
<div class="control new-file hidden" title="New File">
<svg width="16" height="16" viewBox="0 0 16 16">
<path
d="M8 6.5a.5.5 0 0 1 .5.5v1.5H10a.5.5 0 0 1 0 1H8.5V11a.5.5 0 0 1-1 0V9.5H6a.5.5 0 0 1 0-1h1.5V7a.5.5 0 0 1 .5-.5z" />
<path
d="M14 4.5V14a2 2 0 0 1-2 2H4a2 2 0 0 1-2-2V2a2 2 0 0 1 2-2h5.5L14 4.5zm-3 0A1.5 1.5 0 0 1 9.5 3V1H4a1 1 0 0 0-1 1v12a1 1 0 0 0 1 1h8a1 1 0 0 0 1-1V4.5h-2z" />
</svg>
</div>
</div> </div>
<form class="searchbar hidden"> <form class="searchbar hidden">
<div class="icon"> <div class="icon">
<svg width="16" height="16" fill="currentColor" viewBox="0 0 16 16"><path d="M11.742 10.344a6.5 6.5 0 1 0-1.397 1.398h-.001c.03.04.062.078.098.115l3.85 3.85a1 1 0 0 0 1.415-1.414l-3.85-3.85a1.007 1.007 0 0 0-.115-.1zM12 6.5a5.5 5.5 0 1 1-11 0 5.5 5.5 0 0 1 11 0z"/></svg> <svg width="16" height="16" viewBox="0 0 16 16">
<path
d="M11.742 10.344a6.5 6.5 0 1 0-1.397 1.398h-.001c.03.04.062.078.098.115l3.85 3.85a1 1 0 0 0 1.415-1.414l-3.85-3.85a1.007 1.007 0 0 0-.115-.1zM12 6.5a5.5 5.5 0 1 1-11 0 5.5 5.5 0 0 1 11 0z" />
</svg>
</div> </div>
<input id="search" name="q" type="text" maxlength="128" autocomplete="off" tabindex="1"> <input id="search" name="q" type="text" maxlength="128" autocomplete="off" tabindex="1">
<input type="submit" hidden /> <input type="submit" hidden />
</form> </form>
<div class="toolbox2">
<div class="login-btn hidden" title="Login for upload/move/delete/edit permissions">
<svg width="16" height="16" viewBox="0 0 16 16">
<path fill-rule="evenodd"
d="M6 3.5a.5.5 0 0 1 .5-.5h8a.5.5 0 0 1 .5.5v9a.5.5 0 0 1-.5.5h-8a.5.5 0 0 1-.5-.5v-2a.5.5 0 0 0-1 0v2A1.5 1.5 0 0 0 6.5 14h8a1.5 1.5 0 0 0 1.5-1.5v-9A1.5 1.5 0 0 0 14.5 2h-8A1.5 1.5 0 0 0 5 3.5v2a.5.5 0 0 0 1 0v-2z" />
<path fill-rule="evenodd"
d="M11.854 8.354a.5.5 0 0 0 0-.708l-3-3a.5.5 0 1 0-.708.708L10.293 7.5H1.5a.5.5 0 0 0 0 1h8.793l-2.147 2.146a.5.5 0 0 0 .708.708l3-3z" />
</svg>
</div>
<div class="user-btn hidden">
<svg width="16" height="16" viewBox="0 0 16 16">
<path
d="M8 8a3 3 0 1 0 0-6 3 3 0 0 0 0 6Zm2-3a2 2 0 1 1-4 0 2 2 0 0 1 4 0Zm4 8c0 1-1 1-1 1H3s-1 0-1-1 1-4 6-4 6 3 6 4Zm-1-.004c-.001-.246-.154-.986-.832-1.664C11.516 10.68 10.289 10 8 10c-2.29 0-3.516.68-4.168 1.332-.678.678-.83 1.418-.832 1.664h10Z" />
</svg>
</div>
<div class="save-btn hidden" title="Save file">
<svg viewBox="0 0 1024 1024" width="24" height="24">
<path
d="M426.666667 682.666667v42.666666h170.666666v-42.666666h-170.666666z m-42.666667-85.333334h298.666667v128h42.666666V418.133333L605.866667 298.666667H298.666667v426.666666h42.666666v-128h42.666667z m260.266667-384L810.666667 379.733333V810.666667H213.333333V213.333333h430.933334zM341.333333 341.333333h85.333334v170.666667H341.333333V341.333333z"
fill="#444444" p-id="8311"></path>
</svg>
</div>
</div>
</div> </div>
<div class="main"> <div class="main">
<div class="empty-folder hidden"></div> <div class="index-page hidden">
<table class="uploaders-table hidden"> <div class="empty-folder hidden"></div>
<thead> <table class="uploaders-table hidden">
<tr> <thead>
<th class="cell-name" colspan="2">Name</th> <tr>
<th class="cell-status">Progress</th> <th class="cell-name" colspan="2">Name</th>
</tr> <th class="cell-status">Progress</th>
</thead> </tr>
</table> </thead>
<table class="paths-table hidden"> </table>
<thead> <table class="paths-table hidden">
<tr> <thead>
<th class="cell-name" colspan="2">Name</th> </thead>
<th class="cell-mtime">Last modified</th> <tbody>
<th class="cell-size">Size</th> </tbody>
<th class="cell-actions">Actions</th> </table>
</tr> </div>
</thead> <div class="editor-page hidden">
<tbody> <div class="not-editable hidden"></div>
</tbody> <textarea class="editor hidden" cols="10"></textarea>
</table> </div>
</div> </div>
<script> <script>
window.addEventListener("DOMContentLoaded", ready); window.addEventListener("DOMContentLoaded", ready);
</script> </script>
</body> </body>
</html> </html>

View File

@@ -6,17 +6,64 @@
* @property {number} size * @property {number} size
*/ */
// https://stackoverflow.com/a/901144/3642588 /**
const params = new Proxy(new URLSearchParams(window.location.search), { * @typedef {object} DATA
get: (searchParams, prop) => searchParams.get(prop), * @property {string} href
}); * @property {string} uri_prefix
* @property {"Index" | "Edit"} kind
* @property {PathItem[]} paths
* @property {boolean} allow_upload
* @property {boolean} allow_delete
* @property {boolean} allow_search
* @property {boolean} allow_archive
* @property {boolean} auth
* @property {string} user
* @property {boolean} dir_exists
* @property {string} editable
*/
const dirEmptyNote = params.q ? 'No results' : DATA.dir_exists ? 'Empty folder' : 'Folder will be created when a file is uploaded'; /**
* @type {DATA} DATA
*/
var DATA;
/**
* @type {PARAMS}
* @typedef {object} PARAMS
* @property {string} q
* @property {string} sort
* @property {string} order
*/
const PARAMS = Object.fromEntries(new URLSearchParams(window.location.search).entries());
const IFRAME_FORMATS = [
".pdf",
".jpg", ".jpeg", ".png", ".gif", ".bmp", ".svg",
".mp4", ".mov", ".avi", ".wmv", ".flv", ".webm",
".mp3", ".ogg", ".wav", ".m4a",
];
const dirEmptyNote = PARAMS.q ? 'No results' : DATA.dir_exists ? 'Empty folder' : 'Folder will be created when a file is uploaded';
const ICONS = {
dir: `<svg height="16" viewBox="0 0 14 16" width="14"><path fill-rule="evenodd" d="M13 4H7V3c0-.66-.31-1-1-1H1c-.55 0-1 .45-1 1v10c0 .55.45 1 1 1h12c.55 0 1-.45 1-1V5c0-.55-.45-1-1-1zM6 4H1V3h5v1z"></path></svg>`,
symlinkFile: `<svg height="16" viewBox="0 0 12 16" width="12"><path fill-rule="evenodd" d="M8.5 1H1c-.55 0-1 .45-1 1v12c0 .55.45 1 1 1h10c.55 0 1-.45 1-1V4.5L8.5 1zM11 14H1V2h7l3 3v9zM6 4.5l4 3-4 3v-2c-.98-.02-1.84.22-2.55.7-.71.48-1.19 1.25-1.45 2.3.02-1.64.39-2.88 1.13-3.73.73-.84 1.69-1.27 2.88-1.27v-2H6z"></path></svg>`,
symlinkDir: `<svg height="16" viewBox="0 0 14 16" width="14"><path fill-rule="evenodd" d="M13 4H7V3c0-.66-.31-1-1-1H1c-.55 0-1 .45-1 1v10c0 .55.45 1 1 1h12c.55 0 1-.45 1-1V5c0-.55-.45-1-1-1zM1 3h5v1H1V3zm6 9v-2c-.98-.02-1.84.22-2.55.7-.71.48-1.19 1.25-1.45 2.3.02-1.64.39-2.88 1.13-3.73C4.86 8.43 5.82 8 7.01 8V6l4 3-4 3H7z"></path></svg>`,
file: `<svg height="16" viewBox="0 0 12 16" width="12"><path fill-rule="evenodd" d="M6 5H2V4h4v1zM2 8h7V7H2v1zm0 2h7V9H2v1zm0 2h7v-1H2v1zm10-7.5V14c0 .55-.45 1-1 1H1c-.55 0-1-.45-1-1V2c0-.55.45-1 1-1h7.5L12 4.5zM11 5L8 2H1v12h10V5z"></path></svg>`,
download: `<svg width="16" height="16" viewBox="0 0 16 16"><path d="M.5 9.9a.5.5 0 0 1 .5.5v2.5a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-2.5a.5.5 0 0 1 1 0v2.5a2 2 0 0 1-2 2H2a2 2 0 0 1-2-2v-2.5a.5.5 0 0 1 .5-.5z"/><path d="M7.646 11.854a.5.5 0 0 0 .708 0l3-3a.5.5 0 0 0-.708-.708L8.5 10.293V1.5a.5.5 0 0 0-1 0v8.793L5.354 8.146a.5.5 0 1 0-.708.708l3 3z"/></svg>`,
move: `<svg width="16" height="16" viewBox="0 0 16 16"><path fill-rule="evenodd" d="M1.5 1.5A.5.5 0 0 0 1 2v4.8a2.5 2.5 0 0 0 2.5 2.5h9.793l-3.347 3.346a.5.5 0 0 0 .708.708l4.2-4.2a.5.5 0 0 0 0-.708l-4-4a.5.5 0 0 0-.708.708L13.293 8.3H3.5A1.5 1.5 0 0 1 2 6.8V2a.5.5 0 0 0-.5-.5z"/></svg>`,
edit: `<svg width="16" height="16" viewBox="0 0 16 16"><path d="M12.146.146a.5.5 0 0 1 .708 0l3 3a.5.5 0 0 1 0 .708l-10 10a.5.5 0 0 1-.168.11l-5 2a.5.5 0 0 1-.65-.65l2-5a.5.5 0 0 1 .11-.168l10-10zM11.207 2.5 13.5 4.793 14.793 3.5 12.5 1.207 11.207 2.5zm1.586 3L10.5 3.207 4 9.707V10h.5a.5.5 0 0 1 .5.5v.5h.5a.5.5 0 0 1 .5.5v.5h.293l6.5-6.5zm-9.761 5.175-.106.106-1.528 3.821 3.821-1.528.106-.106A.5.5 0 0 1 5 12.5V12h-.5a.5.5 0 0 1-.5-.5V11h-.5a.5.5 0 0 1-.468-.325z"/></svg>`,
delete: `<svg width="16" height="16" fill="currentColor"viewBox="0 0 16 16"><path d="M6.854 7.146a.5.5 0 1 0-.708.708L7.293 9l-1.147 1.146a.5.5 0 0 0 .708.708L8 9.707l1.146 1.147a.5.5 0 0 0 .708-.708L8.707 9l1.147-1.146a.5.5 0 0 0-.708-.708L8 8.293 6.854 7.146z"/><path d="M14 14V4.5L9.5 0H4a2 2 0 0 0-2 2v12a2 2 0 0 0 2 2h8a2 2 0 0 0 2-2zM9.5 3A1.5 1.5 0 0 0 11 4.5h2V14a1 1 0 0 1-1 1H4a1 1 0 0 1-1-1V2a1 1 0 0 1 1-1h5.5v2z"/></svg>`,
}
/** /**
* @type Element * @type Element
*/ */
let $pathsTable; let $pathsTable;
/**
* @type Element
*/
let $pathsTableHead;
/** /**
* @type Element * @type Element
*/ */
@@ -29,61 +76,89 @@ let $uploadersTable;
* @type Element * @type Element
*/ */
let $emptyFolder; let $emptyFolder;
/**
* @type Element
*/
let $editor;
/**
* @type Element
*/
let $userBtn;
function ready() {
$pathsTable = document.querySelector(".paths-table")
$pathsTableHead = document.querySelector(".paths-table thead");
$pathsTableBody = document.querySelector(".paths-table tbody");
$uploadersTable = document.querySelector(".uploaders-table");
$emptyFolder = document.querySelector(".empty-folder");
$editor = document.querySelector(".editor");
$userBtn = document.querySelector(".user-btn");
addBreadcrumb(DATA.href, DATA.uri_prefix);
if (DATA.kind == "Index") {
document.title = `Index of ${DATA.href} - Dufs`;
document.querySelector(".index-page").classList.remove("hidden");
setupIndexPage();
} else if (DATA.kind == "Edit") {
document.title = `Edit of ${DATA.href} - Dufs`;
document.querySelector(".editor-page").classList.remove("hidden");;
setupEditPage();
}
}
class Uploader { class Uploader {
/** /**
* @type number *
* @param {File} file
* @param {string[]} dirs
*/ */
idx;
/**
* @type File
*/
file;
/**
* @type string
*/
name;
/**
* @type Element
*/
$uploadStatus;
/**
* @type number
*/
uploaded = 0;
/**
* @type number
*/
lastUptime = 0;
static globalIdx = 0;
constructor(file, dirs) { constructor(file, dirs) {
/**
* @type Element
*/
this.$uploadStatus = null
this.uploaded = 0;
this.lastUptime = 0;
this.name = [...dirs, file.name].join("/"); this.name = [...dirs, file.name].join("/");
this.idx = Uploader.globalIdx++; this.idx = Uploader.globalIdx++;
this.file = file; this.file = file;
} }
upload() { upload() {
const { file, idx, name } = this; const { idx, name } = this;
let url = getUrl(name); const url = newUrl(name);
const encodedName = encodedStr(name);
$uploadersTable.insertAdjacentHTML("beforeend", ` $uploadersTable.insertAdjacentHTML("beforeend", `
<tr id="upload${idx}" class="uploader"> <tr id="upload${idx}" class="uploader">
<td class="path cell-icon"> <td class="path cell-icon">
${getSvg(file.path_type)} ${getPathSvg()}
</td> </td>
<td class="path cell-name"> <td class="path cell-name">
<a href="${url}">${name}</a> <a href="${url}">${encodedName}</a>
</td> </td>
<td class="cell-status upload-status" id="uploadStatus${idx}"></td> <td class="cell-status upload-status" id="uploadStatus${idx}"></td>
</tr>`); </tr>`);
$uploadersTable.classList.remove("hidden"); $uploadersTable.classList.remove("hidden");
$emptyFolder.classList.add("hidden"); $emptyFolder.classList.add("hidden");
this.$uploadStatus = document.getElementById(`uploadStatus${idx}`); this.$uploadStatus = document.getElementById(`uploadStatus${idx}`);
this.lastUptime = Date.now(); this.$uploadStatus.innerHTML = '-';
Uploader.queues.push(this);
Uploader.runQueue();
}
ajax() {
Uploader.runnings += 1;
const url = newUrl(this.name);
this.lastUptime = Date.now();
const ajax = new XMLHttpRequest(); const ajax = new XMLHttpRequest();
ajax.upload.addEventListener("progress", e => this.progress(e), false); ajax.upload.addEventListener("progress", e => this.progress(e), false);
ajax.addEventListener("readystatechange", () => { ajax.addEventListener("readystatechange", () => {
if(ajax.readyState === 4) { if (ajax.readyState === 4) {
if (ajax.status >= 200 && ajax.status < 300) { if (ajax.status >= 200 && ajax.status < 300) {
this.complete(); this.complete();
} else { } else {
@@ -94,13 +169,14 @@ class Uploader {
ajax.addEventListener("error", () => this.fail(), false); ajax.addEventListener("error", () => this.fail(), false);
ajax.addEventListener("abort", () => this.fail(), false); ajax.addEventListener("abort", () => this.fail(), false);
ajax.open("PUT", url); ajax.open("PUT", url);
ajax.send(file); ajax.send(this.file);
} }
progress(event) { progress(event) {
let now = Date.now(); const now = Date.now();
let speed = (event.loaded - this.uploaded) / (now - this.lastUptime) * 1000; const speed = (event.loaded - this.uploaded) / (now - this.lastUptime) * 1000;
let [speedValue, speedUnit] = formatSize(speed); const [speedValue, speedUnit] = formatSize(speed);
const speedText = `${speedValue}${speedUnit.toLowerCase()}/s`; const speedText = `${speedValue}${speedUnit.toLowerCase()}/s`;
const progress = formatPercent((event.loaded / event.total) * 100); const progress = formatPercent((event.loaded / event.total) * 100);
const duration = formatDuration((event.total - event.loaded) / speed) const duration = formatDuration((event.total - event.loaded) / speed)
@@ -111,13 +187,40 @@ class Uploader {
complete() { complete() {
this.$uploadStatus.innerHTML = ``; this.$uploadStatus.innerHTML = ``;
Uploader.runnings -= 1;
Uploader.runQueue();
} }
fail() { fail() {
this.$uploadStatus.innerHTML = ``; this.$uploadStatus.innerHTML = ``;
Uploader.runnings -= 1;
Uploader.runQueue();
} }
} }
Uploader.globalIdx = 0;
Uploader.runnings = 0;
Uploader.auth = false;
/**
* @type Uploader[]
*/
Uploader.queues = [];
Uploader.runQueue = async () => {
if (Uploader.runnings > 2) return;
let uploader = Uploader.queues.shift();
if (!uploader) return;
if (!Uploader.auth) {
Uploader.auth = true;
await login();
}
uploader.ajax();
}
/** /**
* Add breadcrumb * Add breadcrumb
* @param {string} href * @param {string} href
@@ -137,16 +240,17 @@ function addBreadcrumb(href, uri_prefix) {
const name = parts[i]; const name = parts[i];
if (i > 0) { if (i > 0) {
if (!path.endsWith("/")) { if (!path.endsWith("/")) {
path += "/"; path += "/";
} }
path += encodeURI(name); path += encodeURIComponent(name);
} }
const encodedName = encodedStr(name);
if (i === 0) { if (i === 0) {
$breadcrumb.insertAdjacentHTML("beforeend", `<a href="${path}"><svg width="16" height="16" viewBox="0 0 16 16"><path d="M6.5 14.5v-3.505c0-.245.25-.495.5-.495h2c.25 0 .5.25.5.5v3.5a.5.5 0 0 0 .5.5h4a.5.5 0 0 0 .5-.5v-7a.5.5 0 0 0-.146-.354L13 5.793V2.5a.5.5 0 0 0-.5-.5h-1a.5.5 0 0 0-.5.5v1.293L8.354 1.146a.5.5 0 0 0-.708 0l-6 6A.5.5 0 0 0 1.5 7.5v7a.5.5 0 0 0 .5.5h4a.5.5 0 0 0 .5-.5z"/></svg></a>`); $breadcrumb.insertAdjacentHTML("beforeend", `<a href="${path}"><svg width="16" height="16" viewBox="0 0 16 16"><path d="M6.5 14.5v-3.505c0-.245.25-.495.5-.495h2c.25 0 .5.25.5.5v3.5a.5.5 0 0 0 .5.5h4a.5.5 0 0 0 .5-.5v-7a.5.5 0 0 0-.146-.354L13 5.793V2.5a.5.5 0 0 0-.5-.5h-1a.5.5 0 0 0-.5.5v1.293L8.354 1.146a.5.5 0 0 0-.708 0l-6 6A.5.5 0 0 0 1.5 7.5v7a.5.5 0 0 0 .5.5h4a.5.5 0 0 0 .5-.5z"/></svg></a>`);
} else if (i === len - 1) { } else if (i === len - 1) {
$breadcrumb.insertAdjacentHTML("beforeend", `<b>${name}</b>`); $breadcrumb.insertAdjacentHTML("beforeend", `<b>${encodedName}</b>`);
} else { } else {
$breadcrumb.insertAdjacentHTML("beforeend", `<a href="${path}">${name}</a>`); $breadcrumb.insertAdjacentHTML("beforeend", `<a href="${path}">${encodedName}</a>`);
} }
if (i !== len - 1) { if (i !== len - 1) {
$breadcrumb.insertAdjacentHTML("beforeend", `<span class="separator">/</span>`); $breadcrumb.insertAdjacentHTML("beforeend", `<span class="separator">/</span>`);
@@ -154,50 +258,147 @@ function addBreadcrumb(href, uri_prefix) {
} }
} }
function setupIndexPage() {
if (DATA.allow_archive) {
const $download = document.querySelector(".download");
$download.href = baseUrl() + "?zip";
$download.title = "Download folder as div .zip file";
$download.classList.remove("hidden");
}
if (DATA.allow_upload) {
setupDropzone();
setupUploadFile();
setupNewFolder();
setupNewFile();
}
if (DATA.auth) {
setupAuth();
}
if (DATA.allow_search) {
setupSearch()
}
renderPathsTableHead();
renderPathsTableBody();
}
/**
* Render path table thead
*/
function renderPathsTableHead() {
const headerItems = [
{
name: "name",
props: `colspan="2"`,
text: "Name",
},
{
name: "mtime",
props: ``,
text: "Last Modified",
},
{
name: "size",
props: ``,
text: "Size",
}
];
$pathsTableHead.insertAdjacentHTML("beforeend", `
<tr>
${headerItems.map(item => {
let svg = `<svg width="12" height="12" viewBox="0 0 16 16"><path fill-rule="evenodd" d="M11.5 15a.5.5 0 0 0 .5-.5V2.707l3.146 3.147a.5.5 0 0 0 .708-.708l-4-4a.5.5 0 0 0-.708 0l-4 4a.5.5 0 1 0 .708.708L11 2.707V14.5a.5.5 0 0 0 .5.5zm-7-14a.5.5 0 0 1 .5.5v11.793l3.146-3.147a.5.5 0 0 1 .708.708l-4 4a.5.5 0 0 1-.708 0l-4-4a.5.5 0 0 1 .708-.708L4 13.293V1.5a.5.5 0 0 1 .5-.5z"/></svg>`;
let order = "asc";
if (PARAMS.sort === item.name) {
if (PARAMS.order === "asc") {
order = "desc";
svg = `<svg width="12" height="12" viewBox="0 0 16 16"><path fill-rule="evenodd" d="M8 15a.5.5 0 0 0 .5-.5V2.707l3.146 3.147a.5.5 0 0 0 .708-.708l-4-4a.5.5 0 0 0-.708 0l-4 4a.5.5 0 1 0 .708.708L7.5 2.707V14.5a.5.5 0 0 0 .5.5z"/></svg>`
} else {
svg = `<svg width="12" height="12" viewBox="0 0 16 16"><path fill-rule="evenodd" d="M8 1a.5.5 0 0 1 .5.5v11.793l3.146-3.147a.5.5 0 0 1 .708.708l-4 4a.5.5 0 0 1-.708 0l-4-4a.5.5 0 0 1 .708-.708L7.5 13.293V1.5A.5.5 0 0 1 8 1z"/></svg>`
}
}
const qs = new URLSearchParams({ ...PARAMS, order, sort: item.name }).toString();
const icon = `<span>${svg}</span>`
return `<th class="cell-${item.name}" ${item.props}><a href="?${qs}">${item.text}${icon}</a></th>`
}).join("\n")}
<th class="cell-actions">Actions</th>
</tr>
`);
}
/**
* Render path table tbody
*/
function renderPathsTableBody() {
if (DATA.paths && DATA.paths.length > 0) {
const len = DATA.paths.length;
if (len > 0) {
$pathsTable.classList.remove("hidden");
}
for (let i = 0; i < len; i++) {
addPath(DATA.paths[i], i);
}
} else {
$emptyFolder.textContent = dirEmptyNote;
$emptyFolder.classList.remove("hidden");
}
}
/** /**
* Add pathitem * Add pathitem
* @param {PathItem} file * @param {PathItem} file
* @param {number} index * @param {number} index
*/ */
function addPath(file, index) { function addPath(file, index) {
let url = getUrl(file.name) const encodedName = encodedStr(file.name);
let url = newUrl(file.name)
let actionDelete = ""; let actionDelete = "";
let actionDownload = ""; let actionDownload = "";
if (file.path_type.endsWith("Dir")) { let actionMove = "";
let actionEdit = "";
let isDir = file.path_type.endsWith("Dir");
if (isDir) {
url += "/"; url += "/";
actionDownload = ` if (DATA.allow_archive) {
<div class="action-btn"> actionDownload = `
<a href="${url}?zip" title="Download folder as a .zip file"> <div class="action-btn">
<svg width="16" height="16" viewBox="0 0 16 16"><path d="M.5 9.9a.5.5 0 0 1 .5.5v2.5a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-2.5a.5.5 0 0 1 1 0v2.5a2 2 0 0 1-2 2H2a2 2 0 0 1-2-2v-2.5a.5.5 0 0 1 .5-.5z"/><path d="M7.646 11.854a.5.5 0 0 0 .708 0l3-3a.5.5 0 0 0-.708-.708L8.5 10.293V1.5a.5.5 0 0 0-1 0v8.793L5.354 8.146a.5.5 0 1 0-.708.708l3 3z"/></svg> <a href="${url}?zip" title="Download folder as a .zip file">${ICONS.download}</a>
</a> </div>`;
</div>`; }
} else { } else {
actionDownload = ` actionDownload = `
<div class="action-btn" > <div class="action-btn" >
<a href="${url}" title="Download file" download> <a href="${url}" title="Download file" download>${ICONS.download}</a>
<svg width="16" height="16" viewBox="0 0 16 16"><path d="M.5 9.9a.5.5 0 0 1 .5.5v2.5a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-2.5a.5.5 0 0 1 1 0v2.5a2 2 0 0 1-2 2H2a2 2 0 0 1-2-2v-2.5a.5.5 0 0 1 .5-.5z"/><path d="M7.646 11.854a.5.5 0 0 0 .708 0l3-3a.5.5 0 0 0-.708-.708L8.5 10.293V1.5a.5.5 0 0 0-1 0v8.793L5.354 8.146a.5.5 0 1 0-.708.708l3 3z"/></svg>
</a>
</div>`; </div>`;
} }
if (DATA.allow_delete) { if (DATA.allow_delete) {
if (DATA.allow_upload) {
if (isDir) {
actionMove = `<div onclick="movePath(${index})" class="action-btn" id="moveBtn${index}" title="Move to new path">${ICONS.move}</div>`;
} else {
actionEdit = `<a class="action-btn" title="Edit file" target="_blank" href="${url}?edit">${ICONS.edit}</a>`;
}
}
actionDelete = ` actionDelete = `
<div onclick="deletePath(${index})" class="action-btn" id="deleteBtn${index}" title="Delete ${file.name}"> <div onclick="deletePath(${index})" class="action-btn" id="deleteBtn${index}" title="Delete">${ICONS.delete}</div>`;
<svg width="16" height="16" fill="currentColor"viewBox="0 0 16 16"><path d="M6.854 7.146a.5.5 0 1 0-.708.708L7.293 9l-1.147 1.146a.5.5 0 0 0 .708.708L8 9.707l1.146 1.147a.5.5 0 0 0 .708-.708L8.707 9l1.147-1.146a.5.5 0 0 0-.708-.708L8 8.293 6.854 7.146z"/><path d="M14 14V4.5L9.5 0H4a2 2 0 0 0-2 2v12a2 2 0 0 0 2 2h8a2 2 0 0 0 2-2zM9.5 3A1.5 1.5 0 0 0 11 4.5h2V14a1 1 0 0 1-1 1H4a1 1 0 0 1-1-1V2a1 1 0 0 1 1-1h5.5v2z"/></svg>
</div>`;
} }
let actionCell = ` let actionCell = `
<td class="cell-actions"> <td class="cell-actions">
${actionDownload} ${actionDownload}
${actionMove}
${actionEdit}
${actionDelete} ${actionDelete}
</td>` </td>`
$pathsTableBody.insertAdjacentHTML("beforeend", ` $pathsTableBody.insertAdjacentHTML("beforeend", `
<tr id="addPath${index}"> <tr id="addPath${index}">
<td class="path cell-icon"> <td class="path cell-icon">
${getSvg(file.path_type)} ${getPathSvg(file.path_type)}
</td> </td>
<td class="path cell-name"> <td class="path cell-name">
<a href="${url}" title="${file.name}">${file.name}</a> <a href="${url}" ${isDir ? "" : `target="_blank"`}>${encodedName}</a>
</td> </td>
<td class="cell-mtime">${formatMtime(file.mtime)}</td> <td class="cell-mtime">${formatMtime(file.mtime)}</td>
<td class="cell-size">${formatSize(file.size).join(" ")}</td> <td class="cell-size">${formatSize(file.size).join(" ")}</td>
@@ -205,59 +406,292 @@ function addPath(file, index) {
</tr>`) </tr>`)
} }
function setupDropzone() {
["drag", "dragstart", "dragend", "dragover", "dragenter", "dragleave", "drop"].forEach(name => {
document.addEventListener(name, e => {
e.preventDefault();
e.stopPropagation();
});
});
document.addEventListener("drop", async e => {
if (!e.dataTransfer.items[0].webkitGetAsEntry) {
const files = e.dataTransfer.files.filter(v => v.size > 0);
for (const file of files) {
new Uploader(file, []).upload();
}
} else {
const entries = [];
const len = e.dataTransfer.items.length;
for (let i = 0; i < len; i++) {
entries.push(e.dataTransfer.items[i].webkitGetAsEntry());
}
addFileEntries(entries, [])
}
});
}
function setupAuth() {
if (DATA.user) {
$userBtn.classList.remove("hidden");
$userBtn.title = DATA.user;
} else {
const $loginBtn = document.querySelector(".login-btn");
$loginBtn.classList.remove("hidden");
$loginBtn.addEventListener("click", () => login(true));
}
}
function setupSearch() {
const $searchbar = document.querySelector(".searchbar");
$searchbar.classList.remove("hidden");
$searchbar.addEventListener("submit", event => {
event.preventDefault();
const formData = new FormData($searchbar);
const q = formData.get("q");
let href = baseUrl();
if (q) {
href += "?q=" + q;
}
location.href = href;
});
if (PARAMS.q) {
document.getElementById('search').value = PARAMS.q;
}
}
function setupUploadFile() {
document.querySelector(".upload-file").classList.remove("hidden");
document.getElementById("file").addEventListener("change", async e => {
const files = e.target.files;
for (let file of files) {
new Uploader(file, []).upload();
}
});
}
function setupNewFolder() {
const $newFolder = document.querySelector(".new-folder");
$newFolder.classList.remove("hidden");
$newFolder.addEventListener("click", () => {
const name = prompt("Enter folder name");
if (name) createFolder(name);
});
}
function setupNewFile() {
const $newFile = document.querySelector(".new-file");
$newFile.classList.remove("hidden");
$newFile.addEventListener("click", () => {
const name = prompt("Enter file name");
if (name) createFile(name);
});
}
async function setupEditPage() {
const url = baseUrl();
const $download = document.querySelector(".download");
$download.classList.remove("hidden");
$download.href = url;
const $moveFile = document.querySelector(".move-file");
$moveFile.classList.remove("hidden");
$moveFile.addEventListener("click", async () => {
const query = location.href.slice(url.length);
const newFileUrl = await doMovePath(url);
if (newFileUrl) {
location.href = newFileUrl + query;
}
});
const $deleteFile = document.querySelector(".delete-file");
$deleteFile.classList.remove("hidden");
$deleteFile.addEventListener("click", async () => {
const url = baseUrl();
const name = baseName(url);
await doDeletePath(name, url, () => {
location.href = location.href.split("/").slice(0, -1).join("/");
});
})
if (!DATA.editable) {
const $notEditable = document.querySelector(".not-editable");
const url = baseUrl();
const ext = extName(baseName(url));
if (IFRAME_FORMATS.find(v => v === ext)) {
$notEditable.insertAdjacentHTML("afterend", `<iframe src="${url}" sandbox width="100%" height="${window.innerHeight - 100}px"></iframe>`)
} else {
$notEditable.classList.remove("hidden");
$notEditable.textContent = "Cannot edit because it is too large or binary.";
}
return;
}
const $saveBtn = document.querySelector(".save-btn");
$saveBtn.classList.remove("hidden");
$saveBtn.addEventListener("click", saveChange);
$editor.classList.remove("hidden");
try {
const res = await fetch(baseUrl());
await assertResOK(res);
const text = await res.text();
$editor.value = text;
} catch (err) {
alert(`Failed get file, ${err.message}`);
}
}
/** /**
* Delete pathitem * Delete path
* @param {number} index * @param {number} index
* @returns * @returns
*/ */
async function deletePath(index) { async function deletePath(index) {
const file = DATA.paths[index]; const file = DATA.paths[index];
if (!file) return; if (!file) return;
await doDeletePath(file.name, newUrl(file.name), () => {
document.getElementById(`addPath${index}`)?.remove();
DATA.paths[index] = null;
if (!DATA.paths.find(v => !!v)) {
$pathsTable.classList.add("hidden");
$emptyFolder.textContent = dirEmptyNote;
$emptyFolder.classList.remove("hidden");
}
})
}
if (!confirm(`Delete \`${file.name}\`?`)) return; async function doDeletePath(name, url, cb) {
if (!confirm(`Delete \`${name}\`?`)) return;
try { try {
const res = await fetch(getUrl(file.name), { await login();
const res = await fetch(url, {
method: "DELETE", method: "DELETE",
}); });
if (res.status >= 200 && res.status < 300) { await assertResOK(res);
document.getElementById(`addPath${index}`).remove(); cb();
DATA.paths[index] = null;
if (!DATA.paths.find(v => !!v)) {
$pathsTable.classList.add("hidden");
$emptyFolder.textContent = dirEmptyNote;
$emptyFolder.classList.remove("hidden");
}
} else {
throw new Error(await res.text())
}
} catch (err) { } catch (err) {
alert(`Cannot delete \`${file.name}\`, ${err.message}`); alert(`Cannot delete \`${file.name}\`, ${err.message}`);
} }
} }
function dropzone() { /**
["drag", "dragstart", "dragend", "dragover", "dragenter", "dragleave", "drop"].forEach(name => { * Move path
document.addEventListener(name, e => { * @param {number} index
e.preventDefault(); * @returns
e.stopPropagation(); */
}); async function movePath(index) {
const file = DATA.paths[index];
if (!file) return;
const fileUrl = newUrl(file.name);
const newFileUrl = await doMovePath(fileUrl);
if (newFileUrl) {
location.href = newFileUrl.split("/").slice(0, -1).join("/");
}
}
async function doMovePath(fileUrl) {
const fileUrlObj = new URL(fileUrl)
const prefix = DATA.uri_prefix.slice(0, -1);
const filePath = decodeURIComponent(fileUrlObj.pathname.slice(prefix.length));
let newPath = prompt("Enter new path", filePath)
if (!newPath) return;
if (!newPath.startsWith("/")) newPath = "/" + newPath;
if (filePath === newPath) return;
const newFileUrl = fileUrlObj.origin + prefix + newPath.split("/").map(encodeURIComponent).join("/");
try {
await login();
const res1 = await fetch(newFileUrl, {
method: "HEAD",
}); });
document.addEventListener("drop", e => { if (res1.status === 200) {
if (!e.dataTransfer.items[0].webkitGetAsEntry) { if (!confirm("Override exsis file?")) {
const files = e.dataTransfer.files.filter(v => v.size > 0); return;
for (const file of files) { }
new Uploader(file, []).upload(); }
} const res2 = await fetch(fileUrl, {
} else { method: "MOVE",
const entries = []; headers: {
const len = e.dataTransfer.items.length; "Destination": newFileUrl,
for (let i = 0; i < len; i++) {
entries.push(e.dataTransfer.items[i].webkitGetAsEntry());
}
addFileEntries(entries, [])
} }
}); });
await assertResOK(res2);
return newFileUrl;
} catch (err) {
alert(`Cannot move \`${filePath}\` to \`${newPath}\`, ${err.message}`);
}
}
/**
* Save editor change
*/
async function saveChange() {
try {
await fetch(baseUrl(), {
method: "PUT",
body: $editor.value,
});
} catch (err) {
alert(`Failed to save file, ${err.message}`);
}
}
async function login(alert = false) {
if (!DATA.auth) return;
try {
const res = await fetch(baseUrl(), {
method: "WRITEABLE",
});
await assertResOK(res);
document.querySelector(".login-btn").classList.add("hidden");
$userBtn.classList.remove("hidden");
$userBtn.title = "";
} catch (err) {
let message = `Cannot login, ${err.message}`;
if (alert) {
alert(message);
} else {
throw new Error(message);
}
}
}
/**
* Create a folder
* @param {string} name
*/
async function createFolder(name) {
const url = newUrl(name);
try {
await login();
const res = await fetch(url, {
method: "MKCOL",
});
await assertResOK(res);
location.href = url;
} catch (err) {
alert(`Cannot create folder \`${name}\`, ${err.message}`);
}
}
async function createFile(name) {
const url = newUrl(name);
try {
await login();
const res = await fetch(url, {
method: "PUT",
body: "",
});
await assertResOK(res);
location.href = url + "?edit";
} catch (err) {
alert(`Cannot create file \`${name}\`, ${err.message}`);
}
} }
async function addFileEntries(entries, dirs) { async function addFileEntries(entries, dirs) {
@@ -274,23 +708,41 @@ async function addFileEntries(entries, dirs) {
} }
function getUrl(name) { function newUrl(name) {
let url = location.href.split('?')[0]; let url = baseUrl();
if (!url.endsWith("/")) url += "/"; if (!url.endsWith("/")) url += "/";
url += encodeURI(name); url += name.split("/").map(encodeURIComponent).join("/");
return url; return url;
} }
function getSvg(path_type) { function baseUrl() {
return location.href.split('?')[0];
}
function baseName(url) {
return decodeURIComponent(url.split("/").filter(v => v.length > 0).slice(-1)[0])
}
function extName(filename) {
const dotIndex = filename.lastIndexOf('.');
if (dotIndex === -1 || dotIndex === 0 || dotIndex === filename.length - 1) {
return '';
}
return filename.substring(dotIndex);
}
function getPathSvg(path_type) {
switch (path_type) { switch (path_type) {
case "Dir": case "Dir":
return `<svg height="16" viewBox="0 0 14 16" width="14"><path fill-rule="evenodd" d="M13 4H7V3c0-.66-.31-1-1-1H1c-.55 0-1 .45-1 1v10c0 .55.45 1 1 1h12c.55 0 1-.45 1-1V5c0-.55-.45-1-1-1zM6 4H1V3h5v1z"></path></svg>`; return ICONS.dir;
case "File": case "SymlinkFile":
return `<svg height="16" viewBox="0 0 12 16" width="12"><path fill-rule="evenodd" d="M6 5H2V4h4v1zM2 8h7V7H2v1zm0 2h7V9H2v1zm0 2h7v-1H2v1zm10-7.5V14c0 .55-.45 1-1 1H1c-.55 0-1-.45-1-1V2c0-.55.45-1 1-1h7.5L12 4.5zM11 5L8 2H1v12h10V5z"></path></svg>`; return ICONS.symlinkFile;
case "SymlinkDir": case "SymlinkDir":
return `<svg height="16" viewBox="0 0 14 16" width="14"><path fill-rule="evenodd" d="M13 4H7V3c0-.66-.31-1-1-1H1c-.55 0-1 .45-1 1v10c0 .55.45 1 1 1h12c.55 0 1-.45 1-1V5c0-.55-.45-1-1-1zM1 3h5v1H1V3zm6 9v-2c-.98-.02-1.84.22-2.55.7-.71.48-1.19 1.25-1.45 2.3.02-1.64.39-2.88 1.13-3.73C4.86 8.43 5.82 8 7.01 8V6l4 3-4 3H7z"></path></svg>`; return ICONS.symlinkDir;
default: default:
return `<svg height="16" viewBox="0 0 12 16" width="12"><path fill-rule="evenodd" d="M8.5 1H1c-.55 0-1 .45-1 1v12c0 .55.45 1 1 1h10c.55 0 1-.45 1-1V4.5L8.5 1zM11 14H1V2h7l3 3v9zM6 4.5l4 3-4 3v-2c-.98-.02-1.84.22-2.55.7-.71.48-1.19 1.25-1.45 2.3.02-1.64.39-2.88 1.13-3.73.73-.84 1.69-1.27 2.88-1.27v-2H6z"></path></svg>`; return ICONS.file;
} }
} }
@@ -310,65 +762,37 @@ function padZero(value, size) {
} }
function formatSize(size) { function formatSize(size) {
if (!size) return [] if (size == null) return []
const sizes = ['B', 'KB', 'MB', 'GB', 'TB']; const sizes = ['B', 'KB', 'MB', 'GB', 'TB'];
if (size == 0) return [0, "Byte"]; if (size == 0) return [0, "B"];
const i = parseInt(Math.floor(Math.log(size) / Math.log(1024))); const i = parseInt(Math.floor(Math.log(size) / Math.log(1024)));
return [Math.round(size / Math.pow(1024, i), 2), sizes[i]]; return [Math.round(size / Math.pow(1024, i), 2), sizes[i]];
} }
function formatDuration(seconds) { function formatDuration(seconds) {
seconds = Math.ceil(seconds); seconds = Math.ceil(seconds);
let h = Math.floor(seconds / 3600); const h = Math.floor(seconds / 3600);
let m = Math.floor((seconds - h * 3600) / 60); const m = Math.floor((seconds - h * 3600) / 60);
let s = seconds - h * 3600 - m * 60 const s = seconds - h * 3600 - m * 60
return `${padZero(h, 2)}:${padZero(m, 2)}:${padZero(s, 2)}`; return `${padZero(h, 2)}:${padZero(m, 2)}:${padZero(s, 2)}`;
} }
function formatPercent(precent) { function formatPercent(percent) {
if (precent > 10) { if (percent > 10) {
return precent.toFixed(1) + "%"; return percent.toFixed(1) + "%";
} else { } else {
return precent.toFixed(2) + "%"; return percent.toFixed(2) + "%";
} }
} }
function ready() { function encodedStr(rawStr) {
$pathsTable = document.querySelector(".paths-table") return rawStr.replace(/[\u00A0-\u9999<>\&]/g, function (i) {
$pathsTableBody = document.querySelector(".paths-table tbody"); return '&#' + i.charCodeAt(0) + ';';
$uploadersTable = document.querySelector(".uploaders-table"); });
$emptyFolder = document.querySelector(".empty-folder"); }
if (DATA.allow_search) { async function assertResOK(res) {
document.querySelector(".searchbar").classList.remove("hidden"); if (!(res.status >= 200 && res.status < 300)) {
if (params.q) { throw new Error(await res.text())
document.getElementById('search').value = params.q;
}
}
addBreadcrumb(DATA.href, DATA.uri_prefix);
if (Array.isArray(DATA.paths)) {
const len = DATA.paths.length;
if (len > 0) {
$pathsTable.classList.remove("hidden");
}
for (let i = 0; i < len; i++) {
addPath(DATA.paths[i], i);
}
if (len == 0) {
$emptyFolder.textContent = dirEmptyNote;
$emptyFolder.classList.remove("hidden");
}
}
if (DATA.allow_upload) {
dropzone();
document.querySelector(".upload-control").classList.remove("hidden");
document.getElementById("file").addEventListener("change", e => {
const files = e.target.files;
for (let file of files) {
new Uploader(file, []).upload();
}
});
} }
} }

View File

@@ -1,4 +1,8 @@
use clap::{AppSettings, Arg, ArgMatches, Command}; use anyhow::{bail, Context, Result};
use clap::builder::PossibleValuesParser;
use clap::{value_parser, Arg, ArgAction, ArgMatches, Command};
use clap_complete::{generate, Generator, Shell};
#[cfg(feature = "tls")]
use rustls::{Certificate, PrivateKey}; use rustls::{Certificate, PrivateKey};
use std::env; use std::env;
use std::net::IpAddr; use std::net::IpAddr;
@@ -6,11 +10,13 @@ use std::path::{Path, PathBuf};
use crate::auth::AccessControl; use crate::auth::AccessControl;
use crate::auth::AuthMethod; use crate::auth::AuthMethod;
use crate::log_http::{LogHttp, DEFAULT_LOG_FORMAT};
#[cfg(feature = "tls")]
use crate::tls::{load_certs, load_private_key}; use crate::tls::{load_certs, load_private_key};
use crate::BoxResult; use crate::utils::encode_uri;
fn app() -> Command<'static> { pub fn build_cli() -> Command {
Command::new(env!("CARGO_CRATE_NAME")) let app = Command::new(env!("CARGO_CRATE_NAME"))
.version(env!("CARGO_PKG_VERSION")) .version(env!("CARGO_PKG_VERSION"))
.author(env!("CARGO_PKG_AUTHORS")) .author(env!("CARGO_PKG_AUTHORS"))
.about(concat!( .about(concat!(
@@ -18,179 +24,284 @@ fn app() -> Command<'static> {
" - ", " - ",
env!("CARGO_PKG_REPOSITORY") env!("CARGO_PKG_REPOSITORY")
)) ))
.global_setting(AppSettings::DeriveDisplayOrder) .arg(
Arg::new("root")
.env("DUFS_ROOT")
.hide_env(true)
.default_value(".")
.value_parser(value_parser!(PathBuf))
.help("Specific path to serve"),
)
.arg( .arg(
Arg::new("bind") Arg::new("bind")
.env("DUFS_BIND")
.hide_env(true)
.short('b') .short('b')
.long("bind") .long("bind")
.help("Specify bind address") .help("Specify bind address or unix socket")
.multiple_values(true) .action(ArgAction::Append)
.multiple_occurrences(true) .value_delimiter(',')
.value_name("addr"), .value_name("addrs"),
) )
.arg( .arg(
Arg::new("port") Arg::new("port")
.env("DUFS_PORT")
.hide_env(true)
.short('p') .short('p')
.long("port") .long("port")
.default_value("5000") .default_value("5000")
.value_parser(value_parser!(u16))
.help("Specify port to listen on") .help("Specify port to listen on")
.value_name("port"), .value_name("port"),
) )
.arg(
Arg::new("path")
.default_value(".")
.allow_invalid_utf8(true)
.help("Specific path to serve"),
)
.arg( .arg(
Arg::new("path-prefix") Arg::new("path-prefix")
.env("DUFS_PATH_PREFIX")
.hide_env(true)
.long("path-prefix") .long("path-prefix")
.value_name("path") .value_name("path")
.help("Specify an path prefix"), .help("Specify a path prefix"),
)
.arg(
Arg::new("hidden")
.env("DUFS_HIDDEN")
.hide_env(true)
.long("hidden")
.help("Hide paths from directory listings, separated by `,`")
.value_name("value"),
) )
.arg( .arg(
Arg::new("auth") Arg::new("auth")
.env("DUFS_AUTH")
.hide_env(true)
.short('a') .short('a')
.long("auth") .long("auth")
.help("Add auth for path") .help("Add auth for path")
.multiple_values(true) .action(ArgAction::Append)
.multiple_occurrences(true) .value_delimiter(',')
.value_name("rule"), .value_name("rules"),
) )
.arg( .arg(
Arg::new("auth-method") Arg::new("auth-method")
.env("DUFS_AUTH_METHOD")
.hide_env(true)
.long("auth-method") .long("auth-method")
.help("Select auth method") .help("Select auth method")
.possible_values(["basic", "digest"]) .value_parser(PossibleValuesParser::new(["basic", "digest"]))
.default_value("digest") .default_value("digest")
.value_name("value"), .value_name("value"),
) )
.arg( .arg(
Arg::new("allow-all") Arg::new("allow-all")
.env("DUFS_ALLOW_ALL")
.hide_env(true)
.short('A') .short('A')
.long("allow-all") .long("allow-all")
.action(ArgAction::SetTrue)
.help("Allow all operations"), .help("Allow all operations"),
) )
.arg( .arg(
Arg::new("allow-upload") Arg::new("allow-upload")
.env("DUFS_ALLOW_UPLOAD")
.hide_env(true)
.long("allow-upload") .long("allow-upload")
.action(ArgAction::SetTrue)
.help("Allow upload files/folders"), .help("Allow upload files/folders"),
) )
.arg( .arg(
Arg::new("allow-delete") Arg::new("allow-delete")
.env("DUFS_ALLOW_DELETE")
.hide_env(true)
.long("allow-delete") .long("allow-delete")
.action(ArgAction::SetTrue)
.help("Allow delete files/folders"), .help("Allow delete files/folders"),
) )
.arg( .arg(
Arg::new("allow-search") Arg::new("allow-search")
.env("DUFS_ALLOW_SEARCH")
.hide_env(true)
.long("allow-search") .long("allow-search")
.action(ArgAction::SetTrue)
.help("Allow search files/folders"), .help("Allow search files/folders"),
) )
.arg( .arg(
Arg::new("allow-symlink") Arg::new("allow-symlink")
.env("DUFS_ALLOW_SYMLINK")
.hide_env(true)
.long("allow-symlink") .long("allow-symlink")
.action(ArgAction::SetTrue)
.help("Allow symlink to files/folders outside root directory"), .help("Allow symlink to files/folders outside root directory"),
) )
.arg(
Arg::new("allow-archive")
.env("DUFS_ALLOW_ARCHIVE")
.hide_env(true)
.long("allow-archive")
.action(ArgAction::SetTrue)
.help("Allow zip archive generation"),
)
.arg( .arg(
Arg::new("enable-cors") Arg::new("enable-cors")
.env("DUFS_ENABLE_CORS")
.hide_env(true)
.long("enable-cors") .long("enable-cors")
.action(ArgAction::SetTrue)
.help("Enable CORS, sets `Access-Control-Allow-Origin: *`"), .help("Enable CORS, sets `Access-Control-Allow-Origin: *`"),
) )
.arg( .arg(
Arg::new("render-index") Arg::new("render-index")
.env("DUFS_RENDER_INDEX")
.hide_env(true)
.long("render-index") .long("render-index")
.action(ArgAction::SetTrue)
.help("Serve index.html when requesting a directory, returns 404 if not found index.html"), .help("Serve index.html when requesting a directory, returns 404 if not found index.html"),
) )
.arg( .arg(
Arg::new("render-try-index") Arg::new("render-try-index")
.env("DUFS_RENDER_TRY_INDEX")
.hide_env(true)
.long("render-try-index") .long("render-try-index")
.help("Serve index.html when requesting a directory, returns file listing if not found index.html"), .action(ArgAction::SetTrue)
.help("Serve index.html when requesting a directory, returns directory listing if not found index.html"),
) )
.arg( .arg(
Arg::new("render-spa") Arg::new("render-spa")
.env("DUFS_RENDER_SPA")
.hide_env(true)
.long("render-spa") .long("render-spa")
.action(ArgAction::SetTrue)
.help("Serve SPA(Single Page Application)"), .help("Serve SPA(Single Page Application)"),
) )
.arg(
Arg::new("assets")
.env("DUFS_ASSETS")
.hide_env(true)
.long("assets")
.help("Use custom assets to override builtin assets")
.value_parser(value_parser!(PathBuf))
.value_name("path")
);
#[cfg(feature = "tls")]
let app = app
.arg( .arg(
Arg::new("tls-cert") Arg::new("tls-cert")
.env("DUFS_TLS_CERT")
.hide_env(true)
.long("tls-cert") .long("tls-cert")
.value_name("path") .value_name("path")
.value_parser(value_parser!(PathBuf))
.help("Path to an SSL/TLS certificate to serve with HTTPS"), .help("Path to an SSL/TLS certificate to serve with HTTPS"),
) )
.arg( .arg(
Arg::new("tls-key") Arg::new("tls-key")
.env("DUFS_TLS_KEY")
.hide_env(true)
.long("tls-key") .long("tls-key")
.value_name("path") .value_name("path")
.value_parser(value_parser!(PathBuf))
.help("Path to the SSL/TLS certificate's private key"), .help("Path to the SSL/TLS certificate's private key"),
) );
app.arg(
Arg::new("log-format")
.env("DUFS_LOG_FORMAT")
.hide_env(true)
.long("log-format")
.value_name("format")
.help("Customize http log format"),
)
.arg(
Arg::new("completions")
.long("completions")
.value_name("shell")
.value_parser(value_parser!(Shell))
.help("Print shell completion script for <shell>"),
)
} }
pub fn matches() -> ArgMatches { pub fn print_completions<G: Generator>(gen: G, cmd: &mut Command) {
app().get_matches() generate(gen, cmd, cmd.get_name().to_string(), &mut std::io::stdout());
} }
#[derive(Debug)] #[derive(Debug)]
pub struct Args { pub struct Args {
pub addrs: Vec<IpAddr>, pub addrs: Vec<BindAddr>,
pub port: u16, pub port: u16,
pub path: PathBuf, pub path: PathBuf,
pub path_is_file: bool, pub path_is_file: bool,
pub path_prefix: String, pub path_prefix: String,
pub uri_prefix: String, pub uri_prefix: String,
pub hidden: Vec<String>,
pub auth_method: AuthMethod, pub auth_method: AuthMethod,
pub auth: AccessControl, pub auth: AccessControl,
pub allow_upload: bool, pub allow_upload: bool,
pub allow_delete: bool, pub allow_delete: bool,
pub allow_search: bool, pub allow_search: bool,
pub allow_symlink: bool, pub allow_symlink: bool,
pub allow_archive: bool,
pub render_index: bool, pub render_index: bool,
pub render_spa: bool, pub render_spa: bool,
pub render_try_index: bool, pub render_try_index: bool,
pub enable_cors: bool, pub enable_cors: bool,
pub assets_path: Option<PathBuf>,
pub log_http: LogHttp,
#[cfg(feature = "tls")]
pub tls: Option<(Vec<Certificate>, PrivateKey)>, pub tls: Option<(Vec<Certificate>, PrivateKey)>,
#[cfg(not(feature = "tls"))]
pub tls: Option<()>,
} }
impl Args { impl Args {
/// Parse command-line arguments. /// Parse command-line arguments.
/// ///
/// If a parsing error ocurred, exit the process and print out informative /// If a parsing error occurred, exit the process and print out informative
/// error message to user. /// error message to user.
pub fn parse(matches: ArgMatches) -> BoxResult<Args> { pub fn parse(matches: ArgMatches) -> Result<Args> {
let port = matches.value_of_t::<u16>("port")?; let port = *matches.get_one::<u16>("port").unwrap();
let addrs = matches let addrs = matches
.values_of("bind") .get_many::<String>("bind")
.map(|v| v.collect()) .map(|bind| bind.map(|v| v.as_str()).collect())
.unwrap_or_else(|| vec!["0.0.0.0", "::"]); .unwrap_or_else(|| vec!["0.0.0.0", "::"]);
let addrs: Vec<IpAddr> = Args::parse_addrs(&addrs)?; let addrs: Vec<BindAddr> = Args::parse_addrs(&addrs)?;
let path = Args::parse_path(matches.value_of_os("path").unwrap_or_default())?; let path = Args::parse_path(matches.get_one::<PathBuf>("root").unwrap())?;
let path_is_file = path.metadata()?.is_file(); let path_is_file = path.metadata()?.is_file();
let path_prefix = matches let path_prefix = matches
.value_of("path-prefix") .get_one::<String>("path-prefix")
.map(|v| v.trim_matches('/').to_owned()) .map(|v| v.trim_matches('/').to_owned())
.unwrap_or_default(); .unwrap_or_default();
let uri_prefix = if path_prefix.is_empty() { let uri_prefix = if path_prefix.is_empty() {
"/".to_owned() "/".to_owned()
} else { } else {
format!("/{}/", &path_prefix) format!("/{}/", &encode_uri(&path_prefix))
}; };
let enable_cors = matches.is_present("enable-cors"); let hidden: Vec<String> = matches
let auth: Vec<&str> = matches .get_one::<String>("hidden")
.values_of("auth") .map(|v| v.split(',').map(|x| x.to_string()).collect())
.map(|v| v.collect())
.unwrap_or_default(); .unwrap_or_default();
let auth_method = match matches.value_of("auth-method").unwrap() { let enable_cors = matches.get_flag("enable-cors");
let auth: Vec<&str> = matches
.get_many::<String>("auth")
.map(|auth| auth.map(|v| v.as_str()).collect())
.unwrap_or_default();
let auth_method = match matches.get_one::<String>("auth-method").unwrap().as_str() {
"basic" => AuthMethod::Basic, "basic" => AuthMethod::Basic,
_ => AuthMethod::Digest, _ => AuthMethod::Digest,
}; };
let auth = AccessControl::new(&auth, &uri_prefix)?; let auth = AccessControl::new(&auth, &uri_prefix)?;
let allow_upload = matches.is_present("allow-all") || matches.is_present("allow-upload"); let allow_upload = matches.get_flag("allow-all") || matches.get_flag("allow-upload");
let allow_delete = matches.is_present("allow-all") || matches.is_present("allow-delete"); let allow_delete = matches.get_flag("allow-all") || matches.get_flag("allow-delete");
let allow_search = matches.is_present("allow-all") || matches.is_present("allow-search"); let allow_search = matches.get_flag("allow-all") || matches.get_flag("allow-search");
let allow_symlink = matches.is_present("allow-all") || matches.is_present("allow-symlink"); let allow_symlink = matches.get_flag("allow-all") || matches.get_flag("allow-symlink");
let render_index = matches.is_present("render-index"); let allow_archive = matches.get_flag("allow-all") || matches.get_flag("allow-archive");
let render_try_index = matches.is_present("render-try-index"); let render_index = matches.get_flag("render-index");
let render_spa = matches.is_present("render-spa"); let render_try_index = matches.get_flag("render-try-index");
let tls = match (matches.value_of("tls-cert"), matches.value_of("tls-key")) { let render_spa = matches.get_flag("render-spa");
#[cfg(feature = "tls")]
let tls = match (
matches.get_one::<PathBuf>("tls-cert"),
matches.get_one::<PathBuf>("tls-key"),
) {
(Some(certs_file), Some(key_file)) => { (Some(certs_file), Some(key_file)) => {
let certs = load_certs(certs_file)?; let certs = load_certs(certs_file)?;
let key = load_private_key(key_file)?; let key = load_private_key(key_file)?;
@@ -198,6 +309,17 @@ impl Args {
} }
_ => None, _ => None,
}; };
#[cfg(not(feature = "tls"))]
let tls = None;
let log_http: LogHttp = matches
.get_one::<String>("log-format")
.map(|v| v.as_str())
.unwrap_or(DEFAULT_LOG_FORMAT)
.parse()?;
let assets_path = match matches.get_one::<PathBuf>("assets") {
Some(v) => Some(Args::parse_assets_path(v)?),
None => None,
};
Ok(Args { Ok(Args {
addrs, addrs,
@@ -206,6 +328,7 @@ impl Args {
path_is_file, path_is_file,
path_prefix, path_prefix,
uri_prefix, uri_prefix,
hidden,
auth_method, auth_method,
auth, auth,
enable_cors, enable_cors,
@@ -213,36 +336,43 @@ impl Args {
allow_upload, allow_upload,
allow_search, allow_search,
allow_symlink, allow_symlink,
allow_archive,
render_index, render_index,
render_try_index, render_try_index,
render_spa, render_spa,
tls, tls,
log_http,
assets_path,
}) })
} }
fn parse_addrs(addrs: &[&str]) -> BoxResult<Vec<IpAddr>> { fn parse_addrs(addrs: &[&str]) -> Result<Vec<BindAddr>> {
let mut ip_addrs = vec![]; let mut bind_addrs = vec![];
let mut invalid_addrs = vec![]; let mut invalid_addrs = vec![];
for addr in addrs { for addr in addrs {
match addr.parse::<IpAddr>() { match addr.parse::<IpAddr>() {
Ok(v) => { Ok(v) => {
ip_addrs.push(v); bind_addrs.push(BindAddr::Address(v));
} }
Err(_) => { Err(_) => {
invalid_addrs.push(*addr); if cfg!(unix) {
bind_addrs.push(BindAddr::Path(PathBuf::from(addr)));
} else {
invalid_addrs.push(*addr);
}
} }
} }
} }
if !invalid_addrs.is_empty() { if !invalid_addrs.is_empty() {
return Err(format!("Invalid bind address `{}`", invalid_addrs.join(",")).into()); bail!("Invalid bind address `{}`", invalid_addrs.join(","));
} }
Ok(ip_addrs) Ok(bind_addrs)
} }
fn parse_path<P: AsRef<Path>>(path: P) -> BoxResult<PathBuf> { fn parse_path<P: AsRef<Path>>(path: P) -> Result<PathBuf> {
let path = path.as_ref(); let path = path.as_ref();
if !path.exists() { if !path.exists() {
return Err(format!("Path `{}` doesn't exist", path.display()).into()); bail!("Path `{}` doesn't exist", path.display());
} }
env::current_dir() env::current_dir()
@@ -250,6 +380,20 @@ impl Args {
p.push(path); // If path is absolute, it replaces the current path. p.push(path); // If path is absolute, it replaces the current path.
std::fs::canonicalize(p) std::fs::canonicalize(p)
}) })
.map_err(|err| format!("Failed to access path `{}`: {}", path.display(), err,).into()) .with_context(|| format!("Failed to access path `{}`", path.display()))
}
fn parse_assets_path<P: AsRef<Path>>(path: P) -> Result<PathBuf> {
let path = Self::parse_path(path)?;
if !path.join("index.html").exists() {
bail!("Path `{}` doesn't contains index.html", path.display());
}
Ok(path)
} }
} }
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum BindAddr {
Address(IpAddr),
Path(PathBuf),
}

View File

@@ -1,17 +1,16 @@
use anyhow::{anyhow, bail, Result};
use base64::{engine::general_purpose, Engine as _};
use headers::HeaderValue; use headers::HeaderValue;
use hyper::Method; use hyper::Method;
use lazy_static::lazy_static; use lazy_static::lazy_static;
use md5::Context; use md5::Context;
use std::{ use std::collections::HashMap;
collections::HashMap,
time::{SystemTime, UNIX_EPOCH},
};
use uuid::Uuid; use uuid::Uuid;
use crate::utils::encode_uri; use crate::utils::{encode_uri, unix_now};
use crate::BoxResult;
const REALM: &str = "DUFS"; const REALM: &str = "DUFS";
const DIGEST_AUTH_TIMEOUT: u32 = 86400;
lazy_static! { lazy_static! {
static ref NONCESTARTHASH: Context = { static ref NONCESTARTHASH: Context = {
@@ -35,14 +34,14 @@ pub struct PathControl {
} }
impl AccessControl { impl AccessControl {
pub fn new(raw_rules: &[&str], uri_prefix: &str) -> BoxResult<Self> { pub fn new(raw_rules: &[&str], uri_prefix: &str) -> Result<Self> {
let mut rules = HashMap::default(); let mut rules = HashMap::default();
if raw_rules.is_empty() { if raw_rules.is_empty() {
return Ok(Self { rules }); return Ok(Self { rules });
} }
for rule in raw_rules { for rule in raw_rules {
let parts: Vec<&str> = rule.split('@').collect(); let parts: Vec<&str> = rule.split('@').collect();
let create_err = || format!("Invalid auth `{}`", rule).into(); let create_err = || anyhow!("Invalid auth `{rule}`");
match parts.as_slice() { match parts.as_slice() {
[path, readwrite] => { [path, readwrite] => {
let control = PathControl { let control = PathControl {
@@ -71,6 +70,10 @@ impl AccessControl {
Ok(Self { rules }) Ok(Self { rules })
} }
pub fn valid(&self) -> bool {
!self.rules.is_empty()
}
pub fn guard( pub fn guard(
&self, &self,
path: &str, path: &str,
@@ -81,6 +84,11 @@ impl AccessControl {
if self.rules.is_empty() { if self.rules.is_empty() {
return GuardType::ReadWrite; return GuardType::ReadWrite;
} }
if method == Method::OPTIONS {
return GuardType::ReadOnly;
}
let mut controls = vec![]; let mut controls = vec![];
for path in walk_path(path) { for path in walk_path(path) {
if let Some(control) = self.rules.get(path) { if let Some(control) = self.rules.get(path) {
@@ -131,7 +139,12 @@ impl GuardType {
} }
fn sanitize_path(path: &str, uri_prefix: &str) -> String { fn sanitize_path(path: &str, uri_prefix: &str) -> String {
encode_uri(&format!("{}{}", uri_prefix, path.trim_matches('/'))) let new_path = match (uri_prefix, path) {
("/", "/") => "/".into(),
(_, "/") => uri_prefix.trim_end_matches('/').into(),
_ => format!("{}{}", uri_prefix, path.trim_matches('/')),
};
encode_uri(&new_path)
} }
fn walk_path(path: &str) -> impl Iterator<Item = &str> { fn walk_path(path: &str) -> impl Iterator<Item = &str> {
@@ -166,7 +179,7 @@ impl Account {
let user = p[0]; let user = p[0];
let pass = p[1]; let pass = p[1];
let mut h = Context::new(); let mut h = Context::new();
h.consume(format!("{}:{}:{}", user, REALM, pass).as_bytes()); h.consume(format!("{user}:{REALM}:{pass}").as_bytes());
Some(Account { Some(Account {
user: user.to_owned(), user: user.to_owned(),
pass: format!("{:x}", h.compute()), pass: format!("{:x}", h.compute()),
@@ -181,19 +194,36 @@ pub enum AuthMethod {
} }
impl AuthMethod { impl AuthMethod {
pub fn www_auth(&self, stale: bool) -> String { pub fn www_auth(&self, stale: bool) -> Result<String> {
match self { match self {
AuthMethod::Basic => { AuthMethod::Basic => Ok(format!("Basic realm=\"{REALM}\"")),
format!("Basic realm=\"{}\"", REALM)
}
AuthMethod::Digest => { AuthMethod::Digest => {
let str_stale = if stale { "stale=true," } else { "" }; let str_stale = if stale { "stale=true," } else { "" };
format!( Ok(format!(
"Digest realm=\"{}\",nonce=\"{}\",{}qop=\"auth\"", "Digest realm=\"{}\",nonce=\"{}\",{}qop=\"auth\"",
REALM, REALM,
create_nonce(), create_nonce()?,
str_stale str_stale
) ))
}
}
}
pub fn get_user(&self, authorization: &HeaderValue) -> Option<String> {
match self {
AuthMethod::Basic => {
let value: Vec<u8> = general_purpose::STANDARD
.decode(strip_prefix(authorization.as_bytes(), b"Basic ")?)
.ok()?;
let parts: Vec<&str> = std::str::from_utf8(&value).ok()?.split(':').collect();
Some(parts[0].to_string())
}
AuthMethod::Digest => {
let digest_value = strip_prefix(authorization.as_bytes(), b"Digest ")?;
let digest_map = to_headermap(digest_value).ok()?;
digest_map
.get(b"username".as_ref())
.and_then(|b| std::str::from_utf8(b).ok())
.map(|v| v.to_string())
} }
} }
} }
@@ -206,10 +236,10 @@ impl AuthMethod {
) -> Option<()> { ) -> Option<()> {
match self { match self {
AuthMethod::Basic => { AuthMethod::Basic => {
let value: Vec<u8> = let basic_value: Vec<u8> = general_purpose::STANDARD
base64::decode(strip_prefix(authorization.as_bytes(), b"Basic ").unwrap()) .decode(strip_prefix(authorization.as_bytes(), b"Basic ")?)
.unwrap(); .ok()?;
let parts: Vec<&str> = std::str::from_utf8(&value).unwrap().split(':').collect(); let parts: Vec<&str> = std::str::from_utf8(&basic_value).ok()?.split(':').collect();
if parts[0] != auth_user { if parts[0] != auth_user {
return None; return None;
@@ -228,13 +258,13 @@ impl AuthMethod {
} }
AuthMethod::Digest => { AuthMethod::Digest => {
let digest_value = strip_prefix(authorization.as_bytes(), b"Digest ")?; let digest_value = strip_prefix(authorization.as_bytes(), b"Digest ")?;
let user_vals = to_headermap(digest_value).ok()?; let digest_map = to_headermap(digest_value).ok()?;
if let (Some(username), Some(nonce), Some(user_response)) = ( if let (Some(username), Some(nonce), Some(user_response)) = (
user_vals digest_map
.get(b"username".as_ref()) .get(b"username".as_ref())
.and_then(|b| std::str::from_utf8(*b).ok()), .and_then(|b| std::str::from_utf8(b).ok()),
user_vals.get(b"nonce".as_ref()), digest_map.get(b"nonce".as_ref()),
user_vals.get(b"response".as_ref()), digest_map.get(b"response".as_ref()),
) { ) {
match validate_nonce(nonce) { match validate_nonce(nonce) {
Ok(true) => {} Ok(true) => {}
@@ -246,24 +276,24 @@ impl AuthMethod {
let mut ha = Context::new(); let mut ha = Context::new();
ha.consume(method); ha.consume(method);
ha.consume(b":"); ha.consume(b":");
if let Some(uri) = user_vals.get(b"uri".as_ref()) { if let Some(uri) = digest_map.get(b"uri".as_ref()) {
ha.consume(uri); ha.consume(uri);
} }
let ha = format!("{:x}", ha.compute()); let ha = format!("{:x}", ha.compute());
let mut correct_response = None; let mut correct_response = None;
if let Some(qop) = user_vals.get(b"qop".as_ref()) { if let Some(qop) = digest_map.get(b"qop".as_ref()) {
if qop == &b"auth".as_ref() || qop == &b"auth-int".as_ref() { if qop == &b"auth".as_ref() || qop == &b"auth-int".as_ref() {
correct_response = Some({ correct_response = Some({
let mut c = Context::new(); let mut c = Context::new();
c.consume(&auth_pass); c.consume(auth_pass);
c.consume(b":"); c.consume(b":");
c.consume(nonce); c.consume(nonce);
c.consume(b":"); c.consume(b":");
if let Some(nc) = user_vals.get(b"nc".as_ref()) { if let Some(nc) = digest_map.get(b"nc".as_ref()) {
c.consume(nc); c.consume(nc);
} }
c.consume(b":"); c.consume(b":");
if let Some(cnonce) = user_vals.get(b"cnonce".as_ref()) { if let Some(cnonce) = digest_map.get(b"cnonce".as_ref()) {
c.consume(cnonce); c.consume(cnonce);
} }
c.consume(b":"); c.consume(b":");
@@ -278,7 +308,7 @@ impl AuthMethod {
Some(r) => r, Some(r) => r,
None => { None => {
let mut c = Context::new(); let mut c = Context::new();
c.consume(&auth_pass); c.consume(auth_pass);
c.consume(b":"); c.consume(b":");
c.consume(nonce); c.consume(nonce);
c.consume(b":"); c.consume(b":");
@@ -299,16 +329,16 @@ impl AuthMethod {
/// Check if a nonce is still valid. /// Check if a nonce is still valid.
/// Return an error if it was never valid /// Return an error if it was never valid
fn validate_nonce(nonce: &[u8]) -> Result<bool, ()> { fn validate_nonce(nonce: &[u8]) -> Result<bool> {
if nonce.len() != 34 { if nonce.len() != 34 {
return Err(()); bail!("invalid nonce");
} }
//parse hex //parse hex
if let Ok(n) = std::str::from_utf8(nonce) { if let Ok(n) = std::str::from_utf8(nonce) {
//get time //get time
if let Ok(secs_nonce) = u32::from_str_radix(&n[..8], 16) { if let Ok(secs_nonce) = u32::from_str_radix(&n[..8], 16) {
//check time //check time
let now = SystemTime::now().duration_since(UNIX_EPOCH).unwrap(); let now = unix_now()?;
let secs_now = now.as_secs() as u32; let secs_now = now.as_secs() as u32;
if let Some(dur) = secs_now.checked_sub(secs_nonce) { if let Some(dur) = secs_now.checked_sub(secs_nonce) {
@@ -317,13 +347,12 @@ fn validate_nonce(nonce: &[u8]) -> Result<bool, ()> {
h.consume(secs_nonce.to_be_bytes()); h.consume(secs_nonce.to_be_bytes());
let h = format!("{:x}", h.compute()); let h = format!("{:x}", h.compute());
if h[..26] == n[8..34] { if h[..26] == n[8..34] {
return Ok(dur < 300); // from the last 5min return Ok(dur < DIGEST_AUTH_TIMEOUT);
//Authentication-Info ?
} }
} }
} }
} }
Err(()) bail!("invalid nonce");
} }
fn strip_prefix<'a>(search: &'a [u8], prefix: &[u8]) -> Option<&'a [u8]> { fn strip_prefix<'a>(search: &'a [u8], prefix: &[u8]) -> Option<&'a [u8]> {
@@ -340,12 +369,12 @@ fn strip_prefix<'a>(search: &'a [u8], prefix: &[u8]) -> Option<&'a [u8]> {
fn to_headermap(header: &[u8]) -> Result<HashMap<&[u8], &[u8]>, ()> { fn to_headermap(header: &[u8]) -> Result<HashMap<&[u8], &[u8]>, ()> {
let mut sep = Vec::new(); let mut sep = Vec::new();
let mut asign = Vec::new(); let mut assign = Vec::new();
let mut i: usize = 0; let mut i: usize = 0;
let mut esc = false; let mut esc = false;
for c in header { for c in header {
match (c, esc) { match (c, esc) {
(b'=', false) => asign.push(i), (b'=', false) => assign.push(i),
(b',', false) => sep.push(i), (b',', false) => sep.push(i),
(b'"', false) => esc = true, (b'"', false) => esc = true,
(b'"', true) => esc = false, (b'"', true) => esc = false,
@@ -353,16 +382,16 @@ fn to_headermap(header: &[u8]) -> Result<HashMap<&[u8], &[u8]>, ()> {
} }
i += 1; i += 1;
} }
sep.push(i); // same len for both Vecs sep.push(i);
i = 0; i = 0;
let mut ret = HashMap::new(); let mut ret = HashMap::new();
for (&k, &a) in sep.iter().zip(asign.iter()) { for (&k, &a) in sep.iter().zip(assign.iter()) {
while header[i] == b' ' { while header[i] == b' ' {
i += 1; i += 1;
} }
if a <= i || k <= 1 + a { if a <= i || k <= 1 + a {
//keys and vals must contain one char //keys and values must contain one char
return Err(()); return Err(());
} }
let key = &header[i..a]; let key = &header[i..a];
@@ -379,12 +408,12 @@ fn to_headermap(header: &[u8]) -> Result<HashMap<&[u8], &[u8]>, ()> {
Ok(ret) Ok(ret)
} }
fn create_nonce() -> String { fn create_nonce() -> Result<String> {
let now = SystemTime::now().duration_since(UNIX_EPOCH).unwrap(); let now = unix_now()?;
let secs = now.as_secs() as u32; let secs = now.as_secs() as u32;
let mut h = NONCESTARTHASH.clone(); let mut h = NONCESTARTHASH.clone();
h.consume(secs.to_be_bytes()); h.consume(secs.to_be_bytes());
let n = format!("{:08x}{:032x}", secs, h.compute()); let n = format!("{:08x}{:032x}", secs, h.compute());
n[..34].to_string() Ok(n[..34].to_string())
} }

99
src/log_http.rs Normal file
View File

@@ -0,0 +1,99 @@
use std::{collections::HashMap, str::FromStr, sync::Arc};
use crate::{args::Args, server::Request};
pub const DEFAULT_LOG_FORMAT: &str = r#"$remote_addr "$request" $status"#;
/// A compiled access-log format: an ordered sequence of literal text,
/// `$variable` placeholders, and `$http_*` request-header placeholders.
#[derive(Debug)]
pub struct LogHttp {
    // Parsed from the format string by the `FromStr` impl; rendered in order.
    elements: Vec<LogElement>,
}
#[derive(Debug)]
enum LogElement {
    /// A `$name` placeholder (e.g. `$remote_addr`, `$request`, `$status`).
    Variable(String),
    /// A `$http_<name>` placeholder resolved from the request headers;
    /// underscores in the name are mapped to dashes at parse time.
    Header(String),
    /// Literal text copied into the log line verbatim.
    Literal(String),
}
impl LogHttp {
    /// Collect the per-request values referenced by the configured format.
    ///
    /// Only `Variable` and `Header` elements produce entries; `Literal`
    /// elements are emitted verbatim at log time and need no data.
    pub fn data(&self, req: &Request, args: &Arc<Args>) -> HashMap<String, String> {
        let mut data = HashMap::default();
        for element in &self.elements {
            match element {
                LogElement::Variable(name) => {
                    if name == "request" {
                        data.insert(name.clone(), format!("{} {}", req.method(), req.uri()));
                    } else if name == "remote_user" {
                        let user = req
                            .headers()
                            .get("authorization")
                            .and_then(|v| args.auth_method.get_user(v));
                        if let Some(user) = user {
                            data.insert(name.clone(), user);
                        }
                    }
                    // Other variables (e.g. $remote_addr, $status) are filled
                    // in by the caller, not derived from the request here.
                }
                LogElement::Header(name) => {
                    let value = req.headers().get(name).and_then(|v| v.to_str().ok());
                    if let Some(value) = value {
                        data.insert(name.clone(), value.to_string());
                    }
                }
                LogElement::Literal(_) => {}
            }
        }
        data
    }

    /// Render the collected data through the format and emit one log line:
    /// `error!` with the error appended when `err` is set, `info!` otherwise.
    /// An empty format (no elements) disables logging entirely.
    pub fn log(&self, data: &HashMap<String, String>, err: Option<String>) {
        if self.elements.is_empty() {
            return;
        }
        let mut output = String::new();
        for element in &self.elements {
            match element {
                LogElement::Literal(value) => output.push_str(value),
                LogElement::Header(name) | LogElement::Variable(name) => {
                    // Missing values render as "-", nginx-style.
                    let value = data.get(name).map_or("-", |v| v.as_str());
                    output.push_str(value);
                }
            }
        }
        match err {
            Some(err) => error!("{} {}", output, err),
            None => info!("{}", output),
        }
    }
}
impl FromStr for LogHttp {
type Err = anyhow::Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut elements = vec![];
let mut is_var = false;
let mut cache = String::new();
for c in format!("{s} ").chars() {
if c == '$' {
if !cache.is_empty() {
elements.push(LogElement::Literal(cache.to_string()));
}
cache.clear();
is_var = true;
} else if is_var && !(c.is_alphanumeric() || c == '_') {
if let Some(value) = cache.strip_prefix("$http_") {
elements.push(LogElement::Header(value.replace('_', "-").to_string()));
} else if let Some(value) = cache.strip_prefix('$') {
elements.push(LogElement::Variable(value.to_string()));
}
cache.clear();
is_var = false;
}
cache.push(c);
}
let cache = cache.trim();
if !cache.is_empty() {
elements.push(LogElement::Literal(cache.to_string()));
}
Ok(Self { elements })
}
}

30
src/logger.rs Normal file
View File

@@ -0,0 +1,30 @@
use chrono::{Local, SecondsFormat};
use log::{Level, Metadata, Record};
use log::{LevelFilter, SetLoggerError};
/// Minimal `log::Log` implementation: timestamps each record and writes
/// warnings/errors to stderr, everything else to stdout.
struct SimpleLogger;

impl log::Log for SimpleLogger {
    // Only Info and more severe records are emitted; Debug/Trace are dropped.
    fn enabled(&self, metadata: &Metadata) -> bool {
        metadata.level() <= Level::Info
    }

    fn log(&self, record: &Record) {
        if self.enabled(record.metadata()) {
            // Local-time RFC 3339 timestamp with seconds precision.
            let timestamp = Local::now().to_rfc3339_opts(SecondsFormat::Secs, true);
            // Levels more severe than Info (Warn, Error) go to stderr so they
            // survive stdout redirection; Info goes to stdout.
            if record.level() < Level::Info {
                eprintln!("{} {} - {}", timestamp, record.level(), record.args());
            } else {
                println!("{} {} - {}", timestamp, record.level(), record.args());
            }
        }
    }

    fn flush(&self) {}
}
static LOGGER: SimpleLogger = SimpleLogger;
/// Install `SimpleLogger` as the global logger and cap the level at `Info`.
///
/// Errors if a global logger has already been set.
pub fn init() -> Result<(), SetLoggerError> {
    log::set_logger(&LOGGER).map(|()| log::set_max_level(LevelFilter::Info))
}

View File

@@ -1,50 +1,53 @@
mod args; mod args;
mod auth; mod auth;
mod log_http;
mod logger;
mod server; mod server;
mod streamer; mod streamer;
#[cfg(feature = "tls")]
mod tls; mod tls;
#[cfg(unix)]
mod unix;
mod utils; mod utils;
#[macro_use] #[macro_use]
extern crate log; extern crate log;
use crate::args::{matches, Args}; use crate::args::{build_cli, print_completions, Args};
use crate::server::{Request, Server}; use crate::server::{Request, Server};
#[cfg(feature = "tls")]
use crate::tls::{TlsAcceptor, TlsStream}; use crate::tls::{TlsAcceptor, TlsStream};
use std::io::Write; use anyhow::{anyhow, Context, Result};
use std::net::{IpAddr, SocketAddr, TcpListener as StdTcpListener}; use std::net::{IpAddr, SocketAddr, TcpListener as StdTcpListener};
use std::{env, sync::Arc}; use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use args::BindAddr;
use clap_complete::Shell;
use futures::future::join_all; use futures::future::join_all;
use tokio::net::TcpListener; use tokio::net::TcpListener;
use tokio::task::JoinHandle; use tokio::task::JoinHandle;
use hyper::server::conn::{AddrIncoming, AddrStream}; use hyper::server::conn::{AddrIncoming, AddrStream};
use hyper::service::{make_service_fn, service_fn}; use hyper::service::{make_service_fn, service_fn};
#[cfg(feature = "tls")]
use rustls::ServerConfig; use rustls::ServerConfig;
pub type BoxResult<T> = Result<T, Box<dyn std::error::Error>>;
#[tokio::main] #[tokio::main]
async fn main() { async fn main() -> Result<()> {
run().await.unwrap_or_else(handle_err) logger::init().map_err(|e| anyhow!("Failed to init logger, {e}"))?;
} let cmd = build_cli();
let matches = cmd.get_matches();
async fn run() -> BoxResult<()> { if let Some(generator) = matches.get_one::<Shell>("completions") {
if env::var("RUST_LOG").is_err() { let mut cmd = build_cli();
env::set_var("RUST_LOG", "info") print_completions(*generator, &mut cmd);
return Ok(());
} }
env_logger::builder() let args = Args::parse(matches)?;
.format(|buf, record| {
let timestamp = buf.timestamp_millis();
writeln!(buf, "[{} {}] {}", timestamp, record.level(), record.args())
})
.init();
let args = Args::parse(matches())?;
let args = Arc::new(args); let args = Arc::new(args);
let handles = serve(args.clone())?; let running = Arc::new(AtomicBool::new(true));
let handles = serve(args.clone(), running.clone())?;
print_listening(args)?; print_listening(args)?;
tokio::select! { tokio::select! {
@@ -57,20 +60,22 @@ async fn run() -> BoxResult<()> {
Ok(()) Ok(())
}, },
_ = shutdown_signal() => { _ = shutdown_signal() => {
running.store(false, Ordering::SeqCst);
Ok(()) Ok(())
}, },
} }
} }
fn serve(args: Arc<Args>) -> BoxResult<Vec<JoinHandle<Result<(), hyper::Error>>>> { fn serve(
let inner = Arc::new(Server::new(args.clone())); args: Arc<Args>,
running: Arc<AtomicBool>,
) -> Result<Vec<JoinHandle<Result<(), hyper::Error>>>> {
let inner = Arc::new(Server::init(args.clone(), running)?);
let mut handles = vec![]; let mut handles = vec![];
let port = args.port; let port = args.port;
for ip in args.addrs.iter() { for bind_addr in args.addrs.iter() {
let inner = inner.clone(); let inner = inner.clone();
let incoming = create_addr_incoming(SocketAddr::new(*ip, port)) let serve_func = move |remote_addr: Option<SocketAddr>| {
.map_err(|e| format!("Failed to bind `{}:{}`, {}", ip, port, e))?;
let serv_func = move |remote_addr: SocketAddr| {
let inner = inner.clone(); let inner = inner.clone();
async move { async move {
Ok::<_, hyper::Error>(service_fn(move |req: Request| { Ok::<_, hyper::Error>(service_fn(move |req: Request| {
@@ -79,35 +84,62 @@ fn serve(args: Arc<Args>) -> BoxResult<Vec<JoinHandle<Result<(), hyper::Error>>>
})) }))
} }
}; };
match args.tls.clone() { match bind_addr {
Some((certs, key)) => { BindAddr::Address(ip) => {
let config = ServerConfig::builder() let incoming = create_addr_incoming(SocketAddr::new(*ip, port))
.with_safe_defaults() .with_context(|| format!("Failed to bind `{ip}:{port}`"))?;
.with_no_client_auth() match args.tls.as_ref() {
.with_single_cert(certs, key)?; #[cfg(feature = "tls")]
let config = Arc::new(config); Some((certs, key)) => {
let accepter = TlsAcceptor::new(config.clone(), incoming); let config = ServerConfig::builder()
let new_service = make_service_fn(move |socket: &TlsStream| { .with_safe_defaults()
let remote_addr = socket.remote_addr(); .with_no_client_auth()
serv_func(remote_addr) .with_single_cert(certs.clone(), key.clone())?;
}); let config = Arc::new(config);
let server = tokio::spawn(hyper::Server::builder(accepter).serve(new_service)); let accepter = TlsAcceptor::new(config.clone(), incoming);
handles.push(server); let new_service = make_service_fn(move |socket: &TlsStream| {
let remote_addr = socket.remote_addr();
serve_func(Some(remote_addr))
});
let server =
tokio::spawn(hyper::Server::builder(accepter).serve(new_service));
handles.push(server);
}
#[cfg(not(feature = "tls"))]
Some(_) => {
unreachable!()
}
None => {
let new_service = make_service_fn(move |socket: &AddrStream| {
let remote_addr = socket.remote_addr();
serve_func(Some(remote_addr))
});
let server =
tokio::spawn(hyper::Server::builder(incoming).serve(new_service));
handles.push(server);
}
};
} }
None => { BindAddr::Path(path) => {
let new_service = make_service_fn(move |socket: &AddrStream| { if path.exists() {
let remote_addr = socket.remote_addr(); std::fs::remove_file(path)?;
serv_func(remote_addr) }
}); #[cfg(unix)]
let server = tokio::spawn(hyper::Server::builder(incoming).serve(new_service)); {
handles.push(server); let listener = tokio::net::UnixListener::bind(path)
.with_context(|| format!("Failed to bind `{}`", path.display()))?;
let acceptor = unix::UnixAcceptor::from_listener(listener);
let new_service = make_service_fn(move |_| serve_func(None));
let server = tokio::spawn(hyper::Server::builder(acceptor).serve(new_service));
handles.push(server);
}
} }
}; }
} }
Ok(handles) Ok(handles)
} }
fn create_addr_incoming(addr: SocketAddr) -> BoxResult<AddrIncoming> { fn create_addr_incoming(addr: SocketAddr) -> Result<AddrIncoming> {
use socket2::{Domain, Protocol, Socket, Type}; use socket2::{Domain, Protocol, Socket, Type};
let socket = Socket::new(Domain::for_address(addr), Type::STREAM, Some(Protocol::TCP))?; let socket = Socket::new(Domain::for_address(addr), Type::STREAM, Some(Protocol::TCP))?;
if addr.is_ipv6() { if addr.is_ipv6() {
@@ -122,45 +154,52 @@ fn create_addr_incoming(addr: SocketAddr) -> BoxResult<AddrIncoming> {
Ok(incoming) Ok(incoming)
} }
fn print_listening(args: Arc<Args>) -> BoxResult<()> { fn print_listening(args: Arc<Args>) -> Result<()> {
let mut addrs = vec![]; let mut bind_addrs = vec![];
let (mut ipv4, mut ipv6) = (false, false); let (mut ipv4, mut ipv6) = (false, false);
for ip in args.addrs.iter() { for bind_addr in args.addrs.iter() {
if ip.is_unspecified() { match bind_addr {
if ip.is_ipv6() { BindAddr::Address(ip) => {
ipv6 = true; if ip.is_unspecified() {
} else { if ip.is_ipv6() {
ipv4 = true; ipv6 = true;
} else {
ipv4 = true;
}
} else {
bind_addrs.push(bind_addr.clone());
}
} }
} else { _ => bind_addrs.push(bind_addr.clone()),
addrs.push(*ip);
} }
} }
if ipv4 || ipv6 { if ipv4 || ipv6 {
let ifaces = get_if_addrs::get_if_addrs() let ifaces =
.map_err(|e| format!("Failed to get local interface addresses: {}", e))?; if_addrs::get_if_addrs().with_context(|| "Failed to get local interface addresses")?;
for iface in ifaces.into_iter() { for iface in ifaces.into_iter() {
let local_ip = iface.ip(); let local_ip = iface.ip();
if ipv4 && local_ip.is_ipv4() { if ipv4 && local_ip.is_ipv4() {
addrs.push(local_ip) bind_addrs.push(BindAddr::Address(local_ip))
} }
if ipv6 && local_ip.is_ipv6() { if ipv6 && local_ip.is_ipv6() {
addrs.push(local_ip) bind_addrs.push(BindAddr::Address(local_ip))
} }
} }
} }
addrs.sort_unstable(); bind_addrs.sort_unstable();
let urls = addrs let urls = bind_addrs
.into_iter() .into_iter()
.map(|addr| match addr { .map(|bind_addr| match bind_addr {
IpAddr::V4(_) => format!("{}:{}", addr, args.port), BindAddr::Address(addr) => {
IpAddr::V6(_) => format!("[{}]:{}", addr, args.port), let addr = match addr {
IpAddr::V4(_) => format!("{}:{}", addr, args.port),
IpAddr::V6(_) => format!("[{}]:{}", addr, args.port),
};
let protocol = if args.tls.is_some() { "https" } else { "http" };
format!("{}://{}{}", protocol, addr, args.uri_prefix)
}
BindAddr::Path(path) => path.display().to_string(),
}) })
.map(|addr| match &args.tls {
Some(_) => format!("https://{}", addr),
None => format!("http://{}", addr),
})
.map(|url| format!("{}{}", url, args.uri_prefix))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
if urls.len() == 1 { if urls.len() == 1 {
@@ -168,20 +207,15 @@ fn print_listening(args: Arc<Args>) -> BoxResult<()> {
} else { } else {
let info = urls let info = urls
.iter() .iter()
.map(|v| format!(" {}", v)) .map(|v| format!(" {v}"))
.collect::<Vec<String>>() .collect::<Vec<String>>()
.join("\n"); .join("\n");
println!("Listening on:\n{}\n", info); println!("Listening on:\n{info}\n");
} }
Ok(()) Ok(())
} }
fn handle_err<T>(err: Box<dyn std::error::Error>) -> T {
eprintln!("error: {}", err);
std::process::exit(1);
}
async fn shutdown_signal() { async fn shutdown_signal() {
tokio::signal::ctrl_c() tokio::signal::ctrl_c()
.await .await

File diff suppressed because it is too large Load Diff

View File

@@ -1,3 +1,4 @@
use anyhow::{anyhow, bail, Context as AnyhowContext, Result};
use core::task::{Context, Poll}; use core::task::{Context, Poll};
use futures::ready; use futures::ready;
use hyper::server::accept::Accept; use hyper::server::accept::Accept;
@@ -5,6 +6,7 @@ use hyper::server::conn::{AddrIncoming, AddrStream};
use rustls::{Certificate, PrivateKey}; use rustls::{Certificate, PrivateKey};
use std::future::Future; use std::future::Future;
use std::net::SocketAddr; use std::net::SocketAddr;
use std::path::Path;
use std::pin::Pin; use std::pin::Pin;
use std::sync::Arc; use std::sync::Arc;
use std::{fs, io}; use std::{fs, io};
@@ -123,36 +125,37 @@ impl Accept for TlsAcceptor {
} }
// Load public certificate from file. // Load public certificate from file.
pub fn load_certs(filename: &str) -> Result<Vec<Certificate>, Box<dyn std::error::Error>> { pub fn load_certs<T: AsRef<Path>>(filename: T) -> Result<Vec<Certificate>> {
// Open certificate file. // Open certificate file.
let certfile = fs::File::open(&filename) let cert_file = fs::File::open(filename.as_ref())
.map_err(|e| format!("Failed to access `{}`, {}", &filename, e))?; .with_context(|| format!("Failed to access `{}`", filename.as_ref().display()))?;
let mut reader = io::BufReader::new(certfile); let mut reader = io::BufReader::new(cert_file);
// Load and return certificate. // Load and return certificate.
let certs = rustls_pemfile::certs(&mut reader).map_err(|_| "Failed to load certificate")?; let certs = rustls_pemfile::certs(&mut reader).with_context(|| "Failed to load certificate")?;
if certs.is_empty() { if certs.is_empty() {
return Err("No supported certificate in file".into()); bail!("No supported certificate in file");
} }
Ok(certs.into_iter().map(Certificate).collect()) Ok(certs.into_iter().map(Certificate).collect())
} }
// Load private key from file. // Load private key from file.
pub fn load_private_key(filename: &str) -> Result<PrivateKey, Box<dyn std::error::Error>> { pub fn load_private_key<T: AsRef<Path>>(filename: T) -> Result<PrivateKey> {
// Open keyfile. let key_file = fs::File::open(filename.as_ref())
let keyfile = fs::File::open(&filename) .with_context(|| format!("Failed to access `{}`", filename.as_ref().display()))?;
.map_err(|e| format!("Failed to access `{}`, {}", &filename, e))?; let mut reader = io::BufReader::new(key_file);
let mut reader = io::BufReader::new(keyfile);
// Load and return a single private key. // Load and return a single private key.
let keys = rustls_pemfile::read_all(&mut reader) let keys = rustls_pemfile::read_all(&mut reader)
.map_err(|e| format!("There was a problem with reading private key: {:?}", e))? .with_context(|| "There was a problem with reading private key")?
.into_iter() .into_iter()
.find_map(|item| match item { .find_map(|item| match item {
rustls_pemfile::Item::RSAKey(key) | rustls_pemfile::Item::PKCS8Key(key) => Some(key), rustls_pemfile::Item::RSAKey(key)
| rustls_pemfile::Item::PKCS8Key(key)
| rustls_pemfile::Item::ECKey(key) => Some(key),
_ => None, _ => None,
}) })
.ok_or("No supported private key in file")?; .ok_or_else(|| anyhow!("No supported private key in file"))?;
Ok(PrivateKey(keys)) Ok(PrivateKey(keys))
} }

31
src/unix.rs Normal file
View File

@@ -0,0 +1,31 @@
use hyper::server::accept::Accept;
use tokio::net::UnixListener;
use std::pin::Pin;
use std::task::{Context, Poll};
/// Adapter that lets hyper accept connections from a Unix domain socket.
pub struct UnixAcceptor {
    inner: UnixListener,
}

impl UnixAcceptor {
    /// Wrap an already-bound `UnixListener`.
    pub fn from_listener(listener: UnixListener) -> Self {
        Self { inner: listener }
    }
}
impl Accept for UnixAcceptor {
    type Conn = tokio::net::UnixStream;
    type Error = std::io::Error;

    /// Forward readiness to the wrapped `UnixListener`, discarding the peer
    /// address component hyper does not need.
    fn poll_accept(
        self: Pin<&mut Self>,
        cx: &mut Context<'_>,
    ) -> Poll<Option<Result<Self::Conn, Self::Error>>> {
        match self.inner.poll_accept(cx) {
            Poll::Ready(result) => Poll::Ready(Some(result.map(|(stream, _addr)| stream))),
            Poll::Pending => Poll::Pending,
        }
    }
}

View File

@@ -1,4 +1,15 @@
use std::borrow::Cow; use anyhow::{anyhow, Context, Result};
use std::{
borrow::Cow,
path::Path,
time::{Duration, SystemTime, UNIX_EPOCH},
};
pub fn unix_now() -> Result<Duration> {
SystemTime::now()
.duration_since(UNIX_EPOCH)
.with_context(|| "Invalid system time")
}
pub fn encode_uri(v: &str) -> String { pub fn encode_uri(v: &str) -> String {
let parts: Vec<_> = v.split('/').map(urlencoding::encode).collect(); let parts: Vec<_> = v.split('/').map(urlencoding::encode).collect();
@@ -10,3 +21,47 @@ pub fn decode_uri(v: &str) -> Option<Cow<str>> {
.decode_utf8() .decode_utf8()
.ok() .ok()
} }
/// Final path component as UTF-8, or `""` when the path has no file name
/// (e.g. `/` or `..`) or the name is not valid UTF-8.
pub fn get_file_name(path: &Path) -> &str {
    if let Some(name) = path.file_name().and_then(|name| name.to_str()) {
        name
    } else {
        ""
    }
}
/// Final path component as UTF-8, or an error naming the offending path
/// when there is no file name or it is not valid UTF-8.
pub fn try_get_file_name(path: &Path) -> Result<&str> {
    match path.file_name().and_then(|name| name.to_str()) {
        Some(name) => Ok(name),
        None => Err(anyhow!("Failed to get file name of `{}`", path.display())),
    }
}
/// Whether `target` matches the glob `pattern`.
///
/// An invalid pattern matches nothing (returns `false`) rather than erroring.
pub fn glob(pattern: &str, target: &str) -> bool {
    ::glob::Pattern::new(pattern)
        .map(|pat| pat.matches(target))
        .unwrap_or(false)
}
#[test]
// Covers literal matches, `*`/`?` wildcards, anchoring at both ends, and the
// regression from #188 where `*.abc-cba` failed on multi-dot names.
fn test_glob_key() {
    assert!(glob("", ""));
    assert!(glob(".*", ".git"));
    assert!(glob("abc", "abc"));
    assert!(glob("a*c", "abc"));
    assert!(glob("a?c", "abc"));
    assert!(glob("a*c", "abbc"));
    assert!(glob("*c", "abc"));
    assert!(glob("a*", "abc"));
    assert!(glob("?c", "bc"));
    assert!(glob("a?", "ab"));
    assert!(!glob("abc", "adc"));
    assert!(!glob("abc", "abcd"));
    assert!(!glob("a?c", "abbc"));
    assert!(!glob("*.log", "log"));
    assert!(glob("*.abc-cba", "xyz.abc-cba"));
    assert!(glob("*.abc-cba", "123.xyz.abc-cba"));
    assert!(glob("*.log", ".log"));
    assert!(glob("*.log", "a.log"));
    assert!(glob("*/", "abc/"));
    assert!(!glob("*/", "abc"));
}

View File

@@ -20,6 +20,13 @@ fn default_not_allow_delete(server: TestServer) -> Result<(), Error> {
Ok(()) Ok(())
} }
#[rstest]
// Without --allow-archive, the `?zip` endpoint must not exist (404).
fn default_not_allow_archive(server: TestServer) -> Result<(), Error> {
    let resp = reqwest::blocking::get(format!("{}?zip", server.url()))?;
    assert_eq!(resp.status(), 404);
    Ok(())
}
#[rstest] #[rstest]
fn default_not_exist_dir(server: TestServer) -> Result<(), Error> { fn default_not_exist_dir(server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(format!("{}404/", server.url()))?; let resp = reqwest::blocking::get(format!("{}404/", server.url()))?;
@@ -64,10 +71,22 @@ fn allow_upload_delete_can_override(#[with(&["-A"])] server: TestServer) -> Resu
fn allow_search(#[with(&["--allow-search"])] server: TestServer) -> Result<(), Error> { fn allow_search(#[with(&["--allow-search"])] server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(format!("{}?q={}", server.url(), "test.html"))?; let resp = reqwest::blocking::get(format!("{}?q={}", server.url(), "test.html"))?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
let paths = utils::retrive_index_paths(&resp.text()?); let paths = utils::retrieve_index_paths(&resp.text()?);
assert!(!paths.is_empty()); assert!(!paths.is_empty());
for p in paths { for p in paths {
assert!(p.contains(&"test.html")); assert!(p.contains("test.html"));
} }
Ok(()) Ok(())
} }
#[rstest]
// With --allow-archive, `?zip` streams a zip download of the directory.
fn allow_archive(#[with(&["--allow-archive"])] server: TestServer) -> Result<(), Error> {
    let resp = reqwest::blocking::get(format!("{}?zip", server.url()))?;
    assert_eq!(resp.status(), 200);
    assert_eq!(
        resp.headers().get("content-type").unwrap(),
        "application/zip"
    );
    // The response should prompt a download with a suggested file name.
    assert!(resp.headers().contains_key("content-disposition"));
    Ok(())
}

View File

@@ -1,16 +1,15 @@
//! Run file server with different args
mod fixtures; mod fixtures;
mod utils; mod utils;
use assert_cmd::prelude::*; use fixtures::{server, Error, TestServer};
use assert_fs::fixture::TempDir;
use fixtures::{port, server, tmpdir, wait_for_port, Error, TestServer};
use rstest::rstest; use rstest::rstest;
use std::process::{Command, Stdio};
#[rstest] #[rstest]
fn path_prefix_index(#[with(&["--path-prefix", "xyz"])] server: TestServer) -> Result<(), Error> { fn path_prefix_index(#[with(&["--path-prefix", "xyz"])] server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(format!("{}{}", server.url(), "xyz"))?; let resp = reqwest::blocking::get(format!("{}{}", server.url(), "xyz"))?;
assert_index_resp!(resp); assert_resp_paths!(resp);
Ok(()) Ok(())
} }
@@ -31,23 +30,3 @@ fn path_prefix_propfind(
assert!(text.contains("<D:href>/xyz/</D:href>")); assert!(text.contains("<D:href>/xyz/</D:href>"));
Ok(()) Ok(())
} }
#[rstest]
#[case("index.html")]
fn serve_single_file(tmpdir: TempDir, port: u16, #[case] file: &str) -> Result<(), Error> {
let mut child = Command::cargo_bin("dufs")?
.env("RUST_LOG", "false")
.arg(tmpdir.path().join(file))
.arg("-p")
.arg(port.to_string())
.stdout(Stdio::piped())
.spawn()?;
wait_for_port(port);
let resp = reqwest::blocking::get(format!("http://localhost:{}/index.html", port))?;
assert_eq!(resp.text()?, "This is index.html");
child.kill()?;
Ok(())
}

122
tests/assets.rs Normal file
View File

@@ -0,0 +1,122 @@
mod fixtures;
mod utils;
use assert_cmd::prelude::*;
use assert_fs::fixture::TempDir;
use fixtures::{port, server, tmpdir, wait_for_port, Error, TestServer, DIR_ASSETS};
use rstest::rstest;
use std::process::{Command, Stdio};
#[rstest]
fn assets(server: TestServer) -> Result<(), Error> {
let ver = env!("CARGO_PKG_VERSION");
let resp = reqwest::blocking::get(server.url())?;
let index_js = format!("/__dufs_v{ver}_index.js");
let index_css = format!("/__dufs_v{ver}_index.css");
let favicon_ico = format!("/__dufs_v{ver}_favicon.ico");
let text = resp.text()?;
assert!(text.contains(&format!(r#"href="{index_css}""#)));
assert!(text.contains(&format!(r#"href="{favicon_ico}""#)));
assert!(text.contains(&format!(r#"src="{index_js}""#)));
Ok(())
}
#[rstest]
fn asset_js(server: TestServer) -> Result<(), Error> {
let url = format!(
"{}__dufs_v{}_index.js",
server.url(),
env!("CARGO_PKG_VERSION")
);
let resp = reqwest::blocking::get(url)?;
assert_eq!(resp.status(), 200);
assert_eq!(
resp.headers().get("content-type").unwrap(),
"application/javascript"
);
Ok(())
}
#[rstest]
fn asset_css(server: TestServer) -> Result<(), Error> {
let url = format!(
"{}__dufs_v{}_index.css",
server.url(),
env!("CARGO_PKG_VERSION")
);
let resp = reqwest::blocking::get(url)?;
assert_eq!(resp.status(), 200);
assert_eq!(resp.headers().get("content-type").unwrap(), "text/css");
Ok(())
}
#[rstest]
fn asset_ico(server: TestServer) -> Result<(), Error> {
let url = format!(
"{}__dufs_v{}_favicon.ico",
server.url(),
env!("CARGO_PKG_VERSION")
);
let resp = reqwest::blocking::get(url)?;
assert_eq!(resp.status(), 200);
assert_eq!(resp.headers().get("content-type").unwrap(), "image/x-icon");
Ok(())
}
#[rstest]
fn assets_with_prefix(#[with(&["--path-prefix", "xyz"])] server: TestServer) -> Result<(), Error> {
let ver = env!("CARGO_PKG_VERSION");
let resp = reqwest::blocking::get(format!("{}xyz/", server.url()))?;
let index_js = format!("/xyz/__dufs_v{ver}_index.js");
let index_css = format!("/xyz/__dufs_v{ver}_index.css");
let favicon_ico = format!("/xyz/__dufs_v{ver}_favicon.ico");
let text = resp.text()?;
assert!(text.contains(&format!(r#"href="{index_css}""#)));
assert!(text.contains(&format!(r#"href="{favicon_ico}""#)));
assert!(text.contains(&format!(r#"src="{index_js}""#)));
Ok(())
}
#[rstest]
fn asset_js_with_prefix(
#[with(&["--path-prefix", "xyz"])] server: TestServer,
) -> Result<(), Error> {
let url = format!(
"{}xyz/__dufs_v{}_index.js",
server.url(),
env!("CARGO_PKG_VERSION")
);
let resp = reqwest::blocking::get(url)?;
assert_eq!(resp.status(), 200);
assert_eq!(
resp.headers().get("content-type").unwrap(),
"application/javascript"
);
Ok(())
}
#[rstest]
fn assets_override(tmpdir: TempDir, port: u16) -> Result<(), Error> {
let mut child = Command::cargo_bin("dufs")?
.arg(tmpdir.path())
.arg("-p")
.arg(port.to_string())
.arg("--assets")
.arg(tmpdir.join(DIR_ASSETS))
.stdout(Stdio::piped())
.spawn()?;
wait_for_port(port);
let url = format!("http://localhost:{port}");
let resp = reqwest::blocking::get(&url)?;
assert!(resp.text()?.starts_with(&format!(
"/__dufs_v{}_index.js;DATA",
env!("CARGO_PKG_VERSION")
)));
let resp = reqwest::blocking::get(&url)?;
assert_resp_paths!(resp);
child.kill()?;
Ok(())
}

View File

@@ -35,6 +35,30 @@ fn auth_skip(#[with(&["--auth", "/@user:pass@*"])] server: TestServer) -> Result
Ok(()) Ok(())
} }
#[rstest]
fn auth_skip_on_options_method(
#[with(&["--auth", "/@user:pass"])] server: TestServer,
) -> Result<(), Error> {
let url = format!("{}index.html", server.url());
let resp = fetch!(b"OPTIONS", &url).send()?;
assert_eq!(resp.status(), 200);
Ok(())
}
#[rstest]
fn auth_check(
#[with(&["--auth", "/@user:pass@user2:pass2", "-A"])] server: TestServer,
) -> Result<(), Error> {
let url = format!("{}index.html", server.url());
let resp = fetch!(b"WRITEABLE", &url).send()?;
assert_eq!(resp.status(), 401);
let resp = fetch!(b"WRITEABLE", &url).send_with_digest_auth("user2", "pass2")?;
assert_eq!(resp.status(), 401);
let resp = fetch!(b"WRITEABLE", &url).send_with_digest_auth("user", "pass")?;
assert_eq!(resp.status(), 200);
Ok(())
}
#[rstest] #[rstest]
fn auth_readonly( fn auth_readonly(
#[with(&["--auth", "/@user:pass@user2:pass2", "-A"])] server: TestServer, #[with(&["--auth", "/@user:pass@user2:pass2", "-A"])] server: TestServer,
@@ -54,10 +78,10 @@ fn auth_readonly(
#[rstest] #[rstest]
fn auth_nest( fn auth_nest(
#[with(&["--auth", "/@user:pass@user2:pass2", "--auth", "/dira@user3:pass3", "-A"])] #[with(&["--auth", "/@user:pass@user2:pass2", "--auth", "/dir1@user3:pass3", "-A"])]
server: TestServer, server: TestServer,
) -> Result<(), Error> { ) -> Result<(), Error> {
let url = format!("{}dira/file1", server.url()); let url = format!("{}dir1/file1", server.url());
let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?; let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
assert_eq!(resp.status(), 401); assert_eq!(resp.status(), 401);
let resp = fetch!(b"PUT", &url) let resp = fetch!(b"PUT", &url)
@@ -73,7 +97,7 @@ fn auth_nest(
#[rstest] #[rstest]
fn auth_nest_share( fn auth_nest_share(
#[with(&["--auth", "/@user:pass@*", "--auth", "/dira@user3:pass3", "-A"])] server: TestServer, #[with(&["--auth", "/@user:pass@*", "--auth", "/dir1@user3:pass3", "-A"])] server: TestServer,
) -> Result<(), Error> { ) -> Result<(), Error> {
let url = format!("{}index.html", server.url()); let url = format!("{}index.html", server.url());
let resp = fetch!(b"GET", &url).send()?; let resp = fetch!(b"GET", &url).send()?;
@@ -82,16 +106,58 @@ fn auth_nest_share(
} }
#[rstest] #[rstest]
#[case(server(&["--auth", "/@user:pass", "--auth-method", "basic", "-A"]), "user", "pass")]
#[case(server(&["--auth", "/@u1:p1", "--auth-method", "basic", "-A"]), "u1", "p1")]
fn auth_basic( fn auth_basic(
#[with(&["--auth", "/@user:pass", "--auth-method", "basic", "-A"])] server: TestServer, #[case] server: TestServer,
#[case] user: &str,
#[case] pass: &str,
) -> Result<(), Error> { ) -> Result<(), Error> {
let url = format!("{}file1", server.url()); let url = format!("{}file1", server.url());
let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?; let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
assert_eq!(resp.status(), 401); assert_eq!(resp.status(), 401);
let resp = fetch!(b"PUT", &url) let resp = fetch!(b"PUT", &url)
.body(b"abc".to_vec()) .body(b"abc".to_vec())
.basic_auth("user", Some("pass")) .basic_auth(user, Some(pass))
.send()?; .send()?;
assert_eq!(resp.status(), 201); assert_eq!(resp.status(), 201);
Ok(()) Ok(())
} }
#[rstest]
fn auth_webdav_move(
#[with(&["--auth", "/@user:pass@*", "--auth", "/dir1@user3:pass3", "-A"])] server: TestServer,
) -> Result<(), Error> {
let origin_url = format!("{}dir1/test.html", server.url());
let new_url = format!("{}test2.html", server.url());
let resp = fetch!(b"MOVE", &origin_url)
.header("Destination", &new_url)
.send_with_digest_auth("user3", "pass3")?;
assert_eq!(resp.status(), 403);
Ok(())
}
#[rstest]
fn auth_webdav_copy(
#[with(&["--auth", "/@user:pass@*", "--auth", "/dir1@user3:pass3", "-A"])] server: TestServer,
) -> Result<(), Error> {
let origin_url = format!("{}dir1/test.html", server.url());
let new_url = format!("{}test2.html", server.url());
let resp = fetch!(b"COPY", &origin_url)
.header("Destination", &new_url)
.send_with_digest_auth("user3", "pass3")?;
assert_eq!(resp.status(), 403);
Ok(())
}
#[rstest]
fn auth_path_prefix(
#[with(&["--auth", "/@user:pass", "--path-prefix", "xyz", "-A"])] server: TestServer,
) -> Result<(), Error> {
let url = format!("{}xyz/index.html", server.url());
let resp = fetch!(b"GET", &url).send()?;
assert_eq!(resp.status(), 401);
let resp = fetch!(b"GET", &url).send_with_digest_auth("user", "pass")?;
assert_eq!(resp.status(), 200);
Ok(())
}

View File

@@ -6,14 +6,13 @@ use assert_cmd::prelude::*;
use assert_fs::fixture::TempDir; use assert_fs::fixture::TempDir;
use regex::Regex; use regex::Regex;
use rstest::rstest; use rstest::rstest;
use std::io::{BufRead, BufReader}; use std::io::Read;
use std::process::{Command, Stdio}; use std::process::{Command, Stdio};
#[rstest] #[rstest]
#[case(&["-b", "20.205.243.166"])] #[case(&["-b", "20.205.243.166"])]
fn bind_fails(tmpdir: TempDir, port: u16, #[case] args: &[&str]) -> Result<(), Error> { fn bind_fails(tmpdir: TempDir, port: u16, #[case] args: &[&str]) -> Result<(), Error> {
Command::cargo_bin("dufs")? Command::cargo_bin("dufs")?
.env("RUST_LOG", "false")
.arg(tmpdir.path()) .arg(tmpdir.path())
.arg("-p") .arg("-p")
.arg(port.to_string()) .arg(port.to_string())
@@ -51,7 +50,6 @@ fn bind_ipv4_ipv6(
#[case(&["--path-prefix", "/prefix"])] #[case(&["--path-prefix", "/prefix"])]
fn validate_printed_urls(tmpdir: TempDir, port: u16, #[case] args: &[&str]) -> Result<(), Error> { fn validate_printed_urls(tmpdir: TempDir, port: u16, #[case] args: &[&str]) -> Result<(), Error> {
let mut child = Command::cargo_bin("dufs")? let mut child = Command::cargo_bin("dufs")?
.env("RUST_LOG", "false")
.arg(tmpdir.path()) .arg(tmpdir.path())
.arg("-p") .arg("-p")
.arg(port.to_string()) .arg(port.to_string())
@@ -61,22 +59,23 @@ fn validate_printed_urls(tmpdir: TempDir, port: u16, #[case] args: &[&str]) -> R
wait_for_port(port); wait_for_port(port);
// WARN assumes urls list is terminated by an empty line let stdout = child.stdout.as_mut().expect("Failed to get stdout");
let url_lines = BufReader::new(child.stdout.take().unwrap()) let mut buf = [0; 1000];
let buf_len = stdout.read(&mut buf)?;
let output = std::str::from_utf8(&buf[0..buf_len])?;
let url_lines = output
.lines() .lines()
.map(|line| line.expect("Error reading stdout"))
.take_while(|line| !line.is_empty()) /* non-empty lines */ .take_while(|line| !line.is_empty()) /* non-empty lines */
.collect::<Vec<_>>(); .collect::<Vec<_>>()
let url_lines = url_lines.join("\n"); .join("\n");
let urls = Regex::new(r"http://[a-zA-Z0-9\.\[\]:/]+") let urls = Regex::new(r"http://[a-zA-Z0-9\.\[\]:/]+")
.unwrap() .unwrap()
.captures_iter(url_lines.as_str()) .captures_iter(url_lines.as_str())
.map(|caps| caps.get(0).unwrap().as_str()) .filter_map(|caps| caps.get(0).map(|v| v.as_str()))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
assert!(!urls.is_empty()); assert!(!urls.is_empty());
for url in urls { for url in urls {
reqwest::blocking::get(url)?.error_for_status()?; reqwest::blocking::get(url)?.error_for_status()?;
} }

32
tests/cli.rs Normal file
View File

@@ -0,0 +1,32 @@
//! Run cli with different args, not starting a server
mod fixtures;
use assert_cmd::prelude::*;
use clap::ValueEnum;
use clap_complete::Shell;
use fixtures::Error;
use std::process::Command;
#[test]
/// Show help and exit.
fn help_shows() -> Result<(), Error> {
Command::cargo_bin("dufs")?.arg("-h").assert().success();
Ok(())
}
#[test]
/// Print completions and exit.
fn print_completions() -> Result<(), Error> {
// let shell_enums = EnumValueParser::<Shell>::new();
for shell in Shell::value_variants() {
Command::cargo_bin("dufs")?
.arg("--completions")
.arg(shell.to_string())
.assert()
.success();
}
Ok(())
}

View File

@@ -7,31 +7,27 @@ use rstest::rstest;
#[rstest] #[rstest]
fn cors(#[with(&["--enable-cors"])] server: TestServer) -> Result<(), Error> { fn cors(#[with(&["--enable-cors"])] server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(server.url())?; let resp = reqwest::blocking::get(server.url())?;
assert_eq!( assert_eq!(
resp.headers().get("access-control-allow-origin").unwrap(), resp.headers().get("access-control-allow-origin").unwrap(),
"*" "*"
); );
assert_eq!( assert_eq!(
resp.headers().get("access-control-allow-headers").unwrap(), resp.headers()
"range, content-type, accept, origin, www-authenticate" .get("access-control-allow-credentials")
.unwrap(),
"true"
); );
Ok(())
}
#[rstest]
fn cors_options(#[with(&["--enable-cors"])] server: TestServer) -> Result<(), Error> {
let resp = fetch!(b"OPTIONS", server.url()).send()?;
assert_eq!( assert_eq!(
resp.headers().get("access-control-allow-origin").unwrap(), resp.headers().get("access-control-allow-methods").unwrap(),
"*" "GET,HEAD,PUT,OPTIONS,DELETE,PROPFIND,COPY,MOVE"
); );
assert_eq!( assert_eq!(
resp.headers().get("access-control-allow-headers").unwrap(), resp.headers().get("access-control-allow-headers").unwrap(),
"range, content-type, accept, origin, www-authenticate" "Authorization,Destination,Range,Content-Type"
);
assert_eq!(
resp.headers().get("access-control-expose-headers").unwrap(),
"WWW-Authenticate,Content-Range,Accept-Ranges,Content-Disposition"
); );
Ok(()) Ok(())
} }

11
tests/data/cert_ecdsa.pem Normal file
View File

@@ -0,0 +1,11 @@
-----BEGIN CERTIFICATE-----
MIIBfTCCASOgAwIBAgIUfrAUHXIfeM54OLnTIUD9xT6FIwkwCgYIKoZIzj0EAwIw
FDESMBAGA1UEAwwJbG9jYWxob3N0MB4XDTIyMDgwMjAxMjQ1NFoXDTMyMDczMDAx
MjQ1NFowFDESMBAGA1UEAwwJbG9jYWxob3N0MFkwEwYHKoZIzj0CAQYIKoZIzj0D
AQcDQgAEW4tBe0jF2wYSLCvdreb0izR/8sgKNKkbe4xPyA9uNEbtTk58eoO3944R
JPT6S5wRTHFpF0BJhQRfiuW4K2EUcaNTMFEwHQYDVR0OBBYEFEebUDkiMJoV2d5W
8o+6p4DauHFFMB8GA1UdIwQYMBaAFEebUDkiMJoV2d5W8o+6p4DauHFFMA8GA1Ud
EwEB/wQFMAMBAf8wCgYIKoZIzj0EAwIDSAAwRQIhAPJvmzqaq/S5yYxeB4se8k2z
6pnVNxrTT2CqdPD8Z+7rAiBZAyU+5+KbQq3aZsmuNUx+YOqTDMkaUR/nd/tjnnOX
gA==
-----END CERTIFICATE-----

View File

@@ -1,3 +1,5 @@
#!/usr/bin/env bash #!/usr/bin/env bash
openssl req -subj '/CN=localhost' -x509 -newkey rsa:4096 -keyout key_pkcs8.pem -out cert.pem -nodes -days 3650 openssl req -subj '/CN=localhost' -x509 -newkey rsa:4096 -keyout key_pkcs8.pem -out cert.pem -nodes -days 3650
openssl rsa -in key_pkcs8.pem -out key_pkcs1.pem openssl rsa -in key_pkcs8.pem -out key_pkcs1.pem
openssl ecparam -name prime256v1 -genkey -noout -out key_ecdsa.pem
openssl req -subj '/CN=localhost' -x509 -key key_ecdsa.pem -out cert_ecdsa.pem -nodes -days 3650

5
tests/data/key_ecdsa.pem Normal file
View File

@@ -0,0 +1,5 @@
-----BEGIN EC PRIVATE KEY-----
MHcCAQEEILOQ44lHqD4w12HJKlZJ+Y3u91eUKjabu3UKPSahhC89oAoGCCqGSM49
AwEHoUQDQgAEW4tBe0jF2wYSLCvdreb0izR/8sgKNKkbe4xPyA9uNEbtTk58eoO3
944RJPT6S5wRTHFpF0BJhQRfiuW4K2EUcQ==
-----END EC PRIVATE KEY-----

View File

@@ -1,25 +0,0 @@
mod fixtures;
mod utils;
use fixtures::{server, Error, TestServer};
use rstest::rstest;
#[rstest]
fn default_favicon(server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(format!("{}favicon.ico", server.url()))?;
assert_eq!(resp.status(), 200);
assert_eq!(resp.headers().get("content-type").unwrap(), "image/x-icon");
Ok(())
}
#[rstest]
fn exist_favicon(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
let url = format!("{}favicon.ico", server.url());
let data = b"abc";
let resp = fetch!(b"PUT", &url).body(data.to_vec()).send()?;
assert_eq!(resp.status(), 201);
let resp = reqwest::blocking::get(url)?;
assert_eq!(resp.status(), 200);
assert_eq!(resp.bytes()?, data.to_vec());
Ok(())
}

View File

@@ -11,39 +11,32 @@ use std::time::{Duration, Instant};
#[allow(dead_code)] #[allow(dead_code)]
pub type Error = Box<dyn std::error::Error>; pub type Error = Box<dyn std::error::Error>;
#[allow(dead_code)]
pub const BIN_FILE: &str = "😀.bin";
/// File names for testing purpose /// File names for testing purpose
#[allow(dead_code)] #[allow(dead_code)]
pub static FILES: &[&str] = &[ pub static FILES: &[&str] = &["test.txt", "test.html", "index.html", BIN_FILE];
"test.txt",
"test.html",
"index.html",
"test.mkv",
#[cfg(not(windows))]
"test \" \' & < >.csv",
"😀.data",
"⎙.mp4",
"#[]{}()@!$&'`+,;= %20.test",
#[cfg(unix)]
":?#[]{}<>()@!$&'`|*+,;= %20.test",
#[cfg(not(windows))]
"foo\\bar.test",
];
/// Directory names for testing diretory don't exist /// Directory names for testing directory don't exist
#[allow(dead_code)] #[allow(dead_code)]
pub static DIR_NO_FOUND: &str = "dir-no-found/"; pub static DIR_NO_FOUND: &str = "dir-no-found/";
/// Directory names for testing diretory don't have index.html /// Directory names for testing directory don't have index.html
#[allow(dead_code)] #[allow(dead_code)]
pub static DIR_NO_INDEX: &str = "dir-no-index/"; pub static DIR_NO_INDEX: &str = "dir-no-index/";
/// Directory names for testing hidden
#[allow(dead_code)]
pub static DIR_GIT: &str = ".git/";
/// Directory names for testings assets override
#[allow(dead_code)]
pub static DIR_ASSETS: &str = "dir-assets/";
/// Directory names for testing purpose /// Directory names for testing purpose
#[allow(dead_code)] #[allow(dead_code)]
pub static DIRECTORIES: &[&str] = &["dira/", "dirb/", "dirc/", DIR_NO_INDEX]; pub static DIRECTORIES: &[&str] = &["dir1/", "dir2/", "dir3/", DIR_NO_INDEX, DIR_GIT, DIR_ASSETS];
/// Name of a deeply nested file
#[allow(dead_code)]
pub static DEEPLY_NESTED_FILE: &str = "very/deeply/nested/test.rs";
/// Test fixture which creates a temporary directory with a few files and directories inside. /// Test fixture which creates a temporary directory with a few files and directories inside.
/// The directories also contain files. /// The directories also contain files.
@@ -52,27 +45,62 @@ pub static DEEPLY_NESTED_FILE: &str = "very/deeply/nested/test.rs";
pub fn tmpdir() -> TempDir { pub fn tmpdir() -> TempDir {
let tmpdir = assert_fs::TempDir::new().expect("Couldn't create a temp dir for tests"); let tmpdir = assert_fs::TempDir::new().expect("Couldn't create a temp dir for tests");
for file in FILES { for file in FILES {
tmpdir if *file == BIN_FILE {
.child(file) tmpdir.child(file).write_binary(b"bin\0\0123").unwrap();
.write_str(&format!("This is {}", file)) } else {
.expect("Couldn't write to file");
}
for directory in DIRECTORIES {
for file in FILES {
if *directory == DIR_NO_INDEX && *file == "index.html" {
continue;
}
tmpdir tmpdir
.child(format!("{}{}", directory, file)) .child(file)
.write_str(&format!("This is {}{}", directory, file)) .write_str(&format!("This is {file}"))
.expect("Couldn't write to file"); .unwrap();
} }
} }
for directory in DIRECTORIES {
if *directory == DIR_ASSETS {
tmpdir
.child(format!("{}{}", directory, "index.html"))
.write_str("__ASSERTS_PREFIX__index.js;DATA = __INDEX_DATA__")
.unwrap();
} else {
for file in FILES {
if *directory == DIR_NO_INDEX && *file == "index.html" {
continue;
}
if *file == BIN_FILE {
tmpdir
.child(format!("{directory}{file}"))
.write_binary(b"bin\0\0123")
.unwrap();
} else {
tmpdir
.child(format!("{directory}{file}"))
.write_str(&format!("This is {directory}{file}"))
.unwrap();
}
}
}
}
tmpdir.child("dir4/hidden").touch().unwrap();
tmpdir tmpdir
.child(&DEEPLY_NESTED_FILE) .child("content-types/bin.tar")
.write_str("File in a deeply nested directory.") .write_binary(b"\x7f\x45\x4c\x46\x02\x01\x00\x00")
.expect("Couldn't write to file"); .unwrap();
tmpdir
.child("content-types/bin")
.write_binary(b"\x7f\x45\x4c\x46\x02\x01\x00\x00")
.unwrap();
tmpdir
.child("content-types/file-utf8.txt")
.write_str("世界")
.unwrap();
tmpdir
.child("content-types/file-gbk.txt")
.write_binary(b"\xca\xc0\xbd\xe7")
.unwrap();
tmpdir
.child("content-types/file")
.write_str("世界")
.unwrap();
tmpdir tmpdir
} }
@@ -96,7 +124,6 @@ where
let tmpdir = tmpdir(); let tmpdir = tmpdir();
let child = Command::cargo_bin("dufs") let child = Command::cargo_bin("dufs")
.expect("Couldn't find test binary") .expect("Couldn't find test binary")
.env("RUST_LOG", "false")
.arg(tmpdir.path()) .arg(tmpdir.path())
.arg("-p") .arg("-p")
.arg(port.to_string()) .arg(port.to_string())
@@ -112,44 +139,15 @@ where
TestServer::new(port, tmpdir, child, is_tls) TestServer::new(port, tmpdir, child, is_tls)
} }
/// Same as `server()` but ignore stderr
#[fixture]
#[allow(dead_code)]
pub fn server_no_stderr<I>(#[default(&[] as &[&str])] args: I) -> TestServer
where
I: IntoIterator + Clone,
I::Item: AsRef<std::ffi::OsStr>,
{
let port = port();
let tmpdir = tmpdir();
let child = Command::cargo_bin("dufs")
.expect("Couldn't find test binary")
.env("RUST_LOG", "false")
.arg(tmpdir.path())
.arg("-p")
.arg(port.to_string())
.args(args.clone())
.stdout(Stdio::null())
.stderr(Stdio::null())
.spawn()
.expect("Couldn't run test binary");
let is_tls = args
.into_iter()
.any(|x| x.as_ref().to_str().unwrap().contains("tls"));
wait_for_port(port);
TestServer::new(port, tmpdir, child, is_tls)
}
/// Wait a max of 1s for the port to become available. /// Wait a max of 1s for the port to become available.
pub fn wait_for_port(port: u16) { pub fn wait_for_port(port: u16) {
let start_wait = Instant::now(); let start_wait = Instant::now();
while !port_check::is_port_reachable(format!("localhost:{}", port)) { while !port_check::is_port_reachable(format!("localhost:{port}")) {
sleep(Duration::from_millis(100)); sleep(Duration::from_millis(100));
if start_wait.elapsed().as_secs() > 1 { if start_wait.elapsed().as_secs() > 1 {
panic!("timeout waiting for port {}", port); panic!("timeout waiting for port {port}");
} }
} }
} }

72
tests/hidden.rs Normal file
View File

@@ -0,0 +1,72 @@
mod fixtures;
mod utils;
use fixtures::{server, Error, TestServer};
use rstest::rstest;
#[rstest]
#[case(server(&[] as &[&str]), true)]
#[case(server(&["--hidden", ".git,index.html"]), false)]
fn hidden_get_dir(#[case] server: TestServer, #[case] exist: bool) -> Result<(), Error> {
let resp = reqwest::blocking::get(server.url())?;
assert_eq!(resp.status(), 200);
let paths = utils::retrieve_index_paths(&resp.text()?);
assert!(paths.contains("dir1/"));
assert_eq!(paths.contains(".git/"), exist);
assert_eq!(paths.contains("index.html"), exist);
Ok(())
}
#[rstest]
#[case(server(&[] as &[&str]), true)]
#[case(server(&["--hidden", "*.html"]), false)]
fn hidden_get_dir2(#[case] server: TestServer, #[case] exist: bool) -> Result<(), Error> {
let resp = reqwest::blocking::get(server.url())?;
assert_eq!(resp.status(), 200);
let paths = utils::retrieve_index_paths(&resp.text()?);
assert!(paths.contains("dir1/"));
assert_eq!(paths.contains("index.html"), exist);
assert_eq!(paths.contains("test.html"), exist);
Ok(())
}
#[rstest]
#[case(server(&[] as &[&str]), true)]
#[case(server(&["--hidden", ".git,index.html"]), false)]
fn hidden_propfind_dir(#[case] server: TestServer, #[case] exist: bool) -> Result<(), Error> {
let resp = fetch!(b"PROPFIND", server.url()).send()?;
assert_eq!(resp.status(), 207);
let body = resp.text()?;
assert!(body.contains("<D:href>/dir1/</D:href>"));
assert_eq!(body.contains("<D:href>/.git/</D:href>"), exist);
assert_eq!(body.contains("<D:href>/index.html</D:href>"), exist);
Ok(())
}
#[rstest]
#[case(server(&["--allow-search"] as &[&str]), true)]
#[case(server(&["--allow-search", "--hidden", ".git,test.html"]), false)]
fn hidden_search_dir(#[case] server: TestServer, #[case] exist: bool) -> Result<(), Error> {
let resp = reqwest::blocking::get(format!("{}?q={}", server.url(), "test.html"))?;
assert_eq!(resp.status(), 200);
let paths = utils::retrieve_index_paths(&resp.text()?);
for p in paths {
assert_eq!(p.contains("test.html"), exist);
}
Ok(())
}
#[rstest]
#[case(server(&["--hidden", "hidden/"]), "dir4/", 1)]
#[case(server(&["--hidden", "hidden"]), "dir4/", 0)]
fn hidden_dir_noly(
#[case] server: TestServer,
#[case] dir: &str,
#[case] count: usize,
) -> Result<(), Error> {
let resp = reqwest::blocking::get(format!("{}{}", server.url(), dir))?;
assert_eq!(resp.status(), 200);
let paths = utils::retrieve_index_paths(&resp.text()?);
assert_eq!(paths.len(), count);
Ok(())
}

View File

@@ -1,13 +1,15 @@
mod fixtures; mod fixtures;
mod utils; mod utils;
use fixtures::{server, Error, TestServer}; use fixtures::{server, Error, TestServer, BIN_FILE};
use rstest::rstest; use rstest::rstest;
use serde_json::Value;
use utils::retrive_edit_file;
#[rstest] #[rstest]
fn get_dir(server: TestServer) -> Result<(), Error> { fn get_dir(server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(server.url())?; let resp = reqwest::blocking::get(server.url())?;
assert_index_resp!(resp); assert_resp_paths!(resp);
Ok(()) Ok(())
} }
@@ -38,7 +40,7 @@ fn head_dir_404(server: TestServer) -> Result<(), Error> {
} }
#[rstest] #[rstest]
fn get_dir_zip(server: TestServer) -> Result<(), Error> { fn get_dir_zip(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(format!("{}?zip", server.url()))?; let resp = reqwest::blocking::get(format!("{}?zip", server.url()))?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
assert_eq!( assert_eq!(
@@ -50,7 +52,33 @@ fn get_dir_zip(server: TestServer) -> Result<(), Error> {
} }
#[rstest] #[rstest]
fn head_dir_zip(server: TestServer) -> Result<(), Error> { fn get_dir_json(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(format!("{}?json", server.url()))?;
assert_eq!(resp.status(), 200);
assert_eq!(
resp.headers().get("content-type").unwrap(),
"application/json"
);
let json: Value = serde_json::from_str(&resp.text().unwrap()).unwrap();
assert!(json["paths"].as_array().is_some());
Ok(())
}
#[rstest]
fn get_dir_simple(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(format!("{}?simple", server.url()))?;
assert_eq!(resp.status(), 200);
assert_eq!(
resp.headers().get("content-type").unwrap(),
"text/html; charset=utf-8"
);
let text = resp.text().unwrap();
assert!(text.split('\n').any(|v| v == "index.html"));
Ok(())
}
#[rstest]
fn head_dir_zip(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
let resp = fetch!(b"HEAD", format!("{}?zip", server.url())).send()?; let resp = fetch!(b"HEAD", format!("{}?zip", server.url())).send()?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
assert_eq!( assert_eq!(
@@ -66,26 +94,35 @@ fn head_dir_zip(server: TestServer) -> Result<(), Error> {
fn get_dir_search(#[with(&["-A"])] server: TestServer) -> Result<(), Error> { fn get_dir_search(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(format!("{}?q={}", server.url(), "test.html"))?; let resp = reqwest::blocking::get(format!("{}?q={}", server.url(), "test.html"))?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
let paths = utils::retrive_index_paths(&resp.text()?); let paths = utils::retrieve_index_paths(&resp.text()?);
assert!(!paths.is_empty()); assert!(!paths.is_empty());
for p in paths { for p in paths {
assert!(p.contains(&"test.html")); assert!(p.contains("test.html"));
} }
Ok(()) Ok(())
} }
#[rstest] #[rstest]
fn get_dir_search2(#[with(&["-A"])] server: TestServer) -> Result<(), Error> { fn get_dir_search2(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(format!("{}?q={}", server.url(), "😀.data"))?; let resp = reqwest::blocking::get(format!("{}?q={BIN_FILE}", server.url()))?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
let paths = utils::retrive_index_paths(&resp.text()?); let paths = utils::retrieve_index_paths(&resp.text()?);
assert!(!paths.is_empty()); assert!(!paths.is_empty());
for p in paths { for p in paths {
assert!(p.contains(&"😀.data")); assert!(p.contains(BIN_FILE));
} }
Ok(()) Ok(())
} }
#[rstest]
fn get_dir_search3(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(format!("{}?q={}&simple", server.url(), "test.html"))?;
assert_eq!(resp.status(), 200);
let text = resp.text().unwrap();
assert!(text.split('\n').any(|v| v == "test.html"));
Ok(())
}
#[rstest] #[rstest]
fn head_dir_search(#[with(&["-A"])] server: TestServer) -> Result<(), Error> { fn head_dir_search(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
let resp = fetch!(b"HEAD", format!("{}?q={}", server.url(), "test.html")).send()?; let resp = fetch!(b"HEAD", format!("{}?q={}", server.url(), "test.html")).send()?;
@@ -98,11 +135,23 @@ fn head_dir_search(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
Ok(()) Ok(())
} }
#[rstest]
fn empty_search(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(format!("{}?q=", server.url()))?;
assert_eq!(resp.status(), 200);
let paths = utils::retrieve_index_paths(&resp.text()?);
assert!(paths.is_empty());
Ok(())
}
#[rstest] #[rstest]
fn get_file(server: TestServer) -> Result<(), Error> { fn get_file(server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(format!("{}index.html", server.url()))?; let resp = reqwest::blocking::get(format!("{}index.html", server.url()))?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
assert_eq!(resp.headers().get("content-type").unwrap(), "text/html"); assert_eq!(
resp.headers().get("content-type").unwrap(),
"text/html; charset=UTF-8"
);
assert_eq!(resp.headers().get("accept-ranges").unwrap(), "bytes"); assert_eq!(resp.headers().get("accept-ranges").unwrap(), "bytes");
assert!(resp.headers().contains_key("etag")); assert!(resp.headers().contains_key("etag"));
assert!(resp.headers().contains_key("last-modified")); assert!(resp.headers().contains_key("last-modified"));
@@ -115,7 +164,10 @@ fn get_file(server: TestServer) -> Result<(), Error> {
fn head_file(server: TestServer) -> Result<(), Error> { fn head_file(server: TestServer) -> Result<(), Error> {
let resp = fetch!(b"HEAD", format!("{}index.html", server.url())).send()?; let resp = fetch!(b"HEAD", format!("{}index.html", server.url())).send()?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
assert_eq!(resp.headers().get("content-type").unwrap(), "text/html"); assert_eq!(
resp.headers().get("content-type").unwrap(),
"text/html; charset=UTF-8"
);
assert_eq!(resp.headers().get("accept-ranges").unwrap(), "bytes"); assert_eq!(resp.headers().get("accept-ranges").unwrap(), "bytes");
assert!(resp.headers().contains_key("content-disposition")); assert!(resp.headers().contains_key("content-disposition"));
assert!(resp.headers().contains_key("etag")); assert!(resp.headers().contains_key("etag"));
@@ -132,6 +184,24 @@ fn get_file_404(server: TestServer) -> Result<(), Error> {
Ok(()) Ok(())
} }
#[rstest]
fn get_file_edit(server: TestServer) -> Result<(), Error> {
let resp = fetch!(b"GET", format!("{}index.html?edit", server.url())).send()?;
assert_eq!(resp.status(), 200);
let editable = retrive_edit_file(&resp.text().unwrap()).unwrap();
assert!(editable);
Ok(())
}
#[rstest]
fn get_file_edit_bin(server: TestServer) -> Result<(), Error> {
let resp = fetch!(b"GET", format!("{}{BIN_FILE}?edit", server.url())).send()?;
assert_eq!(resp.status(), 200);
let editable = retrive_edit_file(&resp.text().unwrap()).unwrap();
assert!(!editable);
Ok(())
}
#[rstest] #[rstest]
fn head_file_404(server: TestServer) -> Result<(), Error> { fn head_file_404(server: TestServer) -> Result<(), Error> {
let resp = fetch!(b"HEAD", format!("{}404", server.url())).send()?; let resp = fetch!(b"HEAD", format!("{}404", server.url())).send()?;
@@ -173,7 +243,7 @@ fn put_file_create_dir(#[with(&["-A"])] server: TestServer) -> Result<(), Error>
#[rstest] #[rstest]
fn put_file_conflict_dir(#[with(&["-A"])] server: TestServer) -> Result<(), Error> { fn put_file_conflict_dir(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
let url = format!("{}dira", server.url()); let url = format!("{}dir1", server.url());
let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?; let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
assert_eq!(resp.status(), 403); assert_eq!(resp.status(), 403);
Ok(()) Ok(())
@@ -195,3 +265,33 @@ fn delete_file_404(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
assert_eq!(resp.status(), 404); assert_eq!(resp.status(), 404);
Ok(()) Ok(())
} }
#[rstest]
fn get_file_content_type(server: TestServer) -> Result<(), Error> {
    // The server should derive content-type from the file extension,
    // fall back to octet-stream for unknown binary data, and guess the
    // text charset (UTF-8 / GBK) for plain-text files.
    let cases = [
        ("content-types/bin.tar", "application/x-tar"),
        ("content-types/bin", "application/octet-stream"),
        ("content-types/file-utf8.txt", "text/plain; charset=UTF-8"),
        ("content-types/file-gbk.txt", "text/plain; charset=GBK"),
        // Extensionless text file: detected as text, charset guessed.
        ("content-types/file", "text/plain; charset=UTF-8"),
    ];
    for (path, expected) in cases {
        let resp = reqwest::blocking::get(format!("{}{path}", server.url()))?;
        assert_eq!(resp.headers().get("content-type").unwrap(), expected);
    }
    Ok(())
}

78
tests/log_http.rs Normal file
View File

@@ -0,0 +1,78 @@
mod fixtures;
mod utils;
use diqwest::blocking::WithDigestAuth;
use fixtures::{port, tmpdir, wait_for_port, Error};
use assert_cmd::prelude::*;
use assert_fs::fixture::TempDir;
use rstest::rstest;
use std::io::Read;
use std::process::{Command, Stdio};
#[rstest]
#[case(&["-a", "/@user:pass", "--log-format", "$remote_user"], false)]
#[case(&["-a", "/@user:pass", "--log-format", "$remote_user", "--auth-method", "basic"], true)]
fn log_remote_user(
    tmpdir: TempDir,
    port: u16,
    #[case] args: &[&str],
    #[case] is_basic: bool,
) -> Result<(), Error> {
    // With `--log-format $remote_user`, an authenticated request (basic
    // or digest auth, depending on the case) must end the access-log
    // line with the authenticated user name.
    let mut child = Command::cargo_bin("dufs")?
        .arg(tmpdir.path())
        .arg("-p")
        .arg(port.to_string())
        .args(args)
        .stdout(Stdio::piped())
        .spawn()?;
    wait_for_port(port);

    let stdout = child.stdout.as_mut().expect("Failed to get stdout");

    let req = fetch!(b"GET", &format!("http://localhost:{port}"));
    let resp = if is_basic {
        req.basic_auth("user", Some("pass")).send()?
    } else {
        req.send_with_digest_auth("user", "pass")?
    };
    assert_eq!(resp.status(), 200);

    // Read whatever the server printed so far and inspect the last line.
    let mut chunk = [0; 1000];
    let read_len = stdout.read(&mut chunk)?;
    let logged = std::str::from_utf8(&chunk[..read_len])?;
    assert!(logged.lines().last().unwrap().ends_with("user"));

    child.kill()?;
    Ok(())
}
#[rstest]
#[case(&["--log-format", ""])]
fn no_log(tmpdir: TempDir, port: u16, #[case] args: &[&str]) -> Result<(), Error> {
    // An empty `--log-format` disables access logging: after serving a
    // request, the last line written to stdout must be empty.
    let mut child = Command::cargo_bin("dufs")?
        .arg(tmpdir.path())
        .arg("-p")
        .arg(port.to_string())
        .args(args)
        .stdout(Stdio::piped())
        .spawn()?;
    wait_for_port(port);

    let stdout = child.stdout.as_mut().expect("Failed to get stdout");

    let resp = fetch!(b"GET", &format!("http://localhost:{port}")).send()?;
    assert_eq!(resp.status(), 200);

    let mut buf = [0; 1000];
    let buf_len = stdout.read(&mut buf)?;
    let output = std::str::from_utf8(&buf[0..buf_len])?;
    assert_eq!(output.lines().last().unwrap(), "");

    // Fix: terminate the spawned server so the test does not leak a
    // child process (mirrors `log_remote_user` and the single_file tests,
    // which all call `child.kill()` before returning).
    child.kill()?;
    Ok(())
}

View File

@@ -1,7 +1,7 @@
mod fixtures; mod fixtures;
mod utils; mod utils;
use fixtures::{server, Error, TestServer, DIR_NO_FOUND, DIR_NO_INDEX}; use fixtures::{server, Error, TestServer, BIN_FILE, DIR_NO_FOUND, DIR_NO_INDEX, FILES};
use rstest::rstest; use rstest::rstest;
#[rstest] #[rstest]
@@ -30,17 +30,19 @@ fn render_try_index(#[with(&["--render-try-index"])] server: TestServer) -> Resu
#[rstest] #[rstest]
fn render_try_index2(#[with(&["--render-try-index"])] server: TestServer) -> Result<(), Error> { fn render_try_index2(#[with(&["--render-try-index"])] server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(format!("{}{}", server.url(), DIR_NO_INDEX))?; let resp = reqwest::blocking::get(format!("{}{}", server.url(), DIR_NO_INDEX))?;
let files: Vec<&str> = self::fixtures::FILES let files: Vec<&str> = FILES
.iter() .iter()
.filter(|v| **v != "index.html") .filter(|v| **v != "index.html")
.cloned() .cloned()
.collect(); .collect();
assert_index_resp!(resp, files); assert_resp_paths!(resp, files);
Ok(()) Ok(())
} }
#[rstest] #[rstest]
fn render_try_index3(#[with(&["--render-try-index"])] server: TestServer) -> Result<(), Error> { fn render_try_index3(
#[with(&["--render-try-index", "--allow-archive"])] server: TestServer,
) -> Result<(), Error> {
let resp = reqwest::blocking::get(format!("{}{}?zip", server.url(), DIR_NO_INDEX))?; let resp = reqwest::blocking::get(format!("{}{}?zip", server.url(), DIR_NO_INDEX))?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
assert_eq!( assert_eq!(
@@ -50,6 +52,17 @@ fn render_try_index3(#[with(&["--render-try-index"])] server: TestServer) -> Res
Ok(()) Ok(())
} }
#[rstest]
#[case(server(&["--render-try-index"] as &[&str]), false)]
#[case(server(&["--render-try-index", "--allow-search"] as &[&str]), true)]
fn render_try_index4(#[case] server: TestServer, #[case] searched: bool) -> Result<(), Error> {
    // A `?q=` query against a directory with no index should only yield
    // matching paths when search is enabled (`--allow-search`).
    let query_url = format!("{}{}?q={}", server.url(), DIR_NO_INDEX, BIN_FILE);
    let resp = reqwest::blocking::get(query_url)?;
    assert_eq!(resp.status(), 200);
    let body = resp.text()?;
    let paths = utils::retrieve_index_paths(&body);
    assert_eq!(paths.iter().all(|p| p.contains(BIN_FILE)), searched);
    Ok(())
}
#[rstest] #[rstest]
fn render_spa(#[with(&["--render-spa"])] server: TestServer) -> Result<(), Error> { fn render_spa(#[with(&["--render-spa"])] server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(server.url())?; let resp = reqwest::blocking::get(server.url())?;

60
tests/single_file.rs Normal file
View File

@@ -0,0 +1,60 @@
//! Run file server with different args
mod fixtures;
mod utils;
use assert_cmd::prelude::*;
use assert_fs::fixture::TempDir;
use fixtures::{port, tmpdir, wait_for_port, Error};
use rstest::rstest;
use std::process::{Command, Stdio};
#[rstest]
#[case("index.html")]
fn single_file(tmpdir: TempDir, port: u16, #[case] file: &str) -> Result<(), Error> {
    // When dufs is pointed at a single file, that file's content must be
    // served at the root path, the root path with a trailing slash, and
    // the file's own name.
    let mut child = Command::cargo_bin("dufs")?
        .arg(tmpdir.path().join(file))
        .arg("-p")
        .arg(port.to_string())
        .stdout(Stdio::piped())
        .spawn()?;
    wait_for_port(port);

    for suffix in ["", "/", "/index.html"] {
        let resp = reqwest::blocking::get(format!("http://localhost:{port}{suffix}"))?;
        assert_eq!(resp.text()?, "This is index.html");
    }

    child.kill()?;
    Ok(())
}
#[rstest]
#[case("index.html")]
fn path_prefix_single_file(tmpdir: TempDir, port: u16, #[case] file: &str) -> Result<(), Error> {
    // Single-file mode combined with `--path-prefix xyz`: the file is
    // served under the prefix (with and without trailing slash, and by
    // name), while the unprefixed root returns 404.
    let mut child = Command::cargo_bin("dufs")?
        .arg(tmpdir.path().join(file))
        .arg("-p")
        .arg(port.to_string())
        .arg("--path-prefix")
        .arg("xyz")
        .stdout(Stdio::piped())
        .spawn()?;
    wait_for_port(port);

    for suffix in ["/xyz", "/xyz/", "/xyz/index.html"] {
        let resp = reqwest::blocking::get(format!("http://localhost:{port}{suffix}"))?;
        assert_eq!(resp.text()?, "This is index.html");
    }

    // Outside the prefix nothing is served.
    let resp = reqwest::blocking::get(format!("http://localhost:{port}"))?;
    assert_eq!(resp.status(), 404);

    child.kill()?;
    Ok(())
}

29
tests/sort.rs Normal file
View File

@@ -0,0 +1,29 @@
mod fixtures;
mod utils;
use fixtures::{server, Error, TestServer};
use rstest::rstest;
#[rstest]
fn ls_dir_sort_by_name(server: TestServer) -> Result<(), Error> {
let url = server.url();
let resp = reqwest::blocking::get(format!("{url}?sort=name&order=asc"))?;
let paths1 = self::utils::retrieve_index_paths(&resp.text()?);
let resp = reqwest::blocking::get(format!("{url}?sort=name&order=desc"))?;
let mut paths2 = self::utils::retrieve_index_paths(&resp.text()?);
paths2.reverse();
assert_eq!(paths1, paths2);
Ok(())
}
#[rstest]
fn search_dir_sort_by_name(server: TestServer) -> Result<(), Error> {
let url = server.url();
let resp = reqwest::blocking::get(format!("{url}?q=test.html&sort=name&order=asc"))?;
let paths1 = self::utils::retrieve_index_paths(&resp.text()?);
let resp = reqwest::blocking::get(format!("{url}?q=test.html&sort=name&order=desc"))?;
let mut paths2 = self::utils::retrieve_index_paths(&resp.text()?);
paths2.reverse();
assert_eq!(paths1, paths2);
Ok(())
}

View File

@@ -20,9 +20,9 @@ fn default_not_allow_symlink(server: TestServer, tmpdir: TempDir) -> Result<(),
let resp = reqwest::blocking::get(format!("{}{}/index.html", server.url(), dir))?; let resp = reqwest::blocking::get(format!("{}{}/index.html", server.url(), dir))?;
assert_eq!(resp.status(), 404); assert_eq!(resp.status(), 404);
let resp = reqwest::blocking::get(server.url())?; let resp = reqwest::blocking::get(server.url())?;
let paths = utils::retrive_index_paths(&resp.text()?); let paths = utils::retrieve_index_paths(&resp.text()?);
assert!(!paths.is_empty()); assert!(!paths.is_empty());
assert!(!paths.contains(&format!("{}/", dir))); assert!(!paths.contains(&format!("{dir}/")));
Ok(()) Ok(())
} }
@@ -39,8 +39,8 @@ fn allow_symlink(
let resp = reqwest::blocking::get(format!("{}{}/index.html", server.url(), dir))?; let resp = reqwest::blocking::get(format!("{}{}/index.html", server.url(), dir))?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
let resp = reqwest::blocking::get(server.url())?; let resp = reqwest::blocking::get(server.url())?;
let paths = utils::retrive_index_paths(&resp.text()?); let paths = utils::retrieve_index_paths(&resp.text()?);
assert!(!paths.is_empty()); assert!(!paths.is_empty());
assert!(paths.contains(&format!("{}/", dir))); assert!(paths.contains(&format!("{dir}/")));
Ok(()) Ok(())
} }

View File

@@ -17,12 +17,16 @@ use rstest::rstest;
"--tls-cert", "tests/data/cert.pem", "--tls-cert", "tests/data/cert.pem",
"--tls-key", "tests/data/key_pkcs1.pem", "--tls-key", "tests/data/key_pkcs1.pem",
]))] ]))]
#[case(server(&[
"--tls-cert", "tests/data/cert_ecdsa.pem",
"--tls-key", "tests/data/key_ecdsa.pem",
]))]
fn tls_works(#[case] server: TestServer) -> Result<(), Error> { fn tls_works(#[case] server: TestServer) -> Result<(), Error> {
let client = ClientBuilder::new() let client = ClientBuilder::new()
.danger_accept_invalid_certs(true) .danger_accept_invalid_certs(true)
.build()?; .build()?;
let resp = client.get(server.url()).send()?.error_for_status()?; let resp = client.get(server.url()).send()?.error_for_status()?;
assert_index_resp!(resp); assert_resp_paths!(resp);
Ok(()) Ok(())
} }
@@ -30,10 +34,10 @@ fn tls_works(#[case] server: TestServer) -> Result<(), Error> {
#[rstest] #[rstest]
fn wrong_path_cert() -> Result<(), Error> { fn wrong_path_cert() -> Result<(), Error> {
Command::cargo_bin("dufs")? Command::cargo_bin("dufs")?
.args(&["--tls-cert", "wrong", "--tls-key", "tests/data/key.pem"]) .args(["--tls-cert", "wrong", "--tls-key", "tests/data/key.pem"])
.assert() .assert()
.failure() .failure()
.stderr(contains("error: Failed to access `wrong`")); .stderr(contains("Failed to access `wrong`"));
Ok(()) Ok(())
} }
@@ -42,10 +46,10 @@ fn wrong_path_cert() -> Result<(), Error> {
#[rstest] #[rstest]
fn wrong_path_key() -> Result<(), Error> { fn wrong_path_key() -> Result<(), Error> {
Command::cargo_bin("dufs")? Command::cargo_bin("dufs")?
.args(&["--tls-cert", "tests/data/cert.pem", "--tls-key", "wrong"]) .args(["--tls-cert", "tests/data/cert.pem", "--tls-key", "wrong"])
.assert() .assert()
.failure() .failure()
.stderr(contains("error: Failed to access `wrong`")); .stderr(contains("Failed to access `wrong`"));
Ok(()) Ok(())
} }

View File

@@ -1,15 +1,15 @@
use indexmap::IndexSet;
use serde_json::Value; use serde_json::Value;
use std::collections::HashSet;
#[macro_export] #[macro_export]
macro_rules! assert_index_resp { macro_rules! assert_resp_paths {
($resp:ident) => { ($resp:ident) => {
assert_index_resp!($resp, self::fixtures::FILES) assert_resp_paths!($resp, self::fixtures::FILES)
}; };
($resp:ident, $files:expr) => { ($resp:ident, $files:expr) => {
assert_eq!($resp.status(), 200); assert_eq!($resp.status(), 200);
let body = $resp.text()?; let body = $resp.text()?;
let paths = self::utils::retrive_index_paths(&body); let paths = self::utils::retrieve_index_paths(&body);
assert!(!paths.is_empty()); assert!(!paths.is_empty());
for file in $files { for file in $files {
assert!(paths.contains(&file.to_string())); assert!(paths.contains(&file.to_string()));
@@ -25,8 +25,32 @@ macro_rules! fetch {
} }
#[allow(dead_code)] #[allow(dead_code)]
pub fn retrive_index_paths(index: &str) -> HashSet<String> { pub fn retrieve_index_paths(content: &str) -> IndexSet<String> {
retrive_index_paths_impl(index).unwrap_or_default() let value = retrive_json(content).unwrap();
let paths = value
.get("paths")
.unwrap()
.as_array()
.unwrap()
.iter()
.flat_map(|v| {
let name = v.get("name")?.as_str()?;
let path_type = v.get("path_type")?.as_str()?;
if path_type.ends_with("Dir") {
Some(format!("{name}/"))
} else {
Some(name.to_owned())
}
})
.collect();
paths
}
#[allow(dead_code)]
pub fn retrive_edit_file(content: &str) -> Option<bool> {
let value = retrive_json(content)?;
let value = value.get("editable").unwrap();
Some(value.as_bool().unwrap())
} }
#[allow(dead_code)] #[allow(dead_code)]
@@ -35,27 +59,10 @@ pub fn encode_uri(v: &str) -> String {
parts.join("/") parts.join("/")
} }
fn retrive_index_paths_impl(index: &str) -> Option<HashSet<String>> { fn retrive_json(content: &str) -> Option<Value> {
let lines: Vec<&str> = index.lines().collect(); let lines: Vec<&str> = content.lines().collect();
let (i, _) = lines let line = lines.iter().find(|v| v.contains("DATA ="))?;
.iter() let line_col = line.find("DATA =").unwrap() + 6;
.enumerate() let value: Value = line[line_col..].parse().unwrap();
.find(|(_, v)| v.contains("const DATA"))?; Some(value)
let line = lines.get(i + 1)?;
let value: Value = line.parse().ok()?;
let paths = value
.get("paths")?
.as_array()?
.iter()
.flat_map(|v| {
let name = v.get("name")?.as_str()?;
let path_type = v.get("path_type")?.as_str()?;
if path_type.ends_with("Dir") {
Some(format!("{}/", name))
} else {
Some(name.to_owned())
}
})
.collect();
Some(paths)
} }

View File

@@ -7,13 +7,13 @@ use xml::escape::escape_str_pcdata;
#[rstest] #[rstest]
fn propfind_dir(server: TestServer) -> Result<(), Error> { fn propfind_dir(server: TestServer) -> Result<(), Error> {
let resp = fetch!(b"PROPFIND", format!("{}dira", server.url())).send()?; let resp = fetch!(b"PROPFIND", format!("{}dir1", server.url())).send()?;
assert_eq!(resp.status(), 207); assert_eq!(resp.status(), 207);
let body = resp.text()?; let body = resp.text()?;
assert!(body.contains("<D:href>/dira/</D:href>")); assert!(body.contains("<D:href>/dir1/</D:href>"));
assert!(body.contains("<D:displayname>dira</D:displayname>")); assert!(body.contains("<D:displayname>dir1</D:displayname>"));
for f in FILES { for f in FILES {
assert!(body.contains(&format!("<D:href>/dira/{}</D:href>", utils::encode_uri(f)))); assert!(body.contains(&format!("<D:href>/dir1/{}</D:href>", utils::encode_uri(f))));
assert!(body.contains(&format!( assert!(body.contains(&format!(
"<D:displayname>{}</D:displayname>", "<D:displayname>{}</D:displayname>",
escape_str_pcdata(f) escape_str_pcdata(f)
@@ -24,13 +24,13 @@ fn propfind_dir(server: TestServer) -> Result<(), Error> {
#[rstest] #[rstest]
fn propfind_dir_depth0(server: TestServer) -> Result<(), Error> { fn propfind_dir_depth0(server: TestServer) -> Result<(), Error> {
let resp = fetch!(b"PROPFIND", format!("{}dira", server.url())) let resp = fetch!(b"PROPFIND", format!("{}dir1", server.url()))
.header("depth", "0") .header("depth", "0")
.send()?; .send()?;
assert_eq!(resp.status(), 207); assert_eq!(resp.status(), 207);
let body = resp.text()?; let body = resp.text()?;
assert!(body.contains("<D:href>/dira/</D:href>")); assert!(body.contains("<D:href>/dir1/</D:href>"));
assert!(body.contains("<D:displayname>dira</D:displayname>")); assert!(body.contains("<D:displayname>dir1</D:displayname>"));
assert_eq!( assert_eq!(
body.lines() body.lines()
.filter(|v| *v == "<D:status>HTTP/1.1 200 OK</D:status>") .filter(|v| *v == "<D:status>HTTP/1.1 200 OK</D:status>")
@@ -47,6 +47,13 @@ fn propfind_404(server: TestServer) -> Result<(), Error> {
Ok(()) Ok(())
} }
#[rstest]
fn propfind_double_slash(server: TestServer) -> Result<(), Error> {
    // A request path containing a doubled slash ("//") must still be
    // answered by PROPFIND with 207 Multi-Status.
    let url = format!("{}/", server.url());
    let resp = fetch!(b"PROPFIND", url).send()?;
    assert_eq!(resp.status(), 207);
    Ok(())
}
#[rstest] #[rstest]
fn propfind_file(server: TestServer) -> Result<(), Error> { fn propfind_file(server: TestServer) -> Result<(), Error> {
let resp = fetch!(b"PROPFIND", format!("{}test.html", server.url())).send()?; let resp = fetch!(b"PROPFIND", format!("{}test.html", server.url())).send()?;
@@ -93,6 +100,13 @@ fn mkcol_not_allow_upload(server: TestServer) -> Result<(), Error> {
Ok(()) Ok(())
} }
#[rstest]
fn mkcol_already_exists(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
    // MKCOL on a directory that already exists must be rejected with
    // 405 Method Not Allowed.
    let url = format!("{}dir1", server.url());
    let resp = fetch!(b"MKCOL", &url).send()?;
    assert_eq!(resp.status(), 405);
    Ok(())
}
#[rstest] #[rstest]
fn copy_file(#[with(&["-A"])] server: TestServer) -> Result<(), Error> { fn copy_file(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
let new_url = format!("{}test2.html", server.url()); let new_url = format!("{}test2.html", server.url());