Compare commits

..

54 Commits

Author SHA1 Message Date
sigoden
cb1f3cddea chore(release): version v0.25.0 2022-07-07 07:51:51 +08:00
sigoden
05dbcfb2df feat: limit the number of concurrent uploads (#98) 2022-07-06 19:17:30 +08:00
sigoden
76e967fa59 feat: add completions (#97) 2022-07-06 12:11:00 +08:00
sigoden
140a360e37 chore: optimize move path default value 2022-07-05 09:16:21 +08:00
sigoden
604cbb7412 feat: check permission on move/copy destination (#93) 2022-07-04 23:25:05 +08:00
sigoden
c6541b1c36 feat: ui supports move folder/file to new path (#92) 2022-07-04 21:20:00 +08:00
sigoden
b6729a3d64 feat: ui supports creating folder (#91) 2022-07-04 20:12:35 +08:00
sigoden
4f1a35de5d chore(release): version v0.24.0 2022-07-03 06:47:49 +08:00
sigoden
2ffdcdf106 feat: allow search with --render-try-index (#88) 2022-07-02 23:25:57 +08:00
sigoden
1e0cdafbcf fix: unexpect stack overflow when searching a lot (#87) 2022-07-02 22:55:22 +08:00
sigoden
0a03941e05 chore: update deps 2022-07-02 11:48:47 +08:00
sigoden
07a7322748 chore: update readme 2022-07-01 21:37:56 +08:00
sigoden
936d08545b chore(release): version v0.23.1 2022-07-01 06:47:34 +08:00
sigoden
2e6af671ca fix: permissions of unzipped files (#84) 2022-06-30 19:29:47 +08:00
sigoden
583117c01f fix: safari layout and compatibility (#83) 2022-06-30 10:00:42 +08:00
sigoden
6e1df040b4 chore: update deps 2022-06-29 20:36:18 +08:00
sigoden
f5aa3354e1 chore: add github issule templates 2022-06-29 15:16:04 +08:00
sigoden
3ed0d885fe chore(release): version v0.23.0 2022-06-29 11:01:40 +08:00
sigoden
542e9a4ec5 chore: remove aarch64-linux-android platform 2022-06-29 10:58:43 +08:00
sigoden
5ee2c5504c ci: support more platforms (#76) 2022-06-29 10:51:59 +08:00
sigoden
fd02a53823 chore: replace old get-if-addrs with new if-addrs (#78) 2022-06-29 10:01:01 +08:00
sigoden
6554c1c308 feat: use feature to conditional support tls (#77) 2022-06-29 09:19:09 +08:00
sigoden
fe71600bd2 chore(release): version v0.22.0 2022-06-26 12:43:20 +08:00
sigoden
9cfeee0df0 chore: update args help message and readme 2022-06-25 09:58:39 +08:00
sigoden
eb7a536a3f feat: support hiding folders with --hidden (#73) 2022-06-25 08:15:16 +08:00
sigoden
c1c6dbc356 chore(release): version v0.21.0 2022-06-23 19:34:38 +08:00
sigoden
e29cf4c752 refactor: split css/js from index.html (#68) 2022-06-21 23:01:00 +08:00
sigoden
7f062b6705 feat: use custom logger with timestamp in rfc3339 (#67) 2022-06-21 21:19:51 +08:00
sigoden
ea8b9e9cce fix: escape name contains html escape code (#65) 2022-06-21 19:23:34 +08:00
sigoden
d2270be8fb chore: update changelog 2022-06-21 07:56:24 +08:00
sigoden
2d0dfed456 chore(release): version v0.20.0 2022-06-21 07:52:45 +08:00
sigoden
4058a2db72 feat: add option --allow-search (#62) 2022-06-21 07:23:20 +08:00
sigoden
069cb64889 fix: decodeURI searching string (#61) 2022-06-20 21:51:41 +08:00
sigoden
c85ea06785 chore: update cli help message and reamde 2022-06-20 19:40:09 +08:00
sigoden
68139c6263 chore: little improves 2022-06-20 15:11:39 +08:00
Joe Koop
deb6365a28 feat: added basic auth (#60)
* some small css fixes and changes

* added basic auth
https://stackoverflow.com/a/9534652/3642588

* most tests are passing

* fixed all the tests

* maybe now CI will pass

* implemented sigoden's suggestions

* test basic auth

* fixed some little things
2022-06-20 11:25:09 +08:00
sigoden
0d3acb8ae6 chore(release): version v0.19.0 2022-06-19 23:09:43 +08:00
sigoden
a67da8bdd3 feat: rename to dufs (#59)
close #50

BREAKING CHANGE: rename duf to dufs
2022-06-19 22:53:51 +08:00
sigoden
3260b52c47 chore: fix breadcrumb 2022-06-19 22:22:49 +08:00
sigoden
7194ebf248 chore: adjust ui 2022-06-19 22:16:43 +08:00
Joe Koop
b1b0fdd4db feat: reactive webpage (#51) 2022-06-19 22:04:59 +08:00
sigoden
db71f75236 feat: ui hidden root dirname (#58)
close #56
2022-06-19 21:23:19 +08:00
sigoden
e66951fd11 refactor: rename --cors to --enable-cors (#57)
BREAKING CHANGE: `--cors` rename to `--enable-cors`
2022-06-19 17:27:09 +08:00
sigoden
051ff8da2d chore: update readme 2022-06-19 15:30:42 +08:00
sigoden
c3ac2a21c9 feat: serve single file (#54)
close #53
2022-06-19 14:23:10 +08:00
sigoden
9c2e9d1503 feat: path level access control (#52)
BREAKING CHANGE: `--auth` is changed, `--no-auth-access` is removed
2022-06-19 11:26:03 +08:00
sigoden
9384cc8587 chore(release): version v0.18.0 2022-06-18 08:05:18 +08:00
sigoden
df48021757 chore: send not found text when 404 2022-06-18 08:03:48 +08:00
sigoden
af866aaaf4 chore: optimize --render-try-index 2022-06-17 19:05:25 +08:00
sigoden
68d238d34d feat: add slash to end of dir href 2022-06-17 19:01:17 +08:00
sigoden
a10150f2f8 chore: update readme 2022-06-17 10:59:19 +08:00
sigoden
5b11bb75dd feat: add option --render-try-index (#47)
close #46
2022-06-17 08:41:01 +08:00
sigoden
6d7da0363c chore(release): version v0.17.1 2022-06-16 10:31:47 +08:00
sigoden
d8f7335053 fix: range request (#44)
close #43
2022-06-16 10:24:32 +08:00
34 changed files with 2016 additions and 1126 deletions

17
.github/ISSUE_TEMPLATE/bug_report.md vendored Normal file
View File

@@ -0,0 +1,17 @@
---
name: Bug report
about: Create a report to help us improve
---
**Problem**
<!-- A clear and concise description of what the bug is. -->
**Log**
If applicable, add logs to help explain your problem.
**Environment:**
- Dufs version:
- Browser/Webdav Info:
- OS Info:

View File

@@ -0,0 +1,16 @@
---
name: Feature Request
about: If you have any interesting advice, you can tell us.
---
## Specific Demand
<!--
What feature do you need? Please describe it in detail.
-->
## Implement Suggestion
<!--
If you have any suggestions for completing this feature, you can tell us.
-->

6
.github/dependabot.yml vendored Normal file
View File

@@ -0,0 +1,6 @@
version: 2
updates:
- package-ecosystem: "cargo" # See documentation for possible values
directory: "/" # Location of package manifests
schedule:
interval: "monthly"

View File

@@ -7,33 +7,67 @@ on:
jobs: jobs:
release: release:
name: Publish to Github Reelases name: Publish to Github Releases
outputs: outputs:
rc: ${{ steps.check-tag.outputs.rc }} rc: ${{ steps.check-tag.outputs.rc }}
strategy: strategy:
matrix: matrix:
target:
- aarch64-unknown-linux-musl
- aarch64-apple-darwin
- x86_64-apple-darwin
- x86_64-pc-windows-msvc
- x86_64-unknown-linux-musl
include: include:
- target: aarch64-unknown-linux-musl - target: aarch64-unknown-linux-musl
os: ubuntu-latest os: ubuntu-latest
use-cross: true use-cross: true
cargo-flags: ""
- target: aarch64-apple-darwin - target: aarch64-apple-darwin
os: macos-latest os: macos-latest
use-cross: true use-cross: true
cargo-flags: ""
- target: aarch64-pc-windows-msvc
os: windows-latest
use-cross: true
cargo-flags: "--no-default-features"
- target: x86_64-apple-darwin - target: x86_64-apple-darwin
os: macos-latest os: macos-latest
cargo-flags: ""
- target: x86_64-pc-windows-msvc - target: x86_64-pc-windows-msvc
os: windows-latest os: windows-latest
cargo-flags: ""
- target: x86_64-unknown-linux-musl - target: x86_64-unknown-linux-musl
os: ubuntu-latest os: ubuntu-latest
use-cross: true use-cross: true
cargo-flags: ""
- target: i686-unknown-linux-musl
os: ubuntu-latest
use-cross: true
cargo-flags: ""
- target: i686-pc-windows-msvc
os: windows-latest
use-cross: true
cargo-flags: ""
- target: armv7-unknown-linux-musleabihf
os: ubuntu-latest
use-cross: true
cargo-flags: ""
- target: arm-unknown-linux-musleabihf
os: ubuntu-latest
use-cross: true
cargo-flags: ""
- target: mips-unknown-linux-musl
os: ubuntu-latest
use-cross: true
cargo-flags: "--no-default-features"
- target: mipsel-unknown-linux-musl
os: ubuntu-latest
use-cross: true
cargo-flags: "--no-default-features"
- target: mips64-unknown-linux-gnuabi64
os: ubuntu-latest
use-cross: true
cargo-flags: "--no-default-features"
- target: mips64el-unknown-linux-gnuabi64
os: ubuntu-latest
use-cross: true
cargo-flags: "--no-default-features"
runs-on: ${{matrix.os}} runs-on: ${{matrix.os}}
steps: steps:
@@ -60,13 +94,6 @@ jobs:
toolchain: stable toolchain: stable
profile: minimal # minimal component installation (ie, no documentation) profile: minimal # minimal component installation (ie, no documentation)
- name: Install prerequisites
shell: bash
run: |
case ${{ matrix.target }} in
aarch64-unknown-linux-musl) sudo apt-get -y update ; sudo apt-get -y install gcc-aarch64-linux-gnu ;;
esac
- name: Show Version Information (Rust, cargo, GCC) - name: Show Version Information (Rust, cargo, GCC)
shell: bash shell: bash
run: | run: |
@@ -82,7 +109,7 @@ jobs:
with: with:
use-cross: ${{ matrix.use-cross }} use-cross: ${{ matrix.use-cross }}
command: build command: build
args: --locked --release --target=${{ matrix.target }} args: --locked --release --target=${{ matrix.target }} ${{ matrix.cargo-flags }}
- name: Build Archive - name: Build Archive
shell: bash shell: bash
@@ -133,6 +160,8 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
needs: release needs: release
steps: steps:
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx - name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1 uses: docker/setup-buildx-action@v1
- name: Login to DockerHub - name: Login to DockerHub
@@ -141,9 +170,16 @@ jobs:
username: ${{ secrets.DOCKERHUB_USERNAME }} username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }} password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push - name: Build and push
id: docker_build
uses: docker/build-push-action@v2 uses: docker/build-push-action@v2
with: with:
build-args: |
REPO=${{ github.repository }}
VER=${{ github.ref_name }}
platforms: |
linux/amd64
linux/arm64
linux/386
linux/arm/v7
push: ${{ needs.release.outputs.rc == 'false' }} push: ${{ needs.release.outputs.rc == 'false' }}
tags: ${{ github.repository }}:latest, ${{ github.repository }}:${{ github.ref_name }} tags: ${{ github.repository }}:latest, ${{ github.repository }}:${{ github.ref_name }}

View File

@@ -2,61 +2,156 @@
All notable changes to this project will be documented in this file. All notable changes to this project will be documented in this file.
## [0.25.0] - 2022-07-06
### Features
- Ui supports creating folder ([#91](https://github.com/sigoden/dufs/issues/91))
- Ui supports move folder/file to new path ([#92](https://github.com/sigoden/dufs/issues/92))
- Check permission on move/copy destination ([#93](https://github.com/sigoden/dufs/issues/93))
- Add completions ([#97](https://github.com/sigoden/dufs/issues/97))
- Limit the number of concurrent uploads ([#98](https://github.com/sigoden/dufs/issues/98))
## [0.24.0] - 2022-07-02
### Bug Fixes
- Unexpected stack overflow when searching a lot ([#87](https://github.com/sigoden/dufs/issues/87))
### Features
- Allow search with --render-try-index ([#88](https://github.com/sigoden/dufs/issues/88))
## [0.23.1] - 2022-06-30
### Bug Fixes
- Safari layout and compatibility ([#83](https://github.com/sigoden/dufs/issues/83))
- Permissions of unzipped files ([#84](https://github.com/sigoden/dufs/issues/84))
## [0.23.0] - 2022-06-29
### Features
- Use feature to conditional support tls ([#77](https://github.com/sigoden/dufs/issues/77))
### Ci
- Support more platforms ([#76](https://github.com/sigoden/dufs/issues/76))
## [0.22.0] - 2022-06-26
### Features
- Support hiding folders with --hidden ([#73](https://github.com/sigoden/dufs/issues/73))
## [0.21.0] - 2022-06-23
### Bug Fixes
- Escape name contains html escape code ([#65](https://github.com/sigoden/dufs/issues/65))
### Features
- Use custom logger with timestamp in rfc3339 ([#67](https://github.com/sigoden/dufs/issues/67))
### Refactor
- Split css/js from index.html ([#68](https://github.com/sigoden/dufs/issues/68))
## [0.20.0] - 2022-06-20
### Bug Fixes
- DecodeURI searching string ([#61](https://github.com/sigoden/dufs/issues/61))
### Features
- Added basic auth ([#60](https://github.com/sigoden/dufs/issues/60))
- Add option --allow-search ([#62](https://github.com/sigoden/dufs/issues/62))
## [0.19.0] - 2022-06-19
### Features
- [**breaking**] Path level access control ([#52](https://github.com/sigoden/dufs/issues/52))
- Serve single file ([#54](https://github.com/sigoden/dufs/issues/54))
- Ui hidden root dirname ([#58](https://github.com/sigoden/dufs/issues/58))
- Reactive webpage ([#51](https://github.com/sigoden/dufs/issues/51))
- [**breaking**] Rename to dufs ([#59](https://github.com/sigoden/dufs/issues/59))
### Refactor
- [**breaking**] Rename --cors to --enable-cors ([#57](https://github.com/sigoden/dufs/issues/57))
## [0.18.0] - 2022-06-18
### Features
- Add option --render-try-index ([#47](https://github.com/sigoden/dufs/issues/47))
- Add slash to end of dir href
## [0.17.1] - 2022-06-16
### Bug Fixes
- Range request ([#44](https://github.com/sigoden/dufs/issues/44))
## [0.17.0] - 2022-06-15 ## [0.17.0] - 2022-06-15
### Bug Fixes ### Bug Fixes
- Webdav propfind dir with slash ([#42](https://github.com/sigoden/duf/issues/42)) - Webdav propfind dir with slash ([#42](https://github.com/sigoden/dufs/issues/42))
### Features ### Features
- Listen both ipv4 and ipv6 by default ([#40](https://github.com/sigoden/duf/issues/40)) - Listen both ipv4 and ipv6 by default ([#40](https://github.com/sigoden/dufs/issues/40))
### Refactor ### Refactor
- Trival changes ([#41](https://github.com/sigoden/duf/issues/41)) - Trival changes ([#41](https://github.com/sigoden/dufs/issues/41))
## [0.16.0] - 2022-06-12 ## [0.16.0] - 2022-06-12
### Features ### Features
- Implement head method ([#33](https://github.com/sigoden/duf/issues/33)) - Implement head method ([#33](https://github.com/sigoden/dufs/issues/33))
- Display upload speed and time left ([#34](https://github.com/sigoden/duf/issues/34)) - Display upload speed and time left ([#34](https://github.com/sigoden/dufs/issues/34))
- Support tls-key in pkcs#8 format ([#35](https://github.com/sigoden/duf/issues/35)) - Support tls-key in pkcs#8 format ([#35](https://github.com/sigoden/dufs/issues/35))
- Options method return status 200 - Options method return status 200
### Testing ### Testing
- Add integration tests ([#36](https://github.com/sigoden/duf/issues/36)) - Add integration tests ([#36](https://github.com/sigoden/dufs/issues/36))
## [0.15.1] - 2022-06-11 ## [0.15.1] - 2022-06-11
### Bug Fixes ### Bug Fixes
- Cannot upload ([#32](https://github.com/sigoden/duf/issues/32)) - Cannot upload ([#32](https://github.com/sigoden/dufs/issues/32))
## [0.15.0] - 2022-06-10 ## [0.15.0] - 2022-06-10
### Bug Fixes ### Bug Fixes
- Encode webdav href as uri ([#28](https://github.com/sigoden/duf/issues/28)) - Encode webdav href as uri ([#28](https://github.com/sigoden/dufs/issues/28))
- Query dir param - Query dir param
### Features ### Features
- Add basic dark theme ([#29](https://github.com/sigoden/duf/issues/29)) - Add basic dark theme ([#29](https://github.com/sigoden/dufs/issues/29))
- Add empty state placeholder to page([#30](https://github.com/sigoden/duf/issues/30)) - Add empty state placeholder to page([#30](https://github.com/sigoden/dufs/issues/30))
## [0.14.0] - 2022-06-07 ## [0.14.0] - 2022-06-07
### Bug Fixes ### Bug Fixes
- Send index page with content-type ([#26](https://github.com/sigoden/duf/issues/26)) - Send index page with content-type ([#26](https://github.com/sigoden/dufs/issues/26))
### Features ### Features
- Support ipv6 ([#25](https://github.com/sigoden/duf/issues/25)) - Support ipv6 ([#25](https://github.com/sigoden/dufs/issues/25))
- Add favicon ([#27](https://github.com/sigoden/duf/issues/27)) - Add favicon ([#27](https://github.com/sigoden/dufs/issues/27))
## [0.13.2] - 2022-06-06 ## [0.13.2] - 2022-06-06
@@ -69,11 +164,11 @@ All notable changes to this project will be documented in this file.
### Bug Fixes ### Bug Fixes
- Escape filename ([#21](https://github.com/sigoden/duf/issues/21)) - Escape filename ([#21](https://github.com/sigoden/dufs/issues/21))
### Refactor ### Refactor
- Use logger ([#22](https://github.com/sigoden/duf/issues/22)) - Use logger ([#22](https://github.com/sigoden/dufs/issues/22))
## [0.13.0] - 2022-06-05 ## [0.13.0] - 2022-06-05
@@ -83,16 +178,16 @@ All notable changes to this project will be documented in this file.
### Features ### Features
- Implement more webdav methods ([#13](https://github.com/sigoden/duf/issues/13)) - Implement more webdav methods ([#13](https://github.com/sigoden/dufs/issues/13))
- Use digest auth ([#14](https://github.com/sigoden/duf/issues/14)) - Use digest auth ([#14](https://github.com/sigoden/dufs/issues/14))
- Add webdav proppatch handler ([#18](https://github.com/sigoden/duf/issues/18)) - Add webdav proppatch handler ([#18](https://github.com/sigoden/dufs/issues/18))
## [0.12.1] - 2022-06-04 ## [0.12.1] - 2022-06-04
### Features ### Features
- Support webdav ([#10](https://github.com/sigoden/duf/issues/10)) - Support webdav ([#10](https://github.com/sigoden/dufs/issues/10))
- Remove unzip uploaded feature ([#11](https://github.com/sigoden/duf/issues/11)) - Remove unzip uploaded feature ([#11](https://github.com/sigoden/dufs/issues/11))
## [0.11.0] - 2022-06-03 ## [0.11.0] - 2022-06-03

569
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,42 +1,47 @@
[package] [package]
name = "duf" name = "dufs"
version = "0.17.0" version = "0.25.0"
edition = "2021" edition = "2021"
authors = ["sigoden <sigoden@gmail.com>"] authors = ["sigoden <sigoden@gmail.com>"]
description = "Duf is a simple file server." description = "Dufs is a distinctive utility file server"
license = "MIT OR Apache-2.0" license = "MIT OR Apache-2.0"
homepage = "https://github.com/sigoden/duf" homepage = "https://github.com/sigoden/dufs"
repository = "https://github.com/sigoden/duf" repository = "https://github.com/sigoden/dufs"
categories = ["command-line-utilities", "web-programming::http-server"] categories = ["command-line-utilities", "web-programming::http-server"]
keywords = ["static", "file", "server", "webdav", "cli"] keywords = ["static", "file", "server", "webdav", "cli"]
[dependencies] [dependencies]
clap = { version = "3", default-features = false, features = ["std"] } clap = { version = "3", default-features = false, features = ["std", "wrap_help"] }
clap_complete = "3"
chrono = "0.4" chrono = "0.4"
tokio = { version = "1", features = ["rt-multi-thread", "macros", "fs", "io-util", "signal"]} tokio = { version = "1", features = ["rt-multi-thread", "macros", "fs", "io-util", "signal"]}
tokio-rustls = "0.23" tokio-util = { version = "0.7", features = ["io-util"] }
tokio-util = { version = "0.7", features = ["codec", "io-util"] }
hyper = { version = "0.14", features = ["http1", "server", "tcp", "stream"] } hyper = { version = "0.14", features = ["http1", "server", "tcp", "stream"] }
percent-encoding = "2.1" percent-encoding = "2.1"
serde = { version = "1", features = ["derive"] } serde = { version = "1", features = ["derive"] }
serde_json = "1" serde_json = "1"
futures = "0.3" futures = "0.3"
base64 = "0.13" base64 = "0.13"
async_zip = "0.0.7" async_zip = { version = "0.0.8", default-features = false, features = ["deflate"] }
async-walkdir = "0.2"
headers = "0.3" headers = "0.3"
mime_guess = "2.0" mime_guess = "2.0"
get_if_addrs = "0.5" if-addrs = "0.7"
rustls = { version = "0.20", default-features = false, features = ["tls12"] } rustls = { version = "0.20", default-features = false, features = ["tls12"], optional = true }
rustls-pemfile = "1" rustls-pemfile = { version = "1", optional = true }
tokio-rustls = { version = "0.23", optional = true }
md5 = "0.7" md5 = "0.7"
lazy_static = "1.4" lazy_static = "1.4"
uuid = { version = "1.1", features = ["v4", "fast-rng"] } uuid = { version = "1.1", features = ["v4", "fast-rng"] }
urlencoding = "2.1" urlencoding = "2.1"
xml-rs = "0.8" xml-rs = "0.8"
env_logger = { version = "0.9", default-features = false, features = ["humantime"] }
log = "0.4" log = "0.4"
socket2 = "0.4" socket2 = "0.4"
async-stream = "0.3"
walkdir = "2.3"
[features]
default = ["tls"]
tls = ["rustls", "rustls-pemfile", "tokio-rustls"]
[dev-dependencies] [dev-dependencies]
assert_cmd = "2" assert_cmd = "2"
@@ -44,7 +49,7 @@ reqwest = { version = "0.11", features = ["blocking", "multipart", "rustls-tls"]
assert_fs = "1" assert_fs = "1"
select = "0.5" select = "0.5"
port_check = "0.1" port_check = "0.1"
rstest = "0.13" rstest = "0.15"
regex = "1" regex = "1"
pretty_assertions = "1.2" pretty_assertions = "1.2"
url = "2" url = "2"

View File

@@ -1,10 +1,18 @@
FROM rust:1.61 as builder FROM alpine as builder
RUN rustup target add x86_64-unknown-linux-musl ARG REPO VER TARGETPLATFORM
RUN apt-get update && apt-get install --no-install-recommends -y musl-tools RUN if [ "$TARGETPLATFORM" = "linux/amd64" ]; then \
WORKDIR /app TARGET="x86_64-unknown-linux-musl"; \
COPY . . elif [ "$TARGETPLATFORM" = "linux/arm64" ]; then \
RUN cargo build --target x86_64-unknown-linux-musl --release TARGET="aarch64-unknown-linux-musl"; \
elif [ "$TARGETPLATFORM" = "linux/386" ]; then \
TARGET="i686-unknown-linux-musl"; \
elif [ "$TARGETPLATFORM" = "linux/arm/v7" ]; then \
TARGET="armv7-unknown-linux-musleabihf"; \
fi && \
wget https://github.com/${REPO}/releases/download/${VER}/dufs-${VER}-${TARGET}.tar.gz && \
tar -xf dufs-${VER}-${TARGET}.tar.gz && \
mv dufs /bin/
FROM scratch FROM scratch
COPY --from=builder /app/target/x86_64-unknown-linux-musl/release/duf /bin/ COPY --from=builder /bin/dufs /bin/dufs
ENTRYPOINT ["/bin/duf"] ENTRYPOINT ["/bin/dufs"]

145
README.md
View File

@@ -1,11 +1,11 @@
# Duf # Dufs
[![CI](https://github.com/sigoden/duf/actions/workflows/ci.yaml/badge.svg)](https://github.com/sigoden/duf/actions/workflows/ci.yaml) [![CI](https://github.com/sigoden/dufs/actions/workflows/ci.yaml/badge.svg)](https://github.com/sigoden/dufs/actions/workflows/ci.yaml)
[![Crates](https://img.shields.io/crates/v/duf.svg)](https://crates.io/crates/duf) [![Crates](https://img.shields.io/crates/v/dufs.svg)](https://crates.io/crates/dufs)
Duf is a simple file server. Support static serve, search, upload, webdav... Dufs is a distinctive utility file server that supports static serving, uploading, searching, accessing control, webdav...
![demo](https://user-images.githubusercontent.com/4012553/171526189-09afc2de-793f-4216-b3d5-31ea408d3610.png) ![demo](https://user-images.githubusercontent.com/4012553/177549931-130383ef-0480-4911-b9c2-0d9534a624b7.png)
## Features ## Features
@@ -14,7 +14,7 @@ Duf is a simple file server. Support static serve, search, upload, webdav...
- Upload files and folders (Drag & Drop) - Upload files and folders (Drag & Drop)
- Search files - Search files
- Partial responses (Parallel/Resume download) - Partial responses (Parallel/Resume download)
- Authentication - Path level access control
- Support https - Support https
- Support webdav - Support webdav
- Easy to use with curl - Easy to use with curl
@@ -24,116 +24,173 @@ Duf is a simple file server. Support static serve, search, upload, webdav...
### With cargo ### With cargo
``` ```
cargo install duf cargo install dufs
``` ```
### With docker ### With docker
``` ```
docker run -v /tmp:/tmp -p 5000:5000 --rm -it docker.io/sigoden/duf /tmp docker run -v `pwd`:/data -p 5000:5000 --rm -it sigoden/dufs /data
``` ```
### Binaries on macOS, Linux, Windows ### Binaries on macOS, Linux, Windows
Download from [Github Releases](https://github.com/sigoden/duf/releases), unzip and add duf to your $PATH. Download from [Github Releases](https://github.com/sigoden/dufs/releases), unzip and add dufs to your $PATH.
## CLI ## CLI
``` ```
Duf is a simple file server. - https://github.com/sigoden/duf Dufs is a distinctive utility file server - https://github.com/sigoden/dufs
USAGE: USAGE:
duf [OPTIONS] [--] [path] dufs [OPTIONS] [--] [path]
ARGS: ARGS:
<path> Path to a root directory for serving files [default: .] <path> Specific path to serve [default: .]
OPTIONS: OPTIONS:
-a, --auth <user:pass> Use HTTP authentication -b, --bind <addr>... Specify bind address
--no-auth-access Not required auth when access static files
-A, --allow-all Allow all operations
--allow-delete Allow delete files/folders
--allow-symlink Allow symlink to files/folders outside root directory
--allow-upload Allow upload files/folders
-b, --bind <address>... Specify bind address
--cors Enable CORS, sets `Access-Control-Allow-Origin: *`
-h, --help Print help information
-p, --port <port> Specify port to listen on [default: 5000] -p, --port <port> Specify port to listen on [default: 5000]
--path-prefix <path> Specify an url path prefix --path-prefix <path> Specify a path prefix
--render-index Render index.html when requesting a directory --hidden <value> Hide directories from directory listings, separated by `,`
--render-spa Render for single-page application -a, --auth <rule>... Add auth for path
--auth-method <value> Select auth method [default: digest] [possible values: basic, digest]
-A, --allow-all Allow all operations
--allow-upload Allow upload files/folders
--allow-delete Allow delete files/folders
--allow-search Allow search files/folders
--allow-symlink Allow symlink to files/folders outside root directory
--enable-cors Enable CORS, sets `Access-Control-Allow-Origin: *`
--render-index Serve index.html when requesting a directory, returns 404 if not found index.html
--render-try-index Serve index.html when requesting a directory, returns directory listing if not found index.html
--render-spa Serve SPA(Single Page Application)
--completions <shell> Print shell completion script for <shell> [possible values: bash, elvish, fish, powershell, zsh]
--tls-cert <path> Path to an SSL/TLS certificate to serve with HTTPS --tls-cert <path> Path to an SSL/TLS certificate to serve with HTTPS
--tls-key <path> Path to the SSL/TLS certificate's private key --tls-key <path> Path to the SSL/TLS certificate's private key
-h, --help Print help information
-V, --version Print version information -V, --version Print version information
``` ```
## Examples ## Examples
You can run this command to start serving your current working directory on 127.0.0.1:5000 by default. Serve current working directory
``` ```
duf dufs
``` ```
...or specify which folder you want to serve. Explicitly allow all operations including download/upload/delete
``` ```
duf folder_name dufs -A
```
Allow all operations such as upload, delete
```sh
duf --allow-all
``` ```
Only allow upload operation Only allow upload operation
``` ```
duf --allow-upload dufs --allow-upload
``` ```
Serve a single page application (SPA) Serve a specific directory
``` ```
duf --render-spa dufs Downloads
```
Serve a specific file
```
dufs linux-distro.iso
```
Serve index.html when requesting a directory
```
dufs --render-index
```
Serve single-page application like react
```
dufs --render-spa
```
Require username/password
```
dufs -a /@admin:123
```
Listen on a specific port
```
dufs -p 80
```
Hide directories from directory listings
```
dufs --hidden .git,.DS_Store
``` ```
Use https Use https
``` ```
duf --tls-cert my.crt --tls-key my.key dufs --tls-cert my.crt --tls-key my.key
``` ```
## API ## API
Download a file Download a file
``` ```
curl http://127.0.0.1:5000/some-file curl http://127.0.0.1:5000/path-to-file
``` ```
Download a folder as zip file Download a folder as zip file
``` ```
curl -o some-folder.zip http://127.0.0.1:5000/some-folder?zip curl -o path-to-folder.zip http://127.0.0.1:5000/path-to-folder?zip
``` ```
Upload a file Upload a file
``` ```
curl --upload-file some-file http://127.0.0.1:5000/some-file curl --upload-file path-to-file http://127.0.0.1:5000/path-to-file
``` ```
Delete a file/folder Delete a file/folder
``` ```
curl -X DELETE http://127.0.0.1:5000/some-file curl -X DELETE http://127.0.0.1:5000/path-to-file
``` ```
## Access Control
Dufs supports path level access control. You can control who can do what on which path with `--auth`/`-a`.
```
dufs -a <path>@<readwrite>[@<readonly>|@*]
```
- `<path>`: Protected url path
- `<readwrite>`: Account with upload/delete/view/download permission, required
- `<readonly>`: Account with view/download permission, optional
> `*` means `<path>` is public, everyone can view/download it.
For example:
```
dufs -a /@admin:pass1@* -a /ui@designer:pass2 -A
```
- All files/folders are public to view/download.
- Account `admin:pass1` can upload/delete/view/download any files/folders.
- Account `designer:pass2` can upload/delete/view/download any files/folders in the `ui` folder.
## License ## License
Copyright (c) 2022 duf-developers. Copyright (c) 2022 dufs-developers.
duf is made available under the terms of either the MIT License or the Apache License 2.0, at your option. dufs is made available under the terms of either the MIT License or the Apache License 2.0, at your option.
See the LICENSE-APACHE and LICENSE-MIT files for license details. See the LICENSE-APACHE and LICENSE-MIT files for license details.

View File

@@ -1,15 +1,16 @@
html { html {
font-family: -apple-system,BlinkMacSystemFont,Helvetica,Arial,sans-serif; font-family: -apple-system,BlinkMacSystemFont,Roboto,Helvetica,Arial,sans-serif;
line-height: 1.5; line-height: 1.5;
color: #24292e; color: #24292e;
} }
body { body {
width: 700px; /* prevent premature breadcrumb wrapping on mobile */
min-width: 500px;
} }
.hidden { .hidden {
display: none; display: none !important;
} }
.head { .head {
@@ -21,6 +22,7 @@ body {
.breadcrumb { .breadcrumb {
font-size: 1.25em; font-size: 1.25em;
padding-right: 0.6em;
} }
.breadcrumb > a { .breadcrumb > a {
@@ -45,12 +47,25 @@ body {
.breadcrumb svg { .breadcrumb svg {
height: 100%; height: 100%;
fill: rgba(3,47,98,0.5); fill: rgba(3,47,98,0.5);
padding-right: 0.5em;
padding-left: 0.5em;
} }
.toolbox { .toolbox {
display: flex; display: flex;
margin-right: 10px;
}
.toolbox > div {
/* vertically align with breadcrumb text */
height: 1.1rem;
}
.toolbox .control {
cursor: pointer;
padding-left: 0.25em;
}
.upload-file input {
display: none;
} }
.searchbar { .searchbar {
@@ -62,7 +77,7 @@ body {
transition: all .15s; transition: all .15s;
border: 1px #ddd solid; border: 1px #ddd solid;
border-radius: 15px; border-radius: 15px;
margin: 0 0 2px 10px; margin-bottom: 2px;
} }
.searchbar #search { .searchbar #search {
@@ -84,15 +99,6 @@ body {
cursor: pointer; cursor: pointer;
} }
.upload-control {
cursor: pointer;
padding-left: 0.25em;
}
.upload-control input {
display: none;
}
.upload-status span { .upload-status span {
width: 70px; width: 70px;
display: inline-block; display: inline-block;
@@ -120,18 +126,13 @@ body {
white-space: nowrap; white-space: nowrap;
} }
.uploaders-table .cell-name,
.paths-table .cell-name {
width: 500px;
}
.uploaders-table .cell-status { .uploaders-table .cell-status {
width: 80px; width: 80px;
padding-left: 0.6em; padding-left: 0.6em;
} }
.paths-table .cell-actions { .paths-table .cell-actions {
width: 60px; width: 75px;
display: flex; display: flex;
padding-left: 0.6em; padding-left: 0.6em;
} }
@@ -147,15 +148,14 @@ body {
padding-left: 0.6em; padding-left: 0.6em;
} }
.path svg { .path svg {
height: 100%; height: 16px;
fill: rgba(3,47,98,0.5); fill: rgba(3,47,98,0.5);
padding-right: 0.5em; padding-right: 0.5em;
vertical-align: text-top;
} }
.path { .path {
display: flex;
list-style: none; list-style: none;
} }
@@ -166,6 +166,8 @@ body {
overflow: hidden; overflow: hidden;
display: block; display: block;
text-decoration: none; text-decoration: none;
max-width: calc(100vw - 375px);
min-width: 200px;
} }
.path a:hover { .path a:hover {
@@ -173,7 +175,7 @@ body {
} }
.action-btn { .action-btn {
padding-left: 0.4em; padding-right: 0.3em;
} }
.uploaders-table { .uploaders-table {
@@ -184,6 +186,12 @@ body {
padding-right: 1em; padding-right: 1em;
} }
@media (min-width: 768px) {
.path a {
min-width: 400px;
}
}
/* dark theme */ /* dark theme */
@media (prefers-color-scheme: dark) { @media (prefers-color-scheme: dark) {
body { body {
@@ -202,8 +210,9 @@ body {
} }
svg, svg,
.path svg { .path svg,
fill: #d0e6ff; .breadcrumb svg {
fill: #fff;
} }
.searchbar { .searchbar {

View File

@@ -15,14 +15,20 @@
<svg width="16" height="16" viewBox="0 0 16 16"><path d="M.5 9.9a.5.5 0 0 1 .5.5v2.5a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-2.5a.5.5 0 0 1 1 0v2.5a2 2 0 0 1-2 2H2a2 2 0 0 1-2-2v-2.5a.5.5 0 0 1 .5-.5z"/><path d="M7.646 11.854a.5.5 0 0 0 .708 0l3-3a.5.5 0 0 0-.708-.708L8.5 10.293V1.5a.5.5 0 0 0-1 0v8.793L5.354 8.146a.5.5 0 1 0-.708.708l3 3z"/></svg> <svg width="16" height="16" viewBox="0 0 16 16"><path d="M.5 9.9a.5.5 0 0 1 .5.5v2.5a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-2.5a.5.5 0 0 1 1 0v2.5a2 2 0 0 1-2 2H2a2 2 0 0 1-2-2v-2.5a.5.5 0 0 1 .5-.5z"/><path d="M7.646 11.854a.5.5 0 0 0 .708 0l3-3a.5.5 0 0 0-.708-.708L8.5 10.293V1.5a.5.5 0 0 0-1 0v8.793L5.354 8.146a.5.5 0 1 0-.708.708l3 3z"/></svg>
</a> </a>
</div> </div>
<div class="upload-control hidden" title="Upload files"> <div class="control upload-file hidden" title="Upload files">
<label for="file"> <label for="file">
<svg width="16" height="16" viewBox="0 0 16 16"><path d="M.5 9.9a.5.5 0 0 1 .5.5v2.5a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-2.5a.5.5 0 0 1 1 0v2.5a2 2 0 0 1-2 2H2a2 2 0 0 1-2-2v-2.5a.5.5 0 0 1 .5-.5z"/><path d="M7.646 1.146a.5.5 0 0 1 .708 0l3 3a.5.5 0 0 1-.708.708L8.5 2.707V11.5a.5.5 0 0 1-1 0V2.707L5.354 4.854a.5.5 0 1 1-.708-.708l3-3z"/></svg> <svg width="16" height="16" viewBox="0 0 16 16"><path d="M.5 9.9a.5.5 0 0 1 .5.5v2.5a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-2.5a.5.5 0 0 1 1 0v2.5a2 2 0 0 1-2 2H2a2 2 0 0 1-2-2v-2.5a.5.5 0 0 1 .5-.5z"/><path d="M7.646 1.146a.5.5 0 0 1 .708 0l3 3a.5.5 0 0 1-.708.708L8.5 2.707V11.5a.5.5 0 0 1-1 0V2.707L5.354 4.854a.5.5 0 1 1-.708-.708l3-3z"/></svg>
</label> </label>
<input type="file" id="file" name="file" multiple> <input type="file" id="file" name="file" multiple>
</div> </div>
<div class="control new-folder hidden" title="New folder">
<svg width="16" height="16" viewBox="0 0 16 16">
<path d="m.5 3 .04.87a1.99 1.99 0 0 0-.342 1.311l.637 7A2 2 0 0 0 2.826 14H9v-1H2.826a1 1 0 0 1-.995-.91l-.637-7A1 1 0 0 1 2.19 4h11.62a1 1 0 0 1 .996 1.09L14.54 8h1.005l.256-2.819A2 2 0 0 0 13.81 3H9.828a2 2 0 0 1-1.414-.586l-.828-.828A2 2 0 0 0 6.172 1H2.5a2 2 0 0 0-2 2zm5.672-1a1 1 0 0 1 .707.293L7.586 3H2.19c-.24 0-.47.042-.683.12L1.5 2.98a1 1 0 0 1 1-.98h3.672z"/>
<path d="M13.5 10a.5.5 0 0 1 .5.5V12h1.5a.5.5 0 1 1 0 1H14v1.5a.5.5 0 1 1-1 0V13h-1.5a.5.5 0 0 1 0-1H13v-1.5a.5.5 0 0 1 .5-.5z"/>
</svg>
</div> </div>
<form class="searchbar"> </div>
<form class="searchbar hidden">
<div class="icon"> <div class="icon">
<svg width="16" height="16" fill="currentColor" viewBox="0 0 16 16"><path d="M11.742 10.344a6.5 6.5 0 1 0-1.397 1.398h-.001c.03.04.062.078.098.115l3.85 3.85a1 1 0 0 0 1.415-1.414l-3.85-3.85a1.007 1.007 0 0 0-.115-.1zM12 6.5a5.5 5.5 0 1 1-11 0 5.5 5.5 0 0 1 11 0z"/></svg> <svg width="16" height="16" fill="currentColor" viewBox="0 0 16 16"><path d="M11.742 10.344a6.5 6.5 0 1 0-1.397 1.398h-.001c.03.04.062.078.098.115l3.85 3.85a1 1 0 0 0 1.415-1.414l-3.85-3.85a1.007 1.007 0 0 0-.115-.1zM12 6.5a5.5 5.5 0 1 1-11 0 5.5 5.5 0 0 1 11 0z"/></svg>
</div> </div>
@@ -35,16 +41,16 @@
<table class="uploaders-table hidden"> <table class="uploaders-table hidden">
<thead> <thead>
<tr> <tr>
<th class="cell-name">Name</th> <th class="cell-name" colspan="2">Name</th>
<th class="cell-status">Speed - Progress - Time Left</th> <th class="cell-status">Progress</th>
</tr> </tr>
</thead> </thead>
</table> </table>
<table class="paths-table hidden"> <table class="paths-table hidden">
<thead> <thead>
<tr> <tr>
<th class="cell-name">Name</th> <th class="cell-name" colspan="2">Name</th>
<th class="cell-mtime">Date modify</th> <th class="cell-mtime">Last modified</th>
<th class="cell-size">Size</th> <th class="cell-size">Size</th>
<th class="cell-actions">Actions</th> <th class="cell-actions">Actions</th>
</tr> </tr>

View File

@@ -1,7 +1,6 @@
/** /**
* @typedef {object} PathItem * @typedef {object} PathItem
* @property {"Dir"|"SymlinkDir"|"File"|"SymlinkFile"} path_type * @property {"Dir"|"SymlinkDir"|"File"|"SymlinkFile"} path_type
* @property {boolean} is_symlink
* @property {string} name * @property {string} name
* @property {number} mtime * @property {number} mtime
* @property {number} size * @property {number} size
@@ -31,58 +30,55 @@ let $uploadersTable;
*/ */
let $emptyFolder; let $emptyFolder;
/** /**
* @type string * @type Element
*/ */
let baseDir; let $newFolder;
class Uploader { class Uploader {
/** /**
* @type number *
* @param {File} file
* @param {string[]} dirs
*/ */
idx; constructor(file, dirs) {
/**
* @type File
*/
file;
/**
* @type string
*/
name;
/** /**
* @type Element * @type Element
*/ */
$uploadStatus; this.$uploadStatus = null
/** this.uploaded = 0;
* @type number this.lastUptime = 0;
*/
uploaded = 0;
/**
* @type number
*/
lastUptime = 0;
static globalIdx = 0;
constructor(file, dirs) {
this.name = [...dirs, file.name].join("/"); this.name = [...dirs, file.name].join("/");
this.idx = Uploader.globalIdx++; this.idx = Uploader.globalIdx++;
this.file = file; this.file = file;
} }
upload() { upload() {
const { file, idx, name } = this; const { idx, name } = this;
let url = getUrl(name); const url = getUrl(name);
const encodedUrl = encodedStr(url);
const encodedName = encodedStr(name);
$uploadersTable.insertAdjacentHTML("beforeend", ` $uploadersTable.insertAdjacentHTML("beforeend", `
<tr id="upload${idx}" class="uploader"> <tr id="upload${idx}" class="uploader">
<td class="path cell-icon">
${getSvg()}
</td>
<td class="path cell-name"> <td class="path cell-name">
<div>${getSvg("File")}</div> <a href="${encodedUrl}">${encodedName}</a>
<a href="${url}">${name}</a>
</td> </td>
<td class="cell-status upload-status" id="uploadStatus${idx}"></td> <td class="cell-status upload-status" id="uploadStatus${idx}"></td>
</tr>`); </tr>`);
$uploadersTable.classList.remove("hidden"); $uploadersTable.classList.remove("hidden");
$emptyFolder.classList.add("hidden"); $emptyFolder.classList.add("hidden");
this.$uploadStatus = document.getElementById(`uploadStatus${idx}`); this.$uploadStatus = document.getElementById(`uploadStatus${idx}`);
this.lastUptime = Date.now(); this.$uploadStatus.innerHTML = '-';
Uploader.queues.push(this);
Uploader.runQueue();
}
ajax() {
Uploader.runings += 1;
const url = getUrl(this.name);
this.lastUptime = Date.now();
const ajax = new XMLHttpRequest(); const ajax = new XMLHttpRequest();
ajax.upload.addEventListener("progress", e => this.progress(e), false); ajax.upload.addEventListener("progress", e => this.progress(e), false);
ajax.addEventListener("readystatechange", () => { ajax.addEventListener("readystatechange", () => {
@@ -97,13 +93,14 @@ class Uploader {
ajax.addEventListener("error", () => this.fail(), false); ajax.addEventListener("error", () => this.fail(), false);
ajax.addEventListener("abort", () => this.fail(), false); ajax.addEventListener("abort", () => this.fail(), false);
ajax.open("PUT", url); ajax.open("PUT", url);
ajax.send(file); ajax.send(this.file);
} }
progress(event) { progress(event) {
let now = Date.now(); const now = Date.now();
let speed = (event.loaded - this.uploaded) / (now - this.lastUptime) * 1000; const speed = (event.loaded - this.uploaded) / (now - this.lastUptime) * 1000;
let [speedValue, speedUnit] = formatSize(speed); const [speedValue, speedUnit] = formatSize(speed);
const speedText = `${speedValue}${speedUnit.toLowerCase()}/s`; const speedText = `${speedValue}${speedUnit.toLowerCase()}/s`;
const progress = formatPercent((event.loaded / event.total) * 100); const progress = formatPercent((event.loaded / event.total) * 100);
const duration = formatDuration((event.total - event.loaded) / speed) const duration = formatDuration((event.total - event.loaded) / speed)
@@ -114,37 +111,70 @@ class Uploader {
complete() { complete() {
this.$uploadStatus.innerHTML = ``; this.$uploadStatus.innerHTML = ``;
Uploader.runings -= 1;
Uploader.runQueue();
} }
fail() { fail() {
this.$uploadStatus.innerHTML = ``; this.$uploadStatus.innerHTML = ``;
Uploader.runings -= 1;
Uploader.runQueue();
} }
} }
Uploader.globalIdx = 0;
Uploader.runings = 0;
/**
* @type Uploader[]
*/
Uploader.queues = [];
Uploader.runQueue = () => {
if (Uploader.runings > 2) return;
let uploader = Uploader.queues.shift();
if (!uploader) return;
uploader.ajax();
}
/** /**
* Add breadcrumb * Add breadcrumb
* @param {string} value * @param {string} href
* @param {string} uri_prefix
*/ */
function addBreadcrumb(value) { function addBreadcrumb(href, uri_prefix) {
const $breadcrumb = document.querySelector(".breadcrumb"); const $breadcrumb = document.querySelector(".breadcrumb");
const parts = value.split("/").filter(v => !!v); let parts = [];
if (href === "/") {
parts = [""];
} else {
parts = href.split("/");
}
const len = parts.length; const len = parts.length;
let path = ""; let path = uri_prefix;
for (let i = 0; i < len; i++) { for (let i = 0; i < len; i++) {
const name = parts[i]; const name = parts[i];
if (i > 0) { if (i > 0) {
path += "/" + name; if (!path.endsWith("/")) {
path += "/";
} }
if (i === len - 1) { path += encodeURI(name);
$breadcrumb.insertAdjacentHTML("beforeend", `<b>${name}</b>`); }
baseDir = name; const encodedPath = encodedStr(path);
} else if (i === 0) { const encodedName = encodedStr(name);
$breadcrumb.insertAdjacentHTML("beforeend", `<a href="/"><b>${name}</b></a>`); if (i === 0) {
$breadcrumb.insertAdjacentHTML("beforeend", `<a href="${encodedPath}"><svg width="16" height="16" viewBox="0 0 16 16"><path d="M6.5 14.5v-3.505c0-.245.25-.495.5-.495h2c.25 0 .5.25.5.5v3.5a.5.5 0 0 0 .5.5h4a.5.5 0 0 0 .5-.5v-7a.5.5 0 0 0-.146-.354L13 5.793V2.5a.5.5 0 0 0-.5-.5h-1a.5.5 0 0 0-.5.5v1.293L8.354 1.146a.5.5 0 0 0-.708 0l-6 6A.5.5 0 0 0 1.5 7.5v7a.5.5 0 0 0 .5.5h4a.5.5 0 0 0 .5-.5z"/></svg></a>`);
} else if (i === len - 1) {
$breadcrumb.insertAdjacentHTML("beforeend", `<b>${encodedName}</b>`);
} else { } else {
$breadcrumb.insertAdjacentHTML("beforeend", `<a href="${encodeURI(path)}">${name}</a>`); $breadcrumb.insertAdjacentHTML("beforeend", `<a href="${encodedPath}">${encodedName}</a>`);
} }
if (i !== len - 1) {
$breadcrumb.insertAdjacentHTML("beforeend", `<span class="separator">/</span>`); $breadcrumb.insertAdjacentHTML("beforeend", `<span class="separator">/</span>`);
} }
}
} }
/** /**
@@ -153,41 +183,55 @@ function addBreadcrumb(value) {
* @param {number} index * @param {number} index
*/ */
function addPath(file, index) { function addPath(file, index) {
const url = getUrl(file.name) const encodedName = encodedStr(file.name);
let url = getUrl(file.name)
let encodedUrl = encodedStr(url);
let actionDelete = ""; let actionDelete = "";
let actionDownload = ""; let actionDownload = "";
let actionMove = "";
if (file.path_type.endsWith("Dir")) { if (file.path_type.endsWith("Dir")) {
url += "/";
encodedUrl += "/";
actionDownload = ` actionDownload = `
<div class="action-btn"> <div class="action-btn">
<a href="${url}?zip" title="Download folder as a .zip file"> <a href="${encodedUrl}?zip" title="Download folder as a .zip file">
<svg width="16" height="16" viewBox="0 0 16 16"><path d="M.5 9.9a.5.5 0 0 1 .5.5v2.5a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-2.5a.5.5 0 0 1 1 0v2.5a2 2 0 0 1-2 2H2a2 2 0 0 1-2-2v-2.5a.5.5 0 0 1 .5-.5z"/><path d="M7.646 11.854a.5.5 0 0 0 .708 0l3-3a.5.5 0 0 0-.708-.708L8.5 10.293V1.5a.5.5 0 0 0-1 0v8.793L5.354 8.146a.5.5 0 1 0-.708.708l3 3z"/></svg> <svg width="16" height="16" viewBox="0 0 16 16"><path d="M.5 9.9a.5.5 0 0 1 .5.5v2.5a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-2.5a.5.5 0 0 1 1 0v2.5a2 2 0 0 1-2 2H2a2 2 0 0 1-2-2v-2.5a.5.5 0 0 1 .5-.5z"/><path d="M7.646 11.854a.5.5 0 0 0 .708 0l3-3a.5.5 0 0 0-.708-.708L8.5 10.293V1.5a.5.5 0 0 0-1 0v8.793L5.354 8.146a.5.5 0 1 0-.708.708l3 3z"/></svg>
</a> </a>
</div>`; </div>`;
} else { } else {
actionDownload = ` actionDownload = `
<div class="action-btn" > <div class="action-btn" >
<a href="${url}" title="Download file" download> <a href="${encodedUrl}" title="Download file" download>
<svg width="16" height="16" viewBox="0 0 16 16"><path d="M.5 9.9a.5.5 0 0 1 .5.5v2.5a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-2.5a.5.5 0 0 1 1 0v2.5a2 2 0 0 1-2 2H2a2 2 0 0 1-2-2v-2.5a.5.5 0 0 1 .5-.5z"/><path d="M7.646 11.854a.5.5 0 0 0 .708 0l3-3a.5.5 0 0 0-.708-.708L8.5 10.293V1.5a.5.5 0 0 0-1 0v8.793L5.354 8.146a.5.5 0 1 0-.708.708l3 3z"/></svg> <svg width="16" height="16" viewBox="0 0 16 16"><path d="M.5 9.9a.5.5 0 0 1 .5.5v2.5a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-2.5a.5.5 0 0 1 1 0v2.5a2 2 0 0 1-2 2H2a2 2 0 0 1-2-2v-2.5a.5.5 0 0 1 .5-.5z"/><path d="M7.646 11.854a.5.5 0 0 0 .708 0l3-3a.5.5 0 0 0-.708-.708L8.5 10.293V1.5a.5.5 0 0 0-1 0v8.793L5.354 8.146a.5.5 0 1 0-.708.708l3 3z"/></svg>
</a> </a>
</div>`; </div>`;
} }
if (DATA.allow_delete) { if (DATA.allow_delete) {
if (DATA.allow_upload) {
actionMove = `
<div onclick="movePath(${index})" class="action-btn" id="moveBtn${index}" title="Move to new path">
<svg width="16" height="16" viewBox="0 0 16 16"><path fill-rule="evenodd" d="M1.5 1.5A.5.5 0 0 0 1 2v4.8a2.5 2.5 0 0 0 2.5 2.5h9.793l-3.347 3.346a.5.5 0 0 0 .708.708l4.2-4.2a.5.5 0 0 0 0-.708l-4-4a.5.5 0 0 0-.708.708L13.293 8.3H3.5A1.5 1.5 0 0 1 2 6.8V2a.5.5 0 0 0-.5-.5z"/></svg>
</div>`;
}
actionDelete = ` actionDelete = `
<div onclick="deletePath(${index})" class="action-btn" id="deleteBtn${index}" title="Delete ${file.name}"> <div onclick="deletePath(${index})" class="action-btn" id="deleteBtn${index}" title="Delete">
<svg width="16" height="16" fill="currentColor"viewBox="0 0 16 16"><path d="M6.854 7.146a.5.5 0 1 0-.708.708L7.293 9l-1.147 1.146a.5.5 0 0 0 .708.708L8 9.707l1.146 1.147a.5.5 0 0 0 .708-.708L8.707 9l1.147-1.146a.5.5 0 0 0-.708-.708L8 8.293 6.854 7.146z"/><path d="M14 14V4.5L9.5 0H4a2 2 0 0 0-2 2v12a2 2 0 0 0 2 2h8a2 2 0 0 0 2-2zM9.5 3A1.5 1.5 0 0 0 11 4.5h2V14a1 1 0 0 1-1 1H4a1 1 0 0 1-1-1V2a1 1 0 0 1 1-1h5.5v2z"/></svg> <svg width="16" height="16" fill="currentColor"viewBox="0 0 16 16"><path d="M6.854 7.146a.5.5 0 1 0-.708.708L7.293 9l-1.147 1.146a.5.5 0 0 0 .708.708L8 9.707l1.146 1.147a.5.5 0 0 0 .708-.708L8.707 9l1.147-1.146a.5.5 0 0 0-.708-.708L8 8.293 6.854 7.146z"/><path d="M14 14V4.5L9.5 0H4a2 2 0 0 0-2 2v12a2 2 0 0 0 2 2h8a2 2 0 0 0 2-2zM9.5 3A1.5 1.5 0 0 0 11 4.5h2V14a1 1 0 0 1-1 1H4a1 1 0 0 1-1-1V2a1 1 0 0 1 1-1h5.5v2z"/></svg>
</div>`; </div>`;
} }
let actionCell = ` let actionCell = `
<td class="cell-actions"> <td class="cell-actions">
${actionDownload} ${actionDownload}
${actionMove}
${actionDelete} ${actionDelete}
</td>` </td>`
$pathsTableBody.insertAdjacentHTML("beforeend", ` $pathsTableBody.insertAdjacentHTML("beforeend", `
<tr id="addPath${index}"> <tr id="addPath${index}">
<td class="path cell-icon">
${getSvg(file.path_type)}
</td>
<td class="path cell-name"> <td class="path cell-name">
<div>${getSvg(file.path_type)}</div> <a href="${encodedUrl}">${encodedName}</a>
<a href="${url}" title="${file.name}">${file.name}</a>
</td> </td>
<td class="cell-mtime">${formatMtime(file.mtime)}</td> <td class="cell-mtime">${formatMtime(file.mtime)}</td>
<td class="cell-size">${formatSize(file.size).join(" ")}</td> <td class="cell-size">${formatSize(file.size).join(" ")}</td>
@@ -196,7 +240,7 @@ function addPath(file, index) {
} }
/** /**
* Delete pathitem * Delete path
* @param {number} index * @param {number} index
* @returns * @returns
*/ */
@@ -226,6 +270,44 @@ async function deletePath(index) {
} }
} }
/**
* Move path
* @param {number} index
* @returns
*/
async function movePath(index) {
const file = DATA.paths[index];
if (!file) return;
const fileUrl = getUrl(file.name);
const fileUrlObj = new URL(fileUrl)
const prefix = DATA.uri_prefix.slice(0, -1);
const filePath = decodeURIComponent(fileUrlObj.pathname.slice(prefix.length));
const newPath = prompt("Enter new path", filePath)
if (!newPath || filePath === newPath) return;
const newFileUrl = fileUrlObj.origin + prefix + encodeURI(newPath);
try {
const res = await fetch(getUrl(file.name), {
method: "MOVE",
headers: {
"Destination": newFileUrl,
}
});
if (res.status >= 200 && res.status < 300) {
location.href = newFileUrl.split("/").slice(0, -1).join("/")
} else {
throw new Error(await res.text())
}
} catch (err) {
alert(`Cannot move \`${filePath}\` to \`${newPath}\`, ${err.message}`);
}
}
function dropzone() { function dropzone() {
["drag", "dragstart", "dragend", "dragover", "dragenter", "dragleave", "drop"].forEach(name => { ["drag", "dragstart", "dragend", "dragover", "dragenter", "dragleave", "drop"].forEach(name => {
document.addEventListener(name, e => { document.addEventListener(name, e => {
@@ -250,6 +332,24 @@ function dropzone() {
}); });
} }
/**
* Create a folder
* @param {string} name
*/
async function createFolder(name) {
const url = getUrl(name);
try {
const res = await fetch(url, {
method: "MKCOL",
});
if (res.status >= 200 && res.status < 300) {
location.href = url;
}
} catch (err) {
alert(`Cannot create folder \`${name}\`, ${err.message}`);
}
}
async function addFileEntries(entries, dirs) { async function addFileEntries(entries, dirs) {
for (const entry of entries) { for (const entry of entries) {
if (entry.isFile) { if (entry.isFile) {
@@ -275,12 +375,12 @@ function getSvg(path_type) {
switch (path_type) { switch (path_type) {
case "Dir": case "Dir":
return `<svg height="16" viewBox="0 0 14 16" width="14"><path fill-rule="evenodd" d="M13 4H7V3c0-.66-.31-1-1-1H1c-.55 0-1 .45-1 1v10c0 .55.45 1 1 1h12c.55 0 1-.45 1-1V5c0-.55-.45-1-1-1zM6 4H1V3h5v1z"></path></svg>`; return `<svg height="16" viewBox="0 0 14 16" width="14"><path fill-rule="evenodd" d="M13 4H7V3c0-.66-.31-1-1-1H1c-.55 0-1 .45-1 1v10c0 .55.45 1 1 1h12c.55 0 1-.45 1-1V5c0-.55-.45-1-1-1zM6 4H1V3h5v1z"></path></svg>`;
case "File": case "SymlinkFile":
return `<svg height="16" viewBox="0 0 12 16" width="12"><path fill-rule="evenodd" d="M6 5H2V4h4v1zM2 8h7V7H2v1zm0 2h7V9H2v1zm0 2h7v-1H2v1zm10-7.5V14c0 .55-.45 1-1 1H1c-.55 0-1-.45-1-1V2c0-.55.45-1 1-1h7.5L12 4.5zM11 5L8 2H1v12h10V5z"></path></svg>`; return `<svg height="16" viewBox="0 0 12 16" width="12"><path fill-rule="evenodd" d="M8.5 1H1c-.55 0-1 .45-1 1v12c0 .55.45 1 1 1h10c.55 0 1-.45 1-1V4.5L8.5 1zM11 14H1V2h7l3 3v9zM6 4.5l4 3-4 3v-2c-.98-.02-1.84.22-2.55.7-.71.48-1.19 1.25-1.45 2.3.02-1.64.39-2.88 1.13-3.73.73-.84 1.69-1.27 2.88-1.27v-2H6z"></path></svg>`;
case "SymlinkDir": case "SymlinkDir":
return `<svg height="16" viewBox="0 0 14 16" width="14"><path fill-rule="evenodd" d="M13 4H7V3c0-.66-.31-1-1-1H1c-.55 0-1 .45-1 1v10c0 .55.45 1 1 1h12c.55 0 1-.45 1-1V5c0-.55-.45-1-1-1zM1 3h5v1H1V3zm6 9v-2c-.98-.02-1.84.22-2.55.7-.71.48-1.19 1.25-1.45 2.3.02-1.64.39-2.88 1.13-3.73C4.86 8.43 5.82 8 7.01 8V6l4 3-4 3H7z"></path></svg>`; return `<svg height="16" viewBox="0 0 14 16" width="14"><path fill-rule="evenodd" d="M13 4H7V3c0-.66-.31-1-1-1H1c-.55 0-1 .45-1 1v10c0 .55.45 1 1 1h12c.55 0 1-.45 1-1V5c0-.55-.45-1-1-1zM1 3h5v1H1V3zm6 9v-2c-.98-.02-1.84.22-2.55.7-.71.48-1.19 1.25-1.45 2.3.02-1.64.39-2.88 1.13-3.73C4.86 8.43 5.82 8 7.01 8V6l4 3-4 3H7z"></path></svg>`;
default: default:
return `<svg height="16" viewBox="0 0 12 16" width="12"><path fill-rule="evenodd" d="M8.5 1H1c-.55 0-1 .45-1 1v12c0 .55.45 1 1 1h10c.55 0 1-.45 1-1V4.5L8.5 1zM11 14H1V2h7l3 3v9zM6 4.5l4 3-4 3v-2c-.98-.02-1.84.22-2.55.7-.71.48-1.19 1.25-1.45 2.3.02-1.64.39-2.88 1.13-3.73.73-.84 1.69-1.27 2.88-1.27v-2H6z"></path></svg>`; return `<svg height="16" viewBox="0 0 12 16" width="12"><path fill-rule="evenodd" d="M6 5H2V4h4v1zM2 8h7V7H2v1zm0 2h7V9H2v1zm0 2h7v-1H2v1zm10-7.5V14c0 .55-.45 1-1 1H1c-.55 0-1-.45-1-1V2c0-.55.45-1 1-1h7.5L12 4.5zM11 5L8 2H1v12h10V5z"></path></svg>`;
} }
} }
@@ -292,7 +392,7 @@ function formatMtime(mtime) {
const day = padZero(date.getDate(), 2); const day = padZero(date.getDate(), 2);
const hours = padZero(date.getHours(), 2); const hours = padZero(date.getHours(), 2);
const minutes = padZero(date.getMinutes(), 2); const minutes = padZero(date.getMinutes(), 2);
return `${year}/${month}/${day} ${hours}:${minutes}`; return `${year}-${month}-${day} ${hours}:${minutes}`;
} }
function padZero(value, size) { function padZero(value, size) {
@@ -309,9 +409,9 @@ function formatSize(size) {
function formatDuration(seconds) { function formatDuration(seconds) {
seconds = Math.ceil(seconds); seconds = Math.ceil(seconds);
let h = Math.floor(seconds / 3600); const h = Math.floor(seconds / 3600);
let m = Math.floor((seconds - h * 3600) / 60); const m = Math.floor((seconds - h * 3600) / 60);
let s = seconds - h * 3600 - m * 60 const s = seconds - h * 3600 - m * 60
return `${padZero(h, 2)}:${padZero(m, 2)}:${padZero(s, 2)}`; return `${padZero(h, 2)}:${padZero(m, 2)}:${padZero(s, 2)}`;
} }
@@ -323,17 +423,29 @@ function formatPercent(precent) {
} }
} }
function encodedStr(rawStr) {
return rawStr.replace(/[\u00A0-\u9999<>\&]/g, function(i) {
return '&#'+i.charCodeAt(0)+';';
});
}
function ready() { function ready() {
document.title = `Index of ${DATA.href} - Dufs`;
$pathsTable = document.querySelector(".paths-table") $pathsTable = document.querySelector(".paths-table")
$pathsTableBody = document.querySelector(".paths-table tbody"); $pathsTableBody = document.querySelector(".paths-table tbody");
$uploadersTable = document.querySelector(".uploaders-table"); $uploadersTable = document.querySelector(".uploaders-table");
$emptyFolder = document.querySelector(".empty-folder"); $emptyFolder = document.querySelector(".empty-folder");
$newFolder = document.querySelector(".new-folder");
if (DATA.allow_search) {
document.querySelector(".searchbar").classList.remove("hidden");
if (params.q) { if (params.q) {
document.getElementById('search').value = params.q; document.getElementById('search').value = params.q;
} }
}
addBreadcrumb(DATA.breadcrumb);
addBreadcrumb(DATA.href, DATA.uri_prefix);
if (Array.isArray(DATA.paths)) { if (Array.isArray(DATA.paths)) {
const len = DATA.paths.length; const len = DATA.paths.length;
if (len > 0) { if (len > 0) {
@@ -349,7 +461,14 @@ function ready() {
} }
if (DATA.allow_upload) { if (DATA.allow_upload) {
dropzone(); dropzone();
document.querySelector(".upload-control").classList.remove("hidden"); if (DATA.allow_delete) {
$newFolder.classList.remove("hidden");
$newFolder.addEventListener("click", () => {
const name = prompt("Enter name of new folder");
if (name) createFolder(name);
});
}
document.querySelector(".upload-file").classList.remove("hidden");
document.getElementById("file").addEventListener("change", e => { document.getElementById("file").addEventListener("change", e => {
const files = e.target.files; const files = e.target.files;
for (let file of files) { for (let file of files) {

View File

@@ -1,15 +1,19 @@
use clap::{Arg, ArgMatches, Command}; use clap::{value_parser, AppSettings, Arg, ArgMatches, Command};
use clap_complete::{generate, Generator, Shell};
#[cfg(feature = "tls")]
use rustls::{Certificate, PrivateKey}; use rustls::{Certificate, PrivateKey};
use std::env; use std::env;
use std::net::IpAddr; use std::net::IpAddr;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use crate::auth::parse_auth; use crate::auth::AccessControl;
use crate::auth::AuthMethod;
#[cfg(feature = "tls")]
use crate::tls::{load_certs, load_private_key}; use crate::tls::{load_certs, load_private_key};
use crate::BoxResult; use crate::BoxResult;
fn app() -> Command<'static> { pub fn build_cli() -> Command<'static> {
Command::new(env!("CARGO_CRATE_NAME")) let app = Command::new(env!("CARGO_CRATE_NAME"))
.version(env!("CARGO_PKG_VERSION")) .version(env!("CARGO_PKG_VERSION"))
.author(env!("CARGO_PKG_AUTHORS")) .author(env!("CARGO_PKG_AUTHORS"))
.about(concat!( .about(concat!(
@@ -17,14 +21,15 @@ fn app() -> Command<'static> {
" - ", " - ",
env!("CARGO_PKG_REPOSITORY") env!("CARGO_PKG_REPOSITORY")
)) ))
.global_setting(AppSettings::DeriveDisplayOrder)
.arg( .arg(
Arg::new("address") Arg::new("bind")
.short('b') .short('b')
.long("bind") .long("bind")
.help("Specify bind address") .help("Specify bind address")
.multiple_values(true) .multiple_values(true)
.multiple_occurrences(true) .multiple_occurrences(true)
.value_name("address"), .value_name("addr"),
) )
.arg( .arg(
Arg::new("port") Arg::new("port")
@@ -38,13 +43,36 @@ fn app() -> Command<'static> {
Arg::new("path") Arg::new("path")
.default_value(".") .default_value(".")
.allow_invalid_utf8(true) .allow_invalid_utf8(true)
.help("Path to a root directory for serving files"), .help("Specific path to serve"),
) )
.arg( .arg(
Arg::new("path-prefix") Arg::new("path-prefix")
.long("path-prefix") .long("path-prefix")
.value_name("path") .value_name("path")
.help("Specify an url path prefix"), .help("Specify an path prefix"),
)
.arg(
Arg::new("hidden")
.long("hidden")
.help("Hide directories from directory listings, separated by `,`")
.value_name("value"),
)
.arg(
Arg::new("auth")
.short('a')
.long("auth")
.help("Add auth for path")
.multiple_values(true)
.multiple_occurrences(true)
.value_name("rule"),
)
.arg(
Arg::new("auth-method")
.long("auth-method")
.help("Select auth method")
.possible_values(["basic", "digest"])
.default_value("digest")
.value_name("value"),
) )
.arg( .arg(
Arg::new("allow-all") Arg::new("allow-all")
@@ -62,40 +90,46 @@ fn app() -> Command<'static> {
.long("allow-delete") .long("allow-delete")
.help("Allow delete files/folders"), .help("Allow delete files/folders"),
) )
.arg(
Arg::new("allow-search")
.long("allow-search")
.help("Allow search files/folders"),
)
.arg( .arg(
Arg::new("allow-symlink") Arg::new("allow-symlink")
.long("allow-symlink") .long("allow-symlink")
.help("Allow symlink to files/folders outside root directory"), .help("Allow symlink to files/folders outside root directory"),
) )
.arg(
Arg::new("enable-cors")
.long("enable-cors")
.help("Enable CORS, sets `Access-Control-Allow-Origin: *`"),
)
.arg( .arg(
Arg::new("render-index") Arg::new("render-index")
.long("render-index") .long("render-index")
.help("Render index.html when requesting a directory"), .help("Serve index.html when requesting a directory, returns 404 if not found index.html"),
)
.arg(
Arg::new("render-try-index")
.long("render-try-index")
.help("Serve index.html when requesting a directory, returns directory listing if not found index.html"),
) )
.arg( .arg(
Arg::new("render-spa") Arg::new("render-spa")
.long("render-spa") .long("render-spa")
.help("Render for single-page application"), .help("Serve SPA(Single Page Application)"),
) )
.arg( .arg(
Arg::new("auth") Arg::new("completions")
.short('a') .long("completions")
.display_order(1) .value_name("shell")
.long("auth") .value_parser(value_parser!(Shell))
.help("Use HTTP authentication") .help("Print shell completion script for <shell>"),
.value_name("user:pass"), );
)
.arg( #[cfg(feature = "tls")]
Arg::new("no-auth-access") let app = app
.display_order(1)
.long("no-auth-access")
.help("Not required auth when access static files"),
)
.arg(
Arg::new("cors")
.long("cors")
.help("Enable CORS, sets `Access-Control-Allow-Origin: *`"),
)
.arg( .arg(
Arg::new("tls-cert") Arg::new("tls-cert")
.long("tls-cert") .long("tls-cert")
@@ -107,29 +141,38 @@ fn app() -> Command<'static> {
.long("tls-key") .long("tls-key")
.value_name("path") .value_name("path")
.help("Path to the SSL/TLS certificate's private key"), .help("Path to the SSL/TLS certificate's private key"),
) );
app
} }
pub fn matches() -> ArgMatches { pub fn print_completions<G: Generator>(gen: G, cmd: &mut Command) {
app().get_matches() generate(gen, cmd, cmd.get_name().to_string(), &mut std::io::stdout());
} }
#[derive(Debug, Clone, Eq, PartialEq)] #[derive(Debug)]
pub struct Args { pub struct Args {
pub addrs: Vec<IpAddr>, pub addrs: Vec<IpAddr>,
pub port: u16, pub port: u16,
pub path: PathBuf, pub path: PathBuf,
pub path_is_file: bool,
pub path_prefix: String, pub path_prefix: String,
pub uri_prefix: String, pub uri_prefix: String,
pub auth: Option<(String, String)>, pub hidden: String,
pub no_auth_access: bool, pub auth_method: AuthMethod,
pub auth: AccessControl,
pub allow_upload: bool, pub allow_upload: bool,
pub allow_delete: bool, pub allow_delete: bool,
pub allow_search: bool,
pub allow_symlink: bool, pub allow_symlink: bool,
pub render_index: bool, pub render_index: bool,
pub render_spa: bool, pub render_spa: bool,
pub cors: bool, pub render_try_index: bool,
pub enable_cors: bool,
#[cfg(feature = "tls")]
pub tls: Option<(Vec<Certificate>, PrivateKey)>, pub tls: Option<(Vec<Certificate>, PrivateKey)>,
#[cfg(not(feature = "tls"))]
pub tls: Option<()>,
} }
impl Args { impl Args {
@@ -140,11 +183,12 @@ impl Args {
pub fn parse(matches: ArgMatches) -> BoxResult<Args> { pub fn parse(matches: ArgMatches) -> BoxResult<Args> {
let port = matches.value_of_t::<u16>("port")?; let port = matches.value_of_t::<u16>("port")?;
let addrs = matches let addrs = matches
.values_of("address") .values_of("bind")
.map(|v| v.collect()) .map(|v| v.collect())
.unwrap_or_else(|| vec!["0.0.0.0", "::"]); .unwrap_or_else(|| vec!["0.0.0.0", "::"]);
let addrs: Vec<IpAddr> = Args::parse_addrs(&addrs)?; let addrs: Vec<IpAddr> = Args::parse_addrs(&addrs)?;
let path = Args::parse_path(matches.value_of_os("path").unwrap_or_default())?; let path = Args::parse_path(matches.value_of_os("path").unwrap_or_default())?;
let path_is_file = path.metadata()?.is_file();
let path_prefix = matches let path_prefix = matches
.value_of("path-prefix") .value_of("path-prefix")
.map(|v| v.trim_matches('/').to_owned()) .map(|v| v.trim_matches('/').to_owned())
@@ -154,17 +198,28 @@ impl Args {
} else { } else {
format!("/{}/", &path_prefix) format!("/{}/", &path_prefix)
}; };
let cors = matches.is_present("cors"); let hidden: String = matches
let auth = match matches.value_of("auth") { .value_of("hidden")
Some(auth) => Some(parse_auth(auth)?), .map(|v| format!(",{},", v))
None => None, .unwrap_or_default();
let enable_cors = matches.is_present("enable-cors");
let auth: Vec<&str> = matches
.values_of("auth")
.map(|v| v.collect())
.unwrap_or_default();
let auth_method = match matches.value_of("auth-method").unwrap() {
"basic" => AuthMethod::Basic,
_ => AuthMethod::Digest,
}; };
let no_auth_access = matches.is_present("no-auth-access"); let auth = AccessControl::new(&auth, &uri_prefix)?;
let allow_upload = matches.is_present("allow-all") || matches.is_present("allow-upload"); let allow_upload = matches.is_present("allow-all") || matches.is_present("allow-upload");
let allow_delete = matches.is_present("allow-all") || matches.is_present("allow-delete"); let allow_delete = matches.is_present("allow-all") || matches.is_present("allow-delete");
let allow_search = matches.is_present("allow-all") || matches.is_present("allow-search");
let allow_symlink = matches.is_present("allow-all") || matches.is_present("allow-symlink"); let allow_symlink = matches.is_present("allow-all") || matches.is_present("allow-symlink");
let render_index = matches.is_present("render-index"); let render_index = matches.is_present("render-index");
let render_try_index = matches.is_present("render-try-index");
let render_spa = matches.is_present("render-spa"); let render_spa = matches.is_present("render-spa");
#[cfg(feature = "tls")]
let tls = match (matches.value_of("tls-cert"), matches.value_of("tls-key")) { let tls = match (matches.value_of("tls-cert"), matches.value_of("tls-key")) {
(Some(certs_file), Some(key_file)) => { (Some(certs_file), Some(key_file)) => {
let certs = load_certs(certs_file)?; let certs = load_certs(certs_file)?;
@@ -173,20 +228,26 @@ impl Args {
} }
_ => None, _ => None,
}; };
#[cfg(not(feature = "tls"))]
let tls = None;
Ok(Args { Ok(Args {
addrs, addrs,
port, port,
path, path,
path_is_file,
path_prefix, path_prefix,
uri_prefix, uri_prefix,
hidden,
auth_method,
auth, auth,
no_auth_access, enable_cors,
cors,
allow_delete, allow_delete,
allow_upload, allow_upload,
allow_search,
allow_symlink, allow_symlink,
render_index, render_index,
render_try_index,
render_spa, render_spa,
tls, tls,
}) })

View File

@@ -1,4 +1,5 @@
use headers::HeaderValue; use headers::HeaderValue;
use hyper::Method;
use lazy_static::lazy_static; use lazy_static::lazy_static;
use md5::Context; use md5::Context;
use std::{ use std::{
@@ -7,9 +8,10 @@ use std::{
}; };
use uuid::Uuid; use uuid::Uuid;
use crate::utils::encode_uri;
use crate::BoxResult; use crate::BoxResult;
const REALM: &str = "DUF"; const REALM: &str = "DUFS";
lazy_static! { lazy_static! {
static ref NONCESTARTHASH: Context = { static ref NONCESTARTHASH: Context = {
@@ -20,7 +22,171 @@ lazy_static! {
}; };
} }
pub fn generate_www_auth(stale: bool) -> String { #[derive(Debug)]
/// Access-control table: maps sanitized URI paths to their access rules.
pub struct AccessControl {
    // Keyed by the sanitized rule path (uri_prefix + trimmed path, URI-encoded);
    // see `sanitize_path`. Empty means "no rules: allow everything".
    rules: HashMap<String, PathControl>,
}
#[derive(Debug)]
/// Access rule for a single path subtree.
pub struct PathControl {
    // Account granted full read-write access.
    readwrite: Account,
    // Account granted read-only access; `None` when unset or when the rule
    // uses `*` (in which case `share` is true instead).
    readonly: Option<Account>,
    // True when the rule's read-only segment is `*`: read-only access is
    // granted without credentials.
    share: bool,
}
impl AccessControl {
    /// Builds the table from raw rules of the form `path@rw-account` or
    /// `path@rw-account@readonly-account` (the third segment may be `*`
    /// for anonymous read-only access). Accounts are `user:pass`.
    ///
    /// Returns an error if any rule does not split on `@` into 2 or 3 parts
    /// or an account fails to parse.
    pub fn new(raw_rules: &[&str], uri_prefix: &str) -> BoxResult<Self> {
        let mut rules = HashMap::default();
        if raw_rules.is_empty() {
            return Ok(Self { rules });
        }
        for rule in raw_rules {
            let parts: Vec<&str> = rule.split('@').collect();
            let create_err = || format!("Invalid auth `{}`", rule).into();
            match parts.as_slice() {
                // `path@readwrite`: only a read-write account is declared.
                [path, readwrite] => {
                    let control = PathControl {
                        readwrite: Account::new(readwrite).ok_or_else(create_err)?,
                        readonly: None,
                        share: false,
                    };
                    rules.insert(sanitize_path(path, uri_prefix), control);
                }
                // `path@readwrite@readonly` or `path@readwrite@*`.
                [path, readwrite, readonly] => {
                    let (readonly, share) = if *readonly == "*" {
                        (None, true)
                    } else {
                        (Some(Account::new(readonly).ok_or_else(create_err)?), false)
                    };
                    let control = PathControl {
                        readwrite: Account::new(readwrite).ok_or_else(create_err)?,
                        readonly,
                        share,
                    };
                    rules.insert(sanitize_path(path, uri_prefix), control);
                }
                _ => return Err(create_err()),
            }
        }
        Ok(Self { rules })
    }

    /// Decides the access level for `path`, given the request `method` and
    /// the optional `Authorization` header value.
    ///
    /// With no rules configured, everything is `ReadWrite`. Otherwise each
    /// ancestor of `path` (as produced by `walk_path`) is checked in order
    /// from root to leaf.
    pub fn guard(
        &self,
        path: &str,
        method: &Method,
        authorization: Option<&HeaderValue>,
        auth_method: AuthMethod,
    ) -> GuardType {
        if self.rules.is_empty() {
            return GuardType::ReadWrite;
        }
        // Collect every rule applying to `path` or an ancestor; credentials
        // matching any ancestor's read-write account grant full access
        // immediately.
        let mut controls = vec![];
        for path in walk_path(path) {
            if let Some(control) = self.rules.get(path) {
                controls.push(control);
                if let Some(authorization) = authorization {
                    let Account { user, pass } = &control.readwrite;
                    if auth_method
                        .validate(authorization, method.as_str(), user, pass)
                        .is_some()
                    {
                        return GuardType::ReadWrite;
                    }
                }
            }
        }
        // For read-only methods, a shared (`*`) rule or matching read-only
        // credentials on any applicable rule still grant ReadOnly access.
        if is_readonly_method(method) {
            for control in controls.into_iter() {
                if control.share {
                    return GuardType::ReadOnly;
                }
                if let Some(authorization) = authorization {
                    if let Some(Account { user, pass }) = &control.readonly {
                        if auth_method
                            .validate(authorization, method.as_str(), user, pass)
                            .is_some()
                        {
                            return GuardType::ReadOnly;
                        }
                    }
                }
            }
        }
        GuardType::Reject
    }
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
/// Outcome of an access-control check (see `AccessControl::guard`).
pub enum GuardType {
    /// Access denied.
    Reject,
    /// Full read-write access granted.
    ReadWrite,
    /// Read-only access granted (read-only account or shared `*` rule).
    ReadOnly,
}
impl GuardType {
    /// True when the access check denied the request.
    pub fn is_reject(&self) -> bool {
        matches!(self, GuardType::Reject)
    }
}
/// Normalizes a rule path: strips surrounding slashes, prepends the URI
/// prefix, and URI-encodes the result so it matches request paths.
fn sanitize_path(path: &str, uri_prefix: &str) -> String {
    let trimmed = path.trim_matches('/');
    let joined = format!("{}{}", uri_prefix, trimmed);
    encode_uri(&joined)
}
/// Yields `path` and each of its ancestor prefixes, shortest first:
/// `"/a/b"` → `"/"`, `"/a"`, `"/a/b"`.
///
/// The first yielded item is the root `"/"` for rooted paths. For a
/// non-rooted path the whole first component is yielded (the previous
/// hard-coded `&path[..1]` slice panicked on an empty path and could split
/// a multi-byte first character); behavior for rooted URI paths — the only
/// inputs `guard` passes — is unchanged.
fn walk_path(path: &str) -> impl Iterator<Item = &str> {
    let mut idx = 0;
    path.split('/').enumerate().map(move |(i, part)| {
        let end = if i == 0 {
            // A rooted path splits to a leading "" component: yield "/".
            if path.starts_with('/') {
                1
            } else {
                part.len()
            }
        } else {
            // Accumulated component bytes plus `i` separator slashes.
            idx + part.len() + i
        };
        let value = &path[..end];
        idx += part.len();
        value
    })
}
fn is_readonly_method(method: &Method) -> bool {
method == Method::GET
|| method == Method::OPTIONS
|| method == Method::HEAD
|| method.as_str() == "PROPFIND"
}
#[derive(Debug, Clone)]
/// One set of credentials. `pass` is not the plaintext password but the
/// MD5 hex digest of `user:REALM:pass` (computed in `Account::new`).
struct Account {
    user: String,
    pass: String,
}
impl Account {
    /// Parses `user:pass` credentials. Returns `None` unless the trimmed
    /// input contains exactly one `:` separating two fields. The stored
    /// password is the MD5 hex digest of `user:REALM:pass`, not plaintext.
    fn new(data: &str) -> Option<Self> {
        let fields: Vec<&str> = data.trim().split(':').collect();
        match fields.as_slice() {
            [user, pass] => {
                let mut ctx = Context::new();
                ctx.consume(format!("{}:{}:{}", user, REALM, pass).as_bytes());
                Some(Account {
                    user: (*user).to_owned(),
                    pass: format!("{:x}", ctx.compute()),
                })
            }
            _ => None,
        }
    }
}
#[derive(Debug, Clone)]
/// HTTP authentication scheme used for challenges and credential checks.
pub enum AuthMethod {
    /// `Authorization: Basic <base64(user:pass)>`.
    Basic,
    /// HTTP Digest access authentication (nonce + qop="auth" challenge).
    Digest,
}
impl AuthMethod {
pub fn www_auth(&self, stale: bool) -> String {
match self {
AuthMethod::Basic => {
format!("Basic realm=\"{}\"", REALM)
}
AuthMethod::Digest => {
let str_stale = if stale { "stale=true," } else { "" }; let str_stale = if stale { "stale=true," } else { "" };
format!( format!(
"Digest realm=\"{}\",nonce=\"{}\",{}qop=\"auth\"", "Digest realm=\"{}\",nonce=\"{}\",{}qop=\"auth\"",
@@ -28,28 +194,40 @@ pub fn generate_www_auth(stale: bool) -> String {
create_nonce(), create_nonce(),
str_stale str_stale
) )
}
pub fn parse_auth(auth: &str) -> BoxResult<(String, String)> {
let p: Vec<&str> = auth.trim().split(':').collect();
let err = "Invalid auth value";
if p.len() != 2 {
return Err(err.into());
} }
let user = p[0]; }
let pass = p[1]; }
let mut h = Context::new(); pub fn validate(
h.consume(format!("{}:{}:{}", user, REALM, pass).as_bytes()); &self,
Ok((user.to_owned(), format!("{:x}", h.compute()))) authorization: &HeaderValue,
}
pub fn valid_digest(
header_value: &HeaderValue,
method: &str, method: &str,
auth_user: &str, auth_user: &str,
auth_pass: &str, auth_pass: &str,
) -> Option<()> { ) -> Option<()> {
let digest_value = strip_prefix(header_value.as_bytes(), b"Digest ")?; match self {
AuthMethod::Basic => {
let value: Vec<u8> =
base64::decode(strip_prefix(authorization.as_bytes(), b"Basic ").unwrap())
.unwrap();
let parts: Vec<&str> = std::str::from_utf8(&value).unwrap().split(':').collect();
if parts[0] != auth_user {
return None;
}
let mut h = Context::new();
h.consume(format!("{}:{}:{}", parts[0], REALM, parts[1]).as_bytes());
let http_pass = format!("{:x}", h.compute());
if http_pass == auth_pass {
return Some(());
}
None
}
AuthMethod::Digest => {
let digest_value = strip_prefix(authorization.as_bytes(), b"Digest ")?;
let user_vals = to_headermap(digest_value).ok()?; let user_vals = to_headermap(digest_value).ok()?;
if let (Some(username), Some(nonce), Some(user_response)) = ( if let (Some(username), Some(nonce), Some(user_response)) = (
user_vals user_vals
@@ -114,6 +292,9 @@ pub fn valid_digest(
} }
} }
None None
}
}
}
} }
/// Check if a nonce is still valid. /// Check if a nonce is still valid.

30
src/logger.rs Normal file
View File

@@ -0,0 +1,30 @@
use chrono::{Local, SecondsFormat};
use log::{Level, Metadata, Record};
use log::{LevelFilter, SetLoggerError};
// Zero-sized `log::Log` implementation that writes timestamped lines to
// stdout (Info) and stderr (Warn/Error).
struct SimpleLogger;
impl log::Log for SimpleLogger {
    /// Accept records at Info severity or more severe (Error < Warn < Info).
    fn enabled(&self, metadata: &Metadata) -> bool {
        metadata.level() <= Level::Info
    }

    /// Print one timestamped line per record; Warn/Error go to stderr,
    /// Info goes to stdout.
    fn log(&self, record: &Record) {
        if !self.enabled(record.metadata()) {
            return;
        }
        let timestamp = Local::now().to_rfc3339_opts(SecondsFormat::Secs, true);
        let severe = record.level() < Level::Info;
        if severe {
            eprintln!("{} {} - {}", timestamp, record.level(), record.args());
        } else {
            println!("{} {} - {}", timestamp, record.level(), record.args());
        }
    }

    // Both streams are line-buffered by the println!/eprintln! macros;
    // nothing to flush.
    fn flush(&self) {}
}
// Global singleton handed to the `log` facade by `init`.
static LOGGER: SimpleLogger = SimpleLogger;
/// Installs `LOGGER` as the process-wide logger and caps verbosity at Info.
///
/// # Errors
/// Returns `SetLoggerError` if a global logger was already installed.
pub fn init() -> Result<(), SetLoggerError> {
    log::set_logger(&LOGGER)?;
    log::set_max_level(LevelFilter::Info);
    Ok(())
}

View File

@@ -1,25 +1,32 @@
mod args; mod args;
mod auth; mod auth;
mod logger;
mod server; mod server;
mod streamer;
#[cfg(feature = "tls")]
mod tls; mod tls;
mod utils;
#[macro_use] #[macro_use]
extern crate log; extern crate log;
use crate::args::{matches, Args}; use crate::args::{build_cli, print_completions, Args};
use crate::server::{Request, Server}; use crate::server::{Request, Server};
#[cfg(feature = "tls")]
use crate::tls::{TlsAcceptor, TlsStream}; use crate::tls::{TlsAcceptor, TlsStream};
use std::io::Write;
use std::net::{IpAddr, SocketAddr, TcpListener as StdTcpListener}; use std::net::{IpAddr, SocketAddr, TcpListener as StdTcpListener};
use std::{env, sync::Arc}; use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use clap_complete::Shell;
use futures::future::join_all; use futures::future::join_all;
use tokio::net::TcpListener; use tokio::net::TcpListener;
use tokio::task::JoinHandle; use tokio::task::JoinHandle;
use hyper::server::conn::{AddrIncoming, AddrStream}; use hyper::server::conn::{AddrIncoming, AddrStream};
use hyper::service::{make_service_fn, service_fn}; use hyper::service::{make_service_fn, service_fn};
#[cfg(feature = "tls")]
use rustls::ServerConfig; use rustls::ServerConfig;
pub type BoxResult<T> = Result<T, Box<dyn std::error::Error>>; pub type BoxResult<T> = Result<T, Box<dyn std::error::Error>>;
@@ -30,19 +37,18 @@ async fn main() {
} }
async fn run() -> BoxResult<()> { async fn run() -> BoxResult<()> {
if env::var("RUST_LOG").is_err() { logger::init().map_err(|e| format!("Failed to init logger, {}", e))?;
env::set_var("RUST_LOG", "info") let cmd = build_cli();
let matches = cmd.get_matches();
if let Some(generator) = matches.get_one::<Shell>("completions") {
let mut cmd = build_cli();
print_completions(*generator, &mut cmd);
return Ok(());
} }
env_logger::builder() let args = Args::parse(matches)?;
.format(|buf, record| {
let timestamp = buf.timestamp_millis();
writeln!(buf, "[{} {}] {}", timestamp, record.level(), record.args())
})
.init();
let args = Args::parse(matches())?;
let args = Arc::new(args); let args = Arc::new(args);
let handles = serve(args.clone())?; let running = Arc::new(AtomicBool::new(true));
let handles = serve(args.clone(), running.clone())?;
print_listening(args)?; print_listening(args)?;
tokio::select! { tokio::select! {
@@ -55,13 +61,17 @@ async fn run() -> BoxResult<()> {
Ok(()) Ok(())
}, },
_ = shutdown_signal() => { _ = shutdown_signal() => {
running.store(false, Ordering::SeqCst);
Ok(()) Ok(())
}, },
} }
} }
fn serve(args: Arc<Args>) -> BoxResult<Vec<JoinHandle<Result<(), hyper::Error>>>> { fn serve(
let inner = Arc::new(Server::new(args.clone())); args: Arc<Args>,
running: Arc<AtomicBool>,
) -> BoxResult<Vec<JoinHandle<Result<(), hyper::Error>>>> {
let inner = Arc::new(Server::new(args.clone(), running));
let mut handles = vec![]; let mut handles = vec![];
let port = args.port; let port = args.port;
for ip in args.addrs.iter() { for ip in args.addrs.iter() {
@@ -77,12 +87,13 @@ fn serve(args: Arc<Args>) -> BoxResult<Vec<JoinHandle<Result<(), hyper::Error>>>
})) }))
} }
}; };
match args.tls.clone() { match args.tls.as_ref() {
#[cfg(feature = "tls")]
Some((certs, key)) => { Some((certs, key)) => {
let config = ServerConfig::builder() let config = ServerConfig::builder()
.with_safe_defaults() .with_safe_defaults()
.with_no_client_auth() .with_no_client_auth()
.with_single_cert(certs, key)?; .with_single_cert(certs.clone(), key.clone())?;
let config = Arc::new(config); let config = Arc::new(config);
let accepter = TlsAcceptor::new(config.clone(), incoming); let accepter = TlsAcceptor::new(config.clone(), incoming);
let new_service = make_service_fn(move |socket: &TlsStream| { let new_service = make_service_fn(move |socket: &TlsStream| {
@@ -92,6 +103,10 @@ fn serve(args: Arc<Args>) -> BoxResult<Vec<JoinHandle<Result<(), hyper::Error>>>
let server = tokio::spawn(hyper::Server::builder(accepter).serve(new_service)); let server = tokio::spawn(hyper::Server::builder(accepter).serve(new_service));
handles.push(server); handles.push(server);
} }
#[cfg(not(feature = "tls"))]
Some(_) => {
unreachable!()
}
None => { None => {
let new_service = make_service_fn(move |socket: &AddrStream| { let new_service = make_service_fn(move |socket: &AddrStream| {
let remote_addr = socket.remote_addr(); let remote_addr = socket.remote_addr();
@@ -135,7 +150,7 @@ fn print_listening(args: Arc<Args>) -> BoxResult<()> {
} }
} }
if ipv4 || ipv6 { if ipv4 || ipv6 {
let ifaces = get_if_addrs::get_if_addrs() let ifaces = if_addrs::get_if_addrs()
.map_err(|e| format!("Failed to get local interface addresses: {}", e))?; .map_err(|e| format!("Failed to get local interface addresses: {}", e))?;
for iface in ifaces.into_iter() { for iface in ifaces.into_iter() {
let local_ip = iface.ip(); let local_ip = iface.ip();

View File

@@ -1,35 +1,35 @@
use crate::auth::{generate_www_auth, valid_digest}; use crate::streamer::Streamer;
use crate::utils::{decode_uri, encode_uri, get_file_name, try_get_file_name};
use crate::{Args, BoxResult}; use crate::{Args, BoxResult};
use walkdir::WalkDir;
use xml::escape::escape_str_pcdata; use xml::escape::escape_str_pcdata;
use async_walkdir::WalkDir;
use async_zip::write::{EntryOptions, ZipFileWriter}; use async_zip::write::{EntryOptions, ZipFileWriter};
use async_zip::Compression; use async_zip::Compression;
use chrono::{TimeZone, Utc}; use chrono::{TimeZone, Utc};
use futures::stream::StreamExt;
use futures::TryStreamExt; use futures::TryStreamExt;
use headers::{ use headers::{
AcceptRanges, AccessControlAllowCredentials, AccessControlAllowHeaders, AcceptRanges, AccessControlAllowCredentials, AccessControlAllowHeaders,
AccessControlAllowOrigin, Connection, ContentLength, ContentRange, ContentType, ETag, AccessControlAllowOrigin, Connection, ContentLength, ContentType, ETag, HeaderMap,
HeaderMap, HeaderMapExt, IfModifiedSince, IfNoneMatch, IfRange, LastModified, Range, HeaderMapExt, IfModifiedSince, IfNoneMatch, IfRange, LastModified, Range,
}; };
use hyper::header::{ use hyper::header::{
HeaderValue, ACCEPT, AUTHORIZATION, CONTENT_DISPOSITION, CONTENT_TYPE, ORIGIN, RANGE, HeaderValue, ACCEPT, AUTHORIZATION, CONTENT_DISPOSITION, CONTENT_LENGTH, CONTENT_RANGE,
WWW_AUTHENTICATE, CONTENT_TYPE, ORIGIN, RANGE, WWW_AUTHENTICATE,
}; };
use hyper::{Body, Method, StatusCode, Uri}; use hyper::{Body, Method, StatusCode, Uri};
use percent_encoding::percent_decode;
use serde::Serialize; use serde::Serialize;
use std::fs::Metadata; use std::fs::Metadata;
use std::io::SeekFrom;
use std::net::SocketAddr; use std::net::SocketAddr;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc; use std::sync::Arc;
use std::time::SystemTime; use std::time::SystemTime;
use tokio::fs::File; use tokio::fs::File;
use tokio::io::{AsyncReadExt, AsyncSeekExt, AsyncWrite}; use tokio::io::{AsyncSeekExt, AsyncWrite};
use tokio::{fs, io}; use tokio::{fs, io};
use tokio_util::codec::{BytesCodec, FramedRead}; use tokio_util::io::StreamReader;
use tokio_util::io::{ReaderStream, StreamReader};
use uuid::Uuid; use uuid::Uuid;
pub type Request = hyper::Request<Body>; pub type Request = hyper::Request<Body>;
@@ -40,15 +40,22 @@ const INDEX_CSS: &str = include_str!("../assets/index.css");
const INDEX_JS: &str = include_str!("../assets/index.js"); const INDEX_JS: &str = include_str!("../assets/index.js");
const FAVICON_ICO: &[u8] = include_bytes!("../assets/favicon.ico"); const FAVICON_ICO: &[u8] = include_bytes!("../assets/favicon.ico");
const INDEX_NAME: &str = "index.html"; const INDEX_NAME: &str = "index.html";
const BUF_SIZE: usize = 1024 * 16; const BUF_SIZE: usize = 65536;
pub struct Server { pub struct Server {
args: Arc<Args>, args: Arc<Args>,
assets_prefix: String,
running: Arc<AtomicBool>,
} }
impl Server { impl Server {
pub fn new(args: Arc<Args>) -> Self { pub fn new(args: Arc<Args>, running: Arc<AtomicBool>) -> Self {
Self { args } let assets_prefix = format!("{}__dufs_v{}_", args.uri_prefix, env!("CARGO_PKG_VERSION"));
Self {
args,
running,
assets_prefix,
}
} }
pub async fn call( pub async fn call(
@@ -58,12 +65,15 @@ impl Server {
) -> Result<Response, hyper::Error> { ) -> Result<Response, hyper::Error> {
let method = req.method().clone(); let method = req.method().clone();
let uri = req.uri().clone(); let uri = req.uri().clone();
let cors = self.args.cors; let assets_prefix = self.assets_prefix.clone();
let enable_cors = self.args.enable_cors;
let mut res = match self.handle(req).await { let mut res = match self.handle(req).await {
Ok(res) => { Ok(res) => {
let status = res.status().as_u16(); let status = res.status().as_u16();
if !uri.path().starts_with(&assets_prefix) {
info!(r#"{} "{} {}" - {}"#, addr.ip(), method, uri, status,); info!(r#"{} "{} {}" - {}"#, addr.ip(), method, uri, status,);
}
res res
} }
Err(err) => { Err(err) => {
@@ -76,7 +86,7 @@ impl Server {
} }
}; };
if cors { if enable_cors {
add_cors(&mut res); add_cors(&mut res);
} }
Ok(res) Ok(res)
@@ -85,16 +95,31 @@ impl Server {
pub async fn handle(self: Arc<Self>, req: Request) -> BoxResult<Response> { pub async fn handle(self: Arc<Self>, req: Request) -> BoxResult<Response> {
let mut res = Response::default(); let mut res = Response::default();
if !self.auth_guard(&req, &mut res) {
return Ok(res);
}
let req_path = req.uri().path(); let req_path = req.uri().path();
let headers = req.headers(); let headers = req.headers();
let method = req.method().clone(); let method = req.method().clone();
if req_path == "/favicon.ico" && method == Method::GET { if method == Method::GET && self.handle_embed_assets(req_path, &mut res).await? {
self.handle_send_favicon(req.headers(), &mut res).await?; return Ok(res);
}
let authorization = headers.get(AUTHORIZATION);
let guard_type = self.args.auth.guard(
req_path,
&method,
authorization,
self.args.auth_method.clone(),
);
if guard_type.is_reject() {
self.auth_reject(&mut res);
return Ok(res);
}
let head_only = method == Method::HEAD;
if self.args.path_is_file {
self.handle_send_file(&self.args.path, headers, head_only, &mut res)
.await?;
return Ok(res); return Ok(res);
} }
@@ -105,6 +130,7 @@ impl Server {
return Ok(res); return Ok(res);
} }
}; };
let path = path.as_path(); let path = path.as_path();
let query = req.uri().query().unwrap_or_default(); let query = req.uri().query().unwrap_or_default();
@@ -116,8 +142,10 @@ impl Server {
let allow_upload = self.args.allow_upload; let allow_upload = self.args.allow_upload;
let allow_delete = self.args.allow_delete; let allow_delete = self.args.allow_delete;
let allow_search = self.args.allow_search;
let render_index = self.args.render_index; let render_index = self.args.render_index;
let render_spa = self.args.render_spa; let render_spa = self.args.render_spa;
let render_try_index = self.args.render_try_index;
if !self.args.allow_symlink && !is_miss && !self.is_root_contained(path).await { if !self.args.allow_symlink && !is_miss && !self.is_root_contained(path).await {
status_not_found(&mut res); status_not_found(&mut res);
@@ -126,15 +154,27 @@ impl Server {
match method { match method {
Method::GET | Method::HEAD => { Method::GET | Method::HEAD => {
let head_only = method == Method::HEAD;
if is_dir { if is_dir {
if render_index || render_spa { if render_try_index {
if query == "zip" {
self.handle_zip_dir(path, head_only, &mut res).await?;
} else if allow_search && query.starts_with("q=") {
let q = decode_uri(&query[2..]).unwrap_or_default();
self.handle_search_dir(path, &q, head_only, &mut res)
.await?;
} else {
self.handle_render_index(path, headers, head_only, &mut res)
.await?;
}
} else if render_index || render_spa {
self.handle_render_index(path, headers, head_only, &mut res) self.handle_render_index(path, headers, head_only, &mut res)
.await?; .await?;
} else if query == "zip" { } else if query == "zip" {
self.handle_zip_dir(path, head_only, &mut res).await?; self.handle_zip_dir(path, head_only, &mut res).await?;
} else if let Some(q) = query.strip_prefix("q=") { } else if allow_search && query.starts_with("q=") {
self.handle_query_dir(path, q, head_only, &mut res).await?; let q = decode_uri(&query[2..]).unwrap_or_default();
self.handle_search_dir(path, &q, head_only, &mut res)
.await?;
} else { } else {
self.handle_ls_dir(path, true, head_only, &mut res).await?; self.handle_ls_dir(path, true, head_only, &mut res).await?;
} }
@@ -199,7 +239,7 @@ impl Server {
} else if is_miss { } else if is_miss {
status_not_found(&mut res); status_not_found(&mut res);
} else { } else {
self.handle_copy(path, headers, &mut res).await? self.handle_copy(path, &req, &mut res).await?
} }
} }
"MOVE" => { "MOVE" => {
@@ -208,13 +248,14 @@ impl Server {
} else if is_miss { } else if is_miss {
status_not_found(&mut res); status_not_found(&mut res);
} else { } else {
self.handle_move(path, headers, &mut res).await? self.handle_move(path, &req, &mut res).await?
} }
} }
"LOCK" => { "LOCK" => {
// Fake lock // Fake lock
if is_file { if is_file {
self.handle_lock(req_path, &mut res).await?; let has_auth = authorization.is_some();
self.handle_lock(req_path, has_auth, &mut res).await?;
} else { } else {
status_not_found(&mut res); status_not_found(&mut res);
} }
@@ -293,33 +334,50 @@ impl Server {
self.send_index(path, paths, exist, head_only, res) self.send_index(path, paths, exist, head_only, res)
} }
async fn handle_query_dir( async fn handle_search_dir(
&self, &self,
path: &Path, path: &Path,
query: &str, search: &str,
head_only: bool, head_only: bool,
res: &mut Response, res: &mut Response,
) -> BoxResult<()> { ) -> BoxResult<()> {
let mut paths: Vec<PathItem> = vec![]; let mut paths: Vec<PathItem> = vec![];
let mut walkdir = WalkDir::new(path); let path_buf = path.to_path_buf();
while let Some(entry) = walkdir.next().await { let hidden = self.args.hidden.to_string();
if let Ok(entry) = entry { let running = self.running.clone();
if !entry let search = search.to_lowercase();
.file_name() let search_paths = tokio::task::spawn_blocking(move || {
.to_string_lossy() let mut it = WalkDir::new(&path_buf).into_iter();
.to_lowercase() let mut paths: Vec<PathBuf> = vec![];
.contains(&query.to_lowercase()) while let Some(Ok(entry)) = it.next() {
{ if !running.load(Ordering::SeqCst) {
break;
}
let entry_path = entry.path();
let base_name = get_file_name(entry_path);
let file_type = entry.file_type();
if is_hidden(&hidden, base_name) {
if file_type.is_dir() {
it.skip_current_dir();
}
continue; continue;
} }
if fs::symlink_metadata(entry.path()).await.is_err() { if !base_name.to_lowercase().contains(&search) {
continue; continue;
} }
if let Ok(Some(item)) = self.to_pathitem(entry.path(), path.to_path_buf()).await { if entry.path().symlink_metadata().is_err() {
continue;
}
paths.push(entry_path.to_path_buf());
}
paths
})
.await?;
for search_path in search_paths.into_iter() {
if let Ok(Some(item)) = self.to_pathitem(search_path, path.to_path_buf()).await {
paths.push(item); paths.push(item);
} }
} }
}
self.send_index(path, paths, true, head_only, res) self.send_index(path, paths, true, head_only, res)
} }
@@ -330,10 +388,7 @@ impl Server {
res: &mut Response, res: &mut Response,
) -> BoxResult<()> { ) -> BoxResult<()> {
let (mut writer, reader) = tokio::io::duplex(BUF_SIZE); let (mut writer, reader) = tokio::io::duplex(BUF_SIZE);
let filename = path let filename = try_get_file_name(path)?;
.file_name()
.and_then(|v| v.to_str())
.ok_or_else(|| format!("Failed to get name of `{}`", path.display()))?;
res.headers_mut().insert( res.headers_mut().insert(
CONTENT_DISPOSITION, CONTENT_DISPOSITION,
HeaderValue::from_str(&format!( HeaderValue::from_str(&format!(
@@ -348,13 +403,15 @@ impl Server {
return Ok(()); return Ok(());
} }
let path = path.to_owned(); let path = path.to_owned();
let hidden = self.args.hidden.clone();
let running = self.running.clone();
tokio::spawn(async move { tokio::spawn(async move {
if let Err(e) = zip_dir(&mut writer, &path).await { if let Err(e) = zip_dir(&mut writer, &path, &hidden, running).await {
error!("Failed to zip {}, {}", path.display(), e); error!("Failed to zip {}, {}", path.display(), e);
} }
}); });
let stream = ReaderStream::new(reader); let reader = Streamer::new(reader, BUF_SIZE);
*res.body_mut() = Body::wrap_stream(stream); *res.body_mut() = Body::wrap_stream(reader.into_stream());
Ok(()) Ok(())
} }
@@ -365,15 +422,17 @@ impl Server {
head_only: bool, head_only: bool,
res: &mut Response, res: &mut Response,
) -> BoxResult<()> { ) -> BoxResult<()> {
let path = path.join(INDEX_NAME); let index_path = path.join(INDEX_NAME);
if fs::metadata(&path) if fs::metadata(&index_path)
.await .await
.ok() .ok()
.map(|v| v.is_file()) .map(|v| v.is_file())
.unwrap_or_default() .unwrap_or_default()
{ {
self.handle_send_file(&path, headers, head_only, res) self.handle_send_file(&index_path, headers, head_only, res)
.await?; .await?;
} else if self.args.render_try_index {
self.handle_ls_dir(path, true, head_only, res).await?;
} else { } else {
status_not_found(res) status_not_found(res)
} }
@@ -397,23 +456,38 @@ impl Server {
Ok(()) Ok(())
} }
async fn handle_send_favicon( async fn handle_embed_assets(&self, req_path: &str, res: &mut Response) -> BoxResult<bool> {
&self, if let Some(name) = req_path.strip_prefix(&self.assets_prefix) {
headers: &HeaderMap<HeaderValue>, match name {
res: &mut Response, "index.js" => {
) -> BoxResult<()> { *res.body_mut() = Body::from(INDEX_JS);
let path = self.args.path.join("favicon.ico"); res.headers_mut().insert(
let meta = fs::metadata(&path).await.ok(); "content-type",
let is_file = meta.map(|v| v.is_file()).unwrap_or_default(); HeaderValue::from_static("application/javascript"),
if is_file { );
self.handle_send_file(path.as_path(), headers, false, res) }
.await?; "index.css" => {
} else { *res.body_mut() = Body::from(INDEX_CSS);
res.headers_mut()
.insert("content-type", HeaderValue::from_static("text/css"));
}
"favicon.ico" => {
*res.body_mut() = Body::from(FAVICON_ICO); *res.body_mut() = Body::from(FAVICON_ICO);
res.headers_mut() res.headers_mut()
.insert("content-type", HeaderValue::from_static("image/x-icon")); .insert("content-type", HeaderValue::from_static("image/x-icon"));
} }
Ok(()) _ => {
return Ok(false);
}
}
res.headers_mut().insert(
"cache-control",
HeaderValue::from_static("max-age=2592000, public"),
);
Ok(true)
} else {
Ok(false)
}
} }
async fn handle_send_file( async fn handle_send_file(
@@ -425,7 +499,7 @@ impl Server {
) -> BoxResult<()> { ) -> BoxResult<()> {
let (file, meta) = tokio::join!(fs::File::open(path), fs::metadata(path),); let (file, meta) = tokio::join!(fs::File::open(path), fs::metadata(path),);
let (mut file, meta) = (file?, meta?); let (mut file, meta) = (file?, meta?);
let mut maybe_range = true; let mut use_range = true;
if let Some((etag, last_modified)) = extract_cache_headers(&meta) { if let Some((etag, last_modified)) = extract_cache_headers(&meta) {
let cached = { let cached = {
if let Some(if_none_match) = headers.typed_get::<IfNoneMatch>() { if let Some(if_none_match) = headers.typed_get::<IfNoneMatch>() {
@@ -436,55 +510,84 @@ impl Server {
false false
} }
}; };
res.headers_mut().typed_insert(last_modified);
res.headers_mut().typed_insert(etag.clone());
if cached { if cached {
*res.status_mut() = StatusCode::NOT_MODIFIED; *res.status_mut() = StatusCode::NOT_MODIFIED;
return Ok(()); return Ok(());
} }
res.headers_mut().typed_insert(last_modified);
res.headers_mut().typed_insert(etag.clone());
if headers.typed_get::<Range>().is_some() { if headers.typed_get::<Range>().is_some() {
maybe_range = headers use_range = headers
.typed_get::<IfRange>() .typed_get::<IfRange>()
.map(|if_range| !if_range.is_modified(Some(&etag), Some(&last_modified))) .map(|if_range| !if_range.is_modified(Some(&etag), Some(&last_modified)))
// Always be fresh if there is no validators // Always be fresh if there is no validators
.unwrap_or(true); .unwrap_or(true);
} else { } else {
maybe_range = false; use_range = false;
} }
} }
let file_range = if maybe_range {
if let Some(content_range) = headers let range = if use_range {
.typed_get::<Range>() parse_range(headers)
.and_then(|range| to_content_range(&range, meta.len()))
{
res.headers_mut().typed_insert(content_range.clone());
*res.status_mut() = StatusCode::PARTIAL_CONTENT;
content_range.bytes_range()
} else {
None
}
} else { } else {
None None
}; };
if let Some(mime) = mime_guess::from_path(&path).first() { if let Some(mime) = mime_guess::from_path(&path).first() {
res.headers_mut().typed_insert(ContentType::from(mime)); res.headers_mut().typed_insert(ContentType::from(mime));
} else {
res.headers_mut().insert(
CONTENT_TYPE,
HeaderValue::from_static("application/octet-stream"),
);
} }
let filename = try_get_file_name(path)?;
res.headers_mut().insert(
CONTENT_DISPOSITION,
HeaderValue::from_str(&format!("inline; filename=\"{}\"", encode_uri(filename),))
.unwrap(),
);
res.headers_mut().typed_insert(AcceptRanges::bytes()); res.headers_mut().typed_insert(AcceptRanges::bytes());
let size = meta.len();
if let Some(range) = range {
if range
.end
.map_or_else(|| range.start < size, |v| v >= range.start)
&& file.seek(SeekFrom::Start(range.start)).await.is_ok()
{
let end = range.end.unwrap_or(size - 1).min(size - 1);
let part_size = end - range.start + 1;
let reader = Streamer::new(file, BUF_SIZE);
*res.status_mut() = StatusCode::PARTIAL_CONTENT;
let content_range = format!("bytes {}-{}/{}", range.start, end, size);
res.headers_mut() res.headers_mut()
.typed_insert(ContentLength(meta.len() as u64)); .insert(CONTENT_RANGE, content_range.parse().unwrap());
res.headers_mut()
.insert(CONTENT_LENGTH, format!("{}", part_size).parse().unwrap());
if head_only { if head_only {
return Ok(()); return Ok(());
} }
*res.body_mut() = Body::wrap_stream(reader.into_stream_sized(part_size));
let body = if let Some((begin, end)) = file_range {
file.seek(io::SeekFrom::Start(begin)).await?;
let stream = FramedRead::new(file.take(end - begin + 1), BytesCodec::new());
Body::wrap_stream(stream)
} else { } else {
let stream = FramedRead::new(file, BytesCodec::new()); *res.status_mut() = StatusCode::RANGE_NOT_SATISFIABLE;
Body::wrap_stream(stream) res.headers_mut()
}; .insert(CONTENT_RANGE, format!("bytes */{}", size).parse().unwrap());
*res.body_mut() = body; }
} else {
res.headers_mut()
.insert(CONTENT_LENGTH, format!("{}", size).parse().unwrap());
if head_only {
return Ok(());
}
let reader = Streamer::new(file, BUF_SIZE);
*res.body_mut() = Body::wrap_stream(reader.into_stream());
}
Ok(()) Ok(())
} }
@@ -540,16 +643,10 @@ impl Server {
Ok(()) Ok(())
} }
async fn handle_copy( async fn handle_copy(&self, path: &Path, req: &Request, res: &mut Response) -> BoxResult<()> {
&self, let dest = match self.extract_dest(req, res) {
path: &Path,
headers: &HeaderMap<HeaderValue>,
res: &mut Response,
) -> BoxResult<()> {
let dest = match self.extract_dest(headers) {
Some(dest) => dest, Some(dest) => dest,
None => { None => {
*res.status_mut() = StatusCode::BAD_REQUEST;
return Ok(()); return Ok(());
} }
}; };
@@ -568,16 +665,10 @@ impl Server {
Ok(()) Ok(())
} }
async fn handle_move( async fn handle_move(&self, path: &Path, req: &Request, res: &mut Response) -> BoxResult<()> {
&self, let dest = match self.extract_dest(req, res) {
path: &Path,
headers: &HeaderMap<HeaderValue>,
res: &mut Response,
) -> BoxResult<()> {
let dest = match self.extract_dest(headers) {
Some(dest) => dest, Some(dest) => dest,
None => { None => {
*res.status_mut() = StatusCode::BAD_REQUEST;
return Ok(()); return Ok(());
} }
}; };
@@ -590,11 +681,11 @@ impl Server {
Ok(()) Ok(())
} }
async fn handle_lock(&self, req_path: &str, res: &mut Response) -> BoxResult<()> { async fn handle_lock(&self, req_path: &str, auth: bool, res: &mut Response) -> BoxResult<()> {
let token = if self.args.auth.is_none() { let token = if auth {
Utc::now().timestamp().to_string()
} else {
format!("opaquelocktoken:{}", Uuid::new_v4()) format!("opaquelocktoken:{}", Uuid::new_v4())
} else {
Utc::now().timestamp().to_string()
}; };
res.headers_mut().insert( res.headers_mut().insert(
@@ -640,33 +731,32 @@ impl Server {
res: &mut Response, res: &mut Response,
) -> BoxResult<()> { ) -> BoxResult<()> {
paths.sort_unstable(); paths.sort_unstable();
let rel_path = match self.args.path.parent() { let href = format!("/{}", normalize_path(path.strip_prefix(&self.args.path)?));
Some(p) => path.strip_prefix(p).unwrap(),
None => path,
};
let data = IndexData { let data = IndexData {
breadcrumb: normalize_path(rel_path), href,
uri_prefix: self.args.uri_prefix.clone(),
paths, paths,
allow_upload: self.args.allow_upload, allow_upload: self.args.allow_upload,
allow_delete: self.args.allow_delete, allow_delete: self.args.allow_delete,
allow_search: self.args.allow_search,
dir_exists: exist, dir_exists: exist,
}; };
let data = serde_json::to_string(&data).unwrap(); let data = serde_json::to_string(&data).unwrap();
let asset_js = format!("{}index.js", self.assets_prefix);
let asset_css = format!("{}index.css", self.assets_prefix);
let asset_ico = format!("{}favicon.ico", self.assets_prefix);
let output = INDEX_HTML.replace( let output = INDEX_HTML.replace(
"__SLOT__", "__SLOT__",
&format!( &format!(
r#" r#"
<title>Files in {}/ - Duf</title> <link rel="icon" type="image/x-icon" href="{}">
<style>{}</style> <link rel="stylesheet" href="{}">
<script> <script>
const DATA = DATA = {}
{} </script>
{}</script> <script src="{}"></script>
"#, "#,
rel_path.display(), asset_ico, asset_css, data, asset_js
INDEX_CSS,
data,
INDEX_JS
), ),
); );
res.headers_mut() res.headers_mut()
@@ -680,34 +770,13 @@ const DATA =
Ok(()) Ok(())
} }
fn auth_guard(&self, req: &Request, res: &mut Response) -> bool { fn auth_reject(&self, res: &mut Response) {
let method = req.method(); let value = self.args.auth_method.www_auth(false);
let pass = {
match &self.args.auth {
None => true,
Some((user, pass)) => match req.headers().get(AUTHORIZATION) {
Some(value) => {
valid_digest(value, method.as_str(), user.as_str(), pass.as_str()).is_some()
}
None => {
self.args.no_auth_access
&& (method == Method::GET
|| method == Method::OPTIONS
|| method == Method::HEAD
|| method.as_str() == "PROPFIND")
}
},
}
};
if !pass {
let value = generate_www_auth(false);
set_webdav_headers(res); set_webdav_headers(res);
*res.status_mut() = StatusCode::UNAUTHORIZED;
res.headers_mut().typed_insert(Connection::close()); res.headers_mut().typed_insert(Connection::close());
res.headers_mut() res.headers_mut()
.insert(WWW_AUTHENTICATE, value.parse().unwrap()); .insert(WWW_AUTHENTICATE, value.parse().unwrap());
} *res.status_mut() = StatusCode::UNAUTHORIZED;
pass
} }
async fn is_root_contained(&self, path: &Path) -> bool { async fn is_root_contained(&self, path: &Path) -> bool {
@@ -718,14 +787,47 @@ const DATA =
.unwrap_or_default() .unwrap_or_default()
} }
fn extract_dest(&self, headers: &HeaderMap<HeaderValue>) -> Option<PathBuf> { fn extract_dest(&self, req: &Request, res: &mut Response) -> Option<PathBuf> {
let headers = req.headers();
let dest_path = match self.extract_destination_header(headers) {
Some(dest) => dest,
None => {
*res.status_mut() = StatusCode::BAD_REQUEST;
return None;
}
};
let authorization = headers.get(AUTHORIZATION);
let guard_type = self.args.auth.guard(
&dest_path,
req.method(),
authorization,
self.args.auth_method.clone(),
);
if guard_type.is_reject() {
*res.status_mut() = StatusCode::FORBIDDEN;
*res.body_mut() = Body::from("Forbidden");
return None;
}
let dest = match self.extract_path(&dest_path) {
Some(dest) => dest,
None => {
*res.status_mut() = StatusCode::BAD_REQUEST;
return None;
}
};
Some(dest)
}
fn extract_destination_header(&self, headers: &HeaderMap<HeaderValue>) -> Option<String> {
let dest = headers.get("Destination")?.to_str().ok()?; let dest = headers.get("Destination")?.to_str().ok()?;
let uri: Uri = dest.parse().ok()?; let uri: Uri = dest.parse().ok()?;
self.extract_path(uri.path()) Some(uri.path().to_string())
} }
fn extract_path(&self, path: &str) -> Option<PathBuf> { fn extract_path(&self, path: &str) -> Option<PathBuf> {
let decoded_path = percent_decode(path[1..].as_bytes()).decode_utf8().ok()?; let decoded_path = decode_uri(&path[1..])?;
let slashes_switched = if cfg!(windows) { let slashes_switched = if cfg!(windows) {
decoded_path.replace('/', "\\") decoded_path.replace('/', "\\")
} else { } else {
@@ -752,6 +854,10 @@ const DATA =
let mut rd = fs::read_dir(entry_path).await?; let mut rd = fs::read_dir(entry_path).await?;
while let Ok(Some(entry)) = rd.next_entry().await { while let Ok(Some(entry)) = rd.next_entry().await {
let entry_path = entry.path(); let entry_path = entry.path();
let base_name = get_file_name(&entry_path);
if is_hidden(&self.args.hidden, base_name) {
continue;
}
if let Ok(Some(item)) = self.to_pathitem(entry_path.as_path(), base_path).await { if let Ok(Some(item)) = self.to_pathitem(entry_path.as_path(), base_path).await {
paths.push(item); paths.push(item);
} }
@@ -796,10 +902,12 @@ const DATA =
#[derive(Debug, Serialize)] #[derive(Debug, Serialize)]
struct IndexData { struct IndexData {
breadcrumb: String, href: String,
uri_prefix: String,
paths: Vec<PathItem>, paths: Vec<PathItem>,
allow_upload: bool, allow_upload: bool,
allow_delete: bool, allow_delete: bool,
allow_search: bool,
dir_exists: bool, dir_exists: bool,
} }
@@ -858,11 +966,8 @@ impl PathItem {
), ),
} }
} }
fn base_name(&self) -> &str { pub fn base_name(&self) -> &str {
Path::new(&self.name) self.name.split('/').last().unwrap_or_default()
.file_name()
.and_then(|v| v.to_str())
.unwrap_or_default()
} }
} }
@@ -926,30 +1031,55 @@ fn res_multistatus(res: &mut Response, content: &str) {
)); ));
} }
async fn zip_dir<W: AsyncWrite + Unpin>(writer: &mut W, dir: &Path) -> BoxResult<()> { async fn zip_dir<W: AsyncWrite + Unpin>(
writer: &mut W,
dir: &Path,
hidden: &str,
running: Arc<AtomicBool>,
) -> BoxResult<()> {
let mut writer = ZipFileWriter::new(writer); let mut writer = ZipFileWriter::new(writer);
let mut walkdir = WalkDir::new(dir); let hidden = Arc::new(hidden.to_string());
while let Some(entry) = walkdir.next().await { let hidden = hidden.to_string();
if let Ok(entry) = entry { let dir_path_buf = dir.to_path_buf();
let zip_paths = tokio::task::spawn_blocking(move || {
let mut it = WalkDir::new(&dir_path_buf).into_iter();
let mut paths: Vec<PathBuf> = vec![];
while let Some(Ok(entry)) = it.next() {
if !running.load(Ordering::SeqCst) {
break;
}
let entry_path = entry.path(); let entry_path = entry.path();
let meta = match fs::symlink_metadata(entry.path()).await { let base_name = get_file_name(entry_path);
Ok(meta) => meta, let file_type = entry.file_type();
Err(_) => continue, if is_hidden(&hidden, base_name) {
}; if file_type.is_dir() {
if !meta.is_file() { it.skip_current_dir();
}
continue; continue;
} }
let filename = match entry_path.strip_prefix(dir).ok().and_then(|v| v.to_str()) { if entry.path().symlink_metadata().is_err() {
continue;
}
if !file_type.is_file() {
continue;
}
paths.push(entry_path.to_path_buf());
}
paths
})
.await?;
for zip_path in zip_paths.into_iter() {
let filename = match zip_path.strip_prefix(dir).ok().and_then(|v| v.to_str()) {
Some(v) => v, Some(v) => v,
None => continue, None => continue,
}; };
let entry_options = EntryOptions::new(filename.to_owned(), Compression::Deflate); let entry_options =
let mut file = File::open(&entry_path).await?; EntryOptions::new(filename.to_owned(), Compression::Deflate).unix_permissions(0o644);
let mut file = File::open(&zip_path).await?;
let mut file_writer = writer.write_entry_stream(entry_options).await?; let mut file_writer = writer.write_entry_stream(entry_options).await?;
io::copy(&mut file, &mut file_writer).await?; io::copy(&mut file, &mut file_writer).await?;
file_writer.close().await?; file_writer.close().await?;
} }
}
writer.close().await?; writer.close().await?;
Ok(()) Ok(())
} }
@@ -965,51 +1095,54 @@ fn extract_cache_headers(meta: &Metadata) -> Option<(ETag, LastModified)> {
Some((etag, last_modified)) Some((etag, last_modified))
} }
fn to_content_range(range: &Range, complete_length: u64) -> Option<ContentRange> { #[derive(Debug)]
use core::ops::Bound::{Included, Unbounded}; struct RangeValue {
let mut iter = range.iter(); start: u64,
let bounds = iter.next(); end: Option<u64>,
if iter.next().is_some() {
// Found multiple byte-range-spec. Drop.
return None;
}
bounds.and_then(|b| match b {
(Included(start), Included(end)) if start <= end && start < complete_length => {
ContentRange::bytes(
start..=end.min(complete_length.saturating_sub(1)),
complete_length,
)
.ok()
}
(Included(start), Unbounded) if start < complete_length => {
ContentRange::bytes(start.., complete_length).ok()
}
(Unbounded, Included(end)) if end > 0 => {
ContentRange::bytes(complete_length.saturating_sub(end).., complete_length).ok()
}
_ => None,
})
} }
fn encode_uri(v: &str) -> String { fn parse_range(headers: &HeaderMap<HeaderValue>) -> Option<RangeValue> {
let parts: Vec<_> = v.split('/').map(urlencoding::encode).collect(); let range_hdr = headers.get(RANGE)?;
parts.join("/") let hdr = range_hdr.to_str().ok()?;
let mut sp = hdr.splitn(2, '=');
let units = sp.next().unwrap();
if units == "bytes" {
let range = sp.next()?;
let mut sp_range = range.splitn(2, '-');
let start: u64 = sp_range.next().unwrap().parse().ok()?;
let end: Option<u64> = if let Some(end) = sp_range.next() {
if end.is_empty() {
None
} else {
Some(end.parse().ok()?)
}
} else {
None
};
Some(RangeValue { start, end })
} else {
None
}
} }
fn status_forbid(res: &mut Response) { fn status_forbid(res: &mut Response) {
*res.status_mut() = StatusCode::FORBIDDEN; *res.status_mut() = StatusCode::FORBIDDEN;
*res.body_mut() = Body::from("Forbidden");
} }
fn status_not_found(res: &mut Response) { fn status_not_found(res: &mut Response) {
*res.status_mut() = StatusCode::NOT_FOUND; *res.status_mut() = StatusCode::NOT_FOUND;
*res.body_mut() = Body::from("Not Found");
} }
fn status_no_content(res: &mut Response) { fn status_no_content(res: &mut Response) {
*res.status_mut() = StatusCode::NO_CONTENT; *res.status_mut() = StatusCode::NO_CONTENT;
} }
fn is_hidden(hidden: &str, file_name: &str) -> bool {
hidden.contains(&format!(",{},", file_name))
}
fn set_webdav_headers(res: &mut Response) { fn set_webdav_headers(res: &mut Response) {
res.headers_mut().insert( res.headers_mut().insert(
"Allow", "Allow",

68
src/streamer.rs Normal file
View File

@@ -0,0 +1,68 @@
use async_stream::stream;
use futures::{Stream, StreamExt};
use std::io::Error;
use std::pin::Pin;
use tokio::io::{AsyncRead, AsyncReadExt};
/// Wraps an [`AsyncRead`] source so it can be consumed as a `Stream`
/// of `Vec<u8>` chunks (see the `impl` below for the conversion methods).
pub struct Streamer<R>
where
    R: AsyncRead + Unpin + Send + 'static,
{
    // Underlying async byte source the chunks are read from.
    reader: R,
    // Upper bound on the size of each yielded chunk, in bytes.
    buf_size: usize,
}
impl<R> Streamer<R>
where
    R: AsyncRead + Unpin + Send + 'static,
{
    /// Creates a streamer reading at most `buf_size` bytes per chunk.
    #[inline]
    pub fn new(reader: R, buf_size: usize) -> Self {
        Self { reader, buf_size }
    }
    /// Streams the reader to exhaustion, yielding one `Ok(chunk)` per read.
    ///
    /// Each chunk holds at most `buf_size` bytes; the stream ends on EOF
    /// (a zero-byte read).
    pub fn into_stream(
        mut self,
    ) -> Pin<Box<impl ?Sized + Stream<Item = Result<Vec<u8>, Error>> + 'static>> {
        let stream = stream! {
            loop {
                // Fresh buffer each iteration: it is moved out by the yield below.
                let mut buf = vec![0; self.buf_size];
                let r = self.reader.read(&mut buf).await?;
                if r == 0 {
                    break
                }
                // Drop the unused tail so consumers only see real data.
                buf.truncate(r);
                yield Ok(buf);
            }
        };
        stream.boxed()
    }
    /// Like [`Self::into_stream`] but stops after at most `max_length`
    /// bytes have been yielded (or earlier on EOF).
    //
    // The `remaining as usize` cast below cannot truncate: that branch only
    // runs when `remaining < buf_size`, which already fits in a `usize`.
    pub fn into_stream_sized(
        mut self,
        max_length: u64,
    ) -> Pin<Box<impl ?Sized + Stream<Item = Result<Vec<u8>, Error>> + 'static>> {
        let stream = stream! {
            let mut remaining = max_length;
            loop {
                if remaining == 0 {
                    break;
                }
                // Never request more than the bytes still owed.
                let bs = if remaining >= self.buf_size as u64 {
                    self.buf_size
                } else {
                    remaining as usize
                };
                let mut buf = vec![0; bs];
                let r = self.reader.read(&mut buf).await?;
                if r == 0 {
                    break;
                } else {
                    buf.truncate(r);
                    yield Ok(buf);
                }
                remaining -= r as u64;
            }
        };
        stream.boxed()
    }
}

25
src/utils.rs Normal file
View File

@@ -0,0 +1,25 @@
use crate::BoxResult;
use std::{borrow::Cow, path::Path};
/// Percent-encodes each `/`-separated segment of `v`, keeping the
/// separating slashes intact.
pub fn encode_uri(v: &str) -> String {
    let mut encoded_parts = Vec::new();
    for part in v.split('/') {
        encoded_parts.push(urlencoding::encode(part));
    }
    encoded_parts.join("/")
}
/// Percent-decodes `v`; returns `None` when the decoded bytes are not
/// valid UTF-8.
pub fn decode_uri(v: &str) -> Option<Cow<str>> {
    let decoded = percent_encoding::percent_decode(v.as_bytes());
    match decoded.decode_utf8() {
        Ok(text) => Some(text),
        Err(_) => None,
    }
}
/// Returns the final path component as `&str`, or `""` when there is no
/// final component (e.g. `/`) or it is not valid UTF-8.
pub fn get_file_name(path: &Path) -> &str {
    match path.file_name().and_then(|name| name.to_str()) {
        Some(name) => name,
        None => "",
    }
}
/// Returns the final path component as `&str`, or an error whose message
/// names the offending path when the component is missing or not UTF-8.
pub fn try_get_file_name(path: &Path) -> BoxResult<&str> {
    match path.file_name().and_then(|v| v.to_str()) {
        Some(name) => Ok(name),
        None => Err(format!("Failed to get file name of `{}`", path.display()).into()),
    }
}

View File

@@ -59,3 +59,15 @@ fn allow_upload_delete_can_override(#[with(&["-A"])] server: TestServer) -> Resu
assert_eq!(resp.status(), 201); assert_eq!(resp.status(), 201);
Ok(()) Ok(())
} }
/// `?q=` searching works when the server runs with `--allow-search`.
#[rstest]
fn allow_search(#[with(&["--allow-search"])] server: TestServer) -> Result<(), Error> {
    let search_url = format!("{}?q={}", server.url(), "test.html");
    let resp = reqwest::blocking::get(search_url)?;
    assert_eq!(resp.status(), 200);
    let body = resp.text()?;
    let paths = utils::retrive_index_paths(&body);
    assert!(!paths.is_empty());
    assert!(paths.iter().all(|p| p.contains("test.html")));
    Ok(())
}

View File

@@ -1,13 +1,18 @@
//! Run file server with different args
mod fixtures; mod fixtures;
mod utils; mod utils;
use fixtures::{server, Error, TestServer}; use assert_cmd::prelude::*;
use assert_fs::fixture::TempDir;
use fixtures::{port, server, tmpdir, wait_for_port, Error, TestServer};
use rstest::rstest; use rstest::rstest;
use std::process::{Command, Stdio};
#[rstest] #[rstest]
fn path_prefix_index(#[with(&["--path-prefix", "xyz"])] server: TestServer) -> Result<(), Error> { fn path_prefix_index(#[with(&["--path-prefix", "xyz"])] server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(format!("{}{}", server.url(), "xyz"))?; let resp = reqwest::blocking::get(format!("{}{}", server.url(), "xyz"))?;
assert_index_resp!(resp); assert_resp_paths!(resp);
Ok(()) Ok(())
} }
@@ -28,3 +33,22 @@ fn path_prefix_propfind(
assert!(text.contains("<D:href>/xyz/</D:href>")); assert!(text.contains("<D:href>/xyz/</D:href>"));
Ok(()) Ok(())
} }
/// Serving a single file (rather than a directory) exposes just that file.
#[rstest]
#[case("index.html")]
fn serve_single_file(tmpdir: TempDir, port: u16, #[case] file: &str) -> Result<(), Error> {
    let mut child = Command::cargo_bin("dufs")?
        .arg(tmpdir.path().join(file))
        .arg("-p")
        .arg(port.to_string())
        .stdout(Stdio::piped())
        .spawn()?;
    wait_for_port(port);
    // Capture the outcome before asserting so the child is killed even when
    // the request or body read fails; otherwise the server process leaks.
    let body = reqwest::blocking::get(format!("http://localhost:{}/index.html", port))
        .and_then(|resp| resp.text());
    child.kill()?;
    assert_eq!(body?, "This is index.html");
    Ok(())
}

93
tests/assets.rs Normal file
View File

@@ -0,0 +1,93 @@
mod fixtures;
mod utils;
use fixtures::{server, Error, TestServer};
use rstest::rstest;
/// The index page references the version-stamped js/css/favicon assets.
#[rstest]
fn assets(server: TestServer) -> Result<(), Error> {
    let version = env!("CARGO_PKG_VERSION");
    let body = reqwest::blocking::get(server.url())?.text()?;
    let expectations = [
        format!(r#"href="/__dufs_v{}_index.css""#, version),
        format!(r#"href="/__dufs_v{}_favicon.ico""#, version),
        format!(r#"src="/__dufs_v{}_index.js""#, version),
    ];
    for needle in expectations.iter() {
        assert!(body.contains(needle));
    }
    Ok(())
}
/// The bundled `index.js` is served with the JavaScript content type.
#[rstest]
fn asset_js(server: TestServer) -> Result<(), Error> {
    let url = format!("{}__dufs_v{}_index.js", server.url(), env!("CARGO_PKG_VERSION"));
    let resp = reqwest::blocking::get(url)?;
    assert_eq!(resp.status(), 200);
    let content_type = resp.headers().get("content-type").unwrap();
    assert_eq!(content_type, "application/javascript");
    Ok(())
}
/// The bundled `index.css` is served with the CSS content type.
#[rstest]
fn asset_css(server: TestServer) -> Result<(), Error> {
    let url = format!("{}__dufs_v{}_index.css", server.url(), env!("CARGO_PKG_VERSION"));
    let resp = reqwest::blocking::get(url)?;
    assert_eq!(resp.status(), 200);
    let content_type = resp.headers().get("content-type").unwrap();
    assert_eq!(content_type, "text/css");
    Ok(())
}
/// The bundled `favicon.ico` is served with the icon content type.
#[rstest]
fn asset_ico(server: TestServer) -> Result<(), Error> {
    let url = format!("{}__dufs_v{}_favicon.ico", server.url(), env!("CARGO_PKG_VERSION"));
    let resp = reqwest::blocking::get(url)?;
    assert_eq!(resp.status(), 200);
    let content_type = resp.headers().get("content-type").unwrap();
    assert_eq!(content_type, "image/x-icon");
    Ok(())
}
/// With `--path-prefix`, asset links on the index page carry the prefix.
#[rstest]
fn assets_with_prefix(#[with(&["--path-prefix", "xyz"])] server: TestServer) -> Result<(), Error> {
    let version = env!("CARGO_PKG_VERSION");
    let body = reqwest::blocking::get(format!("{}xyz/", server.url()))?.text()?;
    let expectations = [
        format!(r#"href="/xyz/__dufs_v{}_index.css""#, version),
        format!(r#"href="/xyz/__dufs_v{}_favicon.ico""#, version),
        format!(r#"src="/xyz/__dufs_v{}_index.js""#, version),
    ];
    for needle in expectations.iter() {
        assert!(body.contains(needle));
    }
    Ok(())
}
/// With `--path-prefix`, the js asset also resolves under the prefix.
#[rstest]
fn asset_js_with_prefix(
    #[with(&["--path-prefix", "xyz"])] server: TestServer,
) -> Result<(), Error> {
    let url = format!("{}xyz/__dufs_v{}_index.js", server.url(), env!("CARGO_PKG_VERSION"));
    let resp = reqwest::blocking::get(url)?;
    assert_eq!(resp.status(), 200);
    let content_type = resp.headers().get("content-type").unwrap();
    assert_eq!(content_type, "application/javascript");
    Ok(())
}

View File

@@ -6,7 +6,7 @@ use fixtures::{server, Error, TestServer};
use rstest::rstest; use rstest::rstest;
#[rstest] #[rstest]
fn no_auth(#[with(&["--auth", "user:pass", "-A"])] server: TestServer) -> Result<(), Error> { fn no_auth(#[with(&["--auth", "/@user:pass", "-A"])] server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(server.url())?; let resp = reqwest::blocking::get(server.url())?;
assert_eq!(resp.status(), 401); assert_eq!(resp.status(), 401);
assert!(resp.headers().contains_key("www-authenticate")); assert!(resp.headers().contains_key("www-authenticate"));
@@ -17,7 +17,7 @@ fn no_auth(#[with(&["--auth", "user:pass", "-A"])] server: TestServer) -> Result
} }
#[rstest] #[rstest]
fn auth(#[with(&["--auth", "user:pass", "-A"])] server: TestServer) -> Result<(), Error> { fn auth(#[with(&["--auth", "/@user:pass", "-A"])] server: TestServer) -> Result<(), Error> {
let url = format!("{}file1", server.url()); let url = format!("{}file1", server.url());
let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?; let resp = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
assert_eq!(resp.status(), 401); assert_eq!(resp.status(), 401);
@@ -29,10 +29,95 @@ fn auth(#[with(&["--auth", "user:pass", "-A"])] server: TestServer) -> Result<()
} }
#[rstest] #[rstest]
fn auth_skip_access( fn auth_skip(#[with(&["--auth", "/@user:pass@*"])] server: TestServer) -> Result<(), Error> {
#[with(&["--auth", "user:pass", "--no-auth-access"])] server: TestServer,
) -> Result<(), Error> {
let resp = reqwest::blocking::get(server.url())?; let resp = reqwest::blocking::get(server.url())?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
Ok(()) Ok(())
} }
/// The second credential on an auth rule is a read-only account:
/// it can GET but not PUT.
#[rstest]
fn auth_readonly(
    #[with(&["--auth", "/@user:pass@user2:pass2", "-A"])] server: TestServer,
) -> Result<(), Error> {
    let read_url = format!("{}index.html", server.url());
    // Anonymous read is rejected.
    assert_eq!(fetch!(b"GET", &read_url).send()?.status(), 401);
    // The read-only user may read...
    let read_resp = fetch!(b"GET", &read_url).send_with_digest_auth("user2", "pass2")?;
    assert_eq!(read_resp.status(), 200);
    // ...but not write.
    let write_url = format!("{}file1", server.url());
    let write_resp = fetch!(b"PUT", &write_url)
        .body(b"abc".to_vec())
        .send_with_digest_auth("user2", "pass2")?;
    assert_eq!(write_resp.status(), 401);
    Ok(())
}
/// A dir-scoped credential works inside its dir, and the root credential
/// covers nested dirs as well.
#[rstest]
fn auth_nest(
    #[with(&["--auth", "/@user:pass@user2:pass2", "--auth", "/dira@user3:pass3", "-A"])]
    server: TestServer,
) -> Result<(), Error> {
    let url = format!("{}dira/file1", server.url());
    // No credentials: rejected.
    let anon = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
    assert_eq!(anon.status(), 401);
    // The dir-scoped account may write inside its dir.
    let scoped = fetch!(b"PUT", &url)
        .body(b"abc".to_vec())
        .send_with_digest_auth("user3", "pass3")?;
    assert_eq!(scoped.status(), 201);
    // The root account may write there too.
    let root = fetch!(b"PUT", &url)
        .body(b"abc".to_vec())
        .send_with_digest_auth("user", "pass")?;
    assert_eq!(root.status(), 201);
    Ok(())
}
/// A shared (`@*`) root rule keeps anonymous reads working outside the
/// separately-protected nested dir.
#[rstest]
fn auth_nest_share(
    #[with(&["--auth", "/@user:pass@*", "--auth", "/dira@user3:pass3", "-A"])] server: TestServer,
) -> Result<(), Error> {
    let index_url = format!("{}index.html", server.url());
    let status = fetch!(b"GET", &index_url).send()?.status();
    assert_eq!(status, 200);
    Ok(())
}
/// With `--auth-method basic`, HTTP Basic credentials are accepted.
#[rstest]
fn auth_basic(
    #[with(&["--auth", "/@user:pass", "--auth-method", "basic", "-A"])] server: TestServer,
) -> Result<(), Error> {
    let url = format!("{}file1", server.url());
    // Unauthenticated writes are rejected.
    let anon = fetch!(b"PUT", &url).body(b"abc".to_vec()).send()?;
    assert_eq!(anon.status(), 401);
    // Basic-authenticated writes succeed.
    let authed = fetch!(b"PUT", &url)
        .body(b"abc".to_vec())
        .basic_auth("user", Some("pass"))
        .send()?;
    assert_eq!(authed.status(), 201);
    Ok(())
}
/// A dir-scoped account cannot MOVE a file to a destination outside
/// its authorized dir.
#[rstest]
fn auth_webdav_move(
    #[with(&["--auth", "/@user:pass@*", "--auth", "/dira@user3:pass3", "-A"])] server: TestServer,
) -> Result<(), Error> {
    let source_url = format!("{}dira/test.html", server.url());
    let dest_url = format!("{}test2.html", server.url());
    let resp = fetch!(b"MOVE", &source_url)
        .header("Destination", &dest_url)
        .send_with_digest_auth("user3", "pass3")?;
    assert_eq!(resp.status(), 403);
    Ok(())
}
/// A dir-scoped account cannot COPY a file to a destination outside
/// its authorized dir.
#[rstest]
fn auth_webdav_copy(
    #[with(&["--auth", "/@user:pass@*", "--auth", "/dira@user3:pass3", "-A"])] server: TestServer,
) -> Result<(), Error> {
    let source_url = format!("{}dira/test.html", server.url());
    let dest_url = format!("{}test2.html", server.url());
    let resp = fetch!(b"COPY", &source_url)
        .header("Destination", &dest_url)
        .send_with_digest_auth("user3", "pass3")?;
    assert_eq!(resp.status(), 403);
    Ok(())
}

View File

@@ -1,19 +1,18 @@
mod fixtures; mod fixtures;
use fixtures::{port, server, tmpdir, Error, TestServer}; use fixtures::{port, server, tmpdir, wait_for_port, Error, TestServer};
use assert_cmd::prelude::*; use assert_cmd::prelude::*;
use assert_fs::fixture::TempDir; use assert_fs::fixture::TempDir;
use regex::Regex; use regex::Regex;
use rstest::rstest; use rstest::rstest;
use std::io::{BufRead, BufReader}; use std::io::Read;
use std::process::{Command, Stdio}; use std::process::{Command, Stdio};
#[rstest] #[rstest]
#[case(&["-b", "20.205.243.166"])] #[case(&["-b", "20.205.243.166"])]
fn bind_fails(tmpdir: TempDir, port: u16, #[case] args: &[&str]) -> Result<(), Error> { fn bind_fails(tmpdir: TempDir, port: u16, #[case] args: &[&str]) -> Result<(), Error> {
Command::cargo_bin("duf")? Command::cargo_bin("dufs")?
.env("RUST_LOG", "false")
.arg(tmpdir.path()) .arg(tmpdir.path())
.arg("-p") .arg("-p")
.arg(port.to_string()) .arg(port.to_string())
@@ -50,8 +49,7 @@ fn bind_ipv4_ipv6(
#[case(&[] as &[&str])] #[case(&[] as &[&str])]
#[case(&["--path-prefix", "/prefix"])] #[case(&["--path-prefix", "/prefix"])]
fn validate_printed_urls(tmpdir: TempDir, port: u16, #[case] args: &[&str]) -> Result<(), Error> { fn validate_printed_urls(tmpdir: TempDir, port: u16, #[case] args: &[&str]) -> Result<(), Error> {
let mut child = Command::cargo_bin("duf")? let mut child = Command::cargo_bin("dufs")?
.env("RUST_LOG", "false")
.arg(tmpdir.path()) .arg(tmpdir.path())
.arg("-p") .arg("-p")
.arg(port.to_string()) .arg(port.to_string())
@@ -59,22 +57,25 @@ fn validate_printed_urls(tmpdir: TempDir, port: u16, #[case] args: &[&str]) -> R
.stdout(Stdio::piped()) .stdout(Stdio::piped())
.spawn()?; .spawn()?;
// WARN assumes urls list is terminated by an empty line wait_for_port(port);
let url_lines = BufReader::new(child.stdout.take().unwrap())
let stdout = child.stdout.as_mut().expect("Failed to get stdout");
let mut buf = [0; 1000];
let buf_len = stdout.read(&mut buf)?;
let output = std::str::from_utf8(&buf[0..buf_len])?;
let url_lines = output
.lines() .lines()
.map(|line| line.expect("Error reading stdout"))
.take_while(|line| !line.is_empty()) /* non-empty lines */ .take_while(|line| !line.is_empty()) /* non-empty lines */
.collect::<Vec<_>>(); .collect::<Vec<_>>()
let url_lines = url_lines.join("\n"); .join("\n");
let urls = Regex::new(r"http://[a-zA-Z0-9\.\[\]:/]+") let urls = Regex::new(r"http://[a-zA-Z0-9\.\[\]:/]+")
.unwrap() .unwrap()
.captures_iter(url_lines.as_str()) .captures_iter(url_lines.as_str())
.map(|caps| caps.get(0).unwrap().as_str()) .filter_map(|caps| caps.get(0).map(|v| v.as_str()))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
assert!(!urls.is_empty()); assert!(!urls.is_empty());
for url in urls { for url in urls {
reqwest::blocking::get(url)?.error_for_status()?; reqwest::blocking::get(url)?.error_for_status()?;
} }

32
tests/cli.rs Normal file
View File

@@ -0,0 +1,32 @@
//! Run cli with different args, not starting a server
mod fixtures;
use assert_cmd::prelude::*;
use clap::ValueEnum;
use clap_complete::Shell;
use fixtures::Error;
use std::process::Command;
/// `-h` prints help and exits successfully.
#[test]
fn help_shows() -> Result<(), Error> {
    let mut cmd = Command::cargo_bin("dufs")?;
    cmd.arg("-h");
    cmd.assert().success();
    Ok(())
}
/// `--completions` prints a completion script for every supported shell
/// and exits successfully.
#[test]
fn print_completions() -> Result<(), Error> {
    for shell in Shell::value_variants() {
        Command::cargo_bin("dufs")?
            .arg("--completions")
            .arg(shell.to_string())
            .assert()
            .success();
    }
    Ok(())
}

View File

@@ -5,7 +5,7 @@ use fixtures::{server, Error, TestServer};
use rstest::rstest; use rstest::rstest;
#[rstest] #[rstest]
fn cors(#[with(&["--cors"])] server: TestServer) -> Result<(), Error> { fn cors(#[with(&["--enable-cors"])] server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(server.url())?; let resp = reqwest::blocking::get(server.url())?;
assert_eq!( assert_eq!(
@@ -21,7 +21,7 @@ fn cors(#[with(&["--cors"])] server: TestServer) -> Result<(), Error> {
} }
#[rstest] #[rstest]
fn cors_options(#[with(&["--cors"])] server: TestServer) -> Result<(), Error> { fn cors_options(#[with(&["--enable-cors"])] server: TestServer) -> Result<(), Error> {
let resp = fetch!(b"OPTIONS", server.url()).send()?; let resp = fetch!(b"OPTIONS", server.url()).send()?;
assert_eq!( assert_eq!(

View File

@@ -1,25 +0,0 @@
mod fixtures;
mod utils;
use fixtures::{server, Error, TestServer};
use rstest::rstest;
#[rstest]
fn default_favicon(server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(format!("{}favicon.ico", server.url()))?;
assert_eq!(resp.status(), 200);
assert_eq!(resp.headers().get("content-type").unwrap(), "image/x-icon");
Ok(())
}
#[rstest]
fn exist_favicon(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
let url = format!("{}favicon.ico", server.url());
let data = b"abc";
let resp = fetch!(b"PUT", &url).body(data.to_vec()).send()?;
assert_eq!(resp.status(), 201);
let resp = reqwest::blocking::get(url)?;
assert_eq!(resp.status(), 200);
assert_eq!(resp.bytes()?, data.to_vec());
Ok(())
}

View File

@@ -13,33 +13,23 @@ pub type Error = Box<dyn std::error::Error>;
/// File names for testing purpose /// File names for testing purpose
#[allow(dead_code)] #[allow(dead_code)]
pub static FILES: &[&str] = &[ pub static FILES: &[&str] = &["test.txt", "test.html", "index.html", "😀.bin"];
"test.txt",
"test.html",
"index.html",
"test.mkv",
#[cfg(not(windows))]
"test \" \' & < >.csv",
"😀.data",
"⎙.mp4",
"#[]{}()@!$&'`+,;= %20.test",
#[cfg(unix)]
":?#[]{}<>()@!$&'`|*+,;= %20.test",
#[cfg(not(windows))]
"foo\\bar.test",
];
/// Directory names for testing purpose /// Directory names for testing diretory don't exist
#[allow(dead_code)]
pub static DIR_NO_FOUND: &str = "dir-no-found/";
/// Directory name for testing a directory that doesn't contain index.html
#[allow(dead_code)] #[allow(dead_code)]
pub static DIR_NO_INDEX: &str = "dir-no-index/"; pub static DIR_NO_INDEX: &str = "dir-no-index/";
/// Directory names for testing hidden
#[allow(dead_code)]
pub static DIR_GIT: &str = ".git/";
/// Directory names for testing purpose /// Directory names for testing purpose
#[allow(dead_code)] #[allow(dead_code)]
pub static DIRECTORIES: &[&str] = &["dira/", "dirb/", "dirc/", DIR_NO_INDEX]; pub static DIRECTORIES: &[&str] = &["dira/", "dirb/", "dirc/", DIR_NO_INDEX, DIR_GIT];
/// Name of a deeply nested file
#[allow(dead_code)]
pub static DEEPLY_NESTED_FILE: &str = "very/deeply/nested/test.rs";
/// Test fixture which creates a temporary directory with a few files and directories inside. /// Test fixture which creates a temporary directory with a few files and directories inside.
/// The directories also contain files. /// The directories also contain files.
@@ -55,7 +45,7 @@ pub fn tmpdir() -> TempDir {
} }
for directory in DIRECTORIES { for directory in DIRECTORIES {
for file in FILES { for file in FILES {
if *directory == DIR_NO_INDEX { if *directory == DIR_NO_INDEX && *file == "index.html" {
continue; continue;
} }
tmpdir tmpdir
@@ -65,10 +55,6 @@ pub fn tmpdir() -> TempDir {
} }
} }
tmpdir
.child(&DEEPLY_NESTED_FILE)
.write_str("File in a deeply nested directory.")
.expect("Couldn't write to file");
tmpdir tmpdir
} }
@@ -79,7 +65,7 @@ pub fn port() -> u16 {
free_local_port().expect("Couldn't find a free local port") free_local_port().expect("Couldn't find a free local port")
} }
/// Run duf as a server; Start with a temporary directory, a free port and some /// Run dufs as a server; Start with a temporary directory, a free port and some
/// optional arguments then wait for a while for the server setup to complete. /// optional arguments then wait for a while for the server setup to complete.
#[fixture] #[fixture]
#[allow(dead_code)] #[allow(dead_code)]
@@ -90,9 +76,8 @@ where
{ {
let port = port(); let port = port();
let tmpdir = tmpdir(); let tmpdir = tmpdir();
let child = Command::cargo_bin("duf") let child = Command::cargo_bin("dufs")
.expect("Couldn't find test binary") .expect("Couldn't find test binary")
.env("RUST_LOG", "false")
.arg(tmpdir.path()) .arg(tmpdir.path())
.arg("-p") .arg("-p")
.arg(port.to_string()) .arg(port.to_string())
@@ -118,9 +103,8 @@ where
{ {
let port = port(); let port = port();
let tmpdir = tmpdir(); let tmpdir = tmpdir();
let child = Command::cargo_bin("duf") let child = Command::cargo_bin("dufs")
.expect("Couldn't find test binary") .expect("Couldn't find test binary")
.env("RUST_LOG", "false")
.arg(tmpdir.path()) .arg(tmpdir.path())
.arg("-p") .arg("-p")
.arg(port.to_string()) .arg(port.to_string())
@@ -138,7 +122,7 @@ where
} }
/// Wait a max of 1s for the port to become available. /// Wait a max of 1s for the port to become available.
fn wait_for_port(port: u16) { pub fn wait_for_port(port: u16) {
let start_wait = Instant::now(); let start_wait = Instant::now();
while !port_check::is_port_reachable(format!("localhost:{}", port)) { while !port_check::is_port_reachable(format!("localhost:{}", port)) {

42
tests/hidden.rs Normal file
View File

@@ -0,0 +1,42 @@
mod fixtures;
mod utils;
use fixtures::{server, Error, TestServer};
use rstest::rstest;
/// Entries named by `--hidden` disappear from the index listing.
#[rstest]
#[case(server(&[] as &[&str]), true)]
#[case(server(&["--hidden", ".git,index.html"]), false)]
fn hidden_get_dir(#[case] server: TestServer, #[case] exist: bool) -> Result<(), Error> {
    let resp = reqwest::blocking::get(server.url())?;
    assert_eq!(resp.status(), 200);
    let body = resp.text()?;
    let paths = utils::retrive_index_paths(&body);
    for hidden_name in [".git/", "index.html"].iter() {
        assert_eq!(paths.contains(*hidden_name), exist);
    }
    Ok(())
}
/// Hidden entries are also omitted from WebDAV PROPFIND listings.
#[rstest]
#[case(server(&[] as &[&str]), true)]
#[case(server(&["--hidden", ".git,index.html"]), false)]
fn hidden_propfind_dir(#[case] server: TestServer, #[case] exist: bool) -> Result<(), Error> {
    let resp = fetch!(b"PROPFIND", server.url()).send()?;
    assert_eq!(resp.status(), 207);
    let body = resp.text()?;
    for href in ["<D:href>/.git/</D:href>", "<D:href>/index.html</D:href>"].iter() {
        assert_eq!(body.contains(href), exist);
    }
    Ok(())
}
/// Hidden entries (`--hidden`) are excluded from search results.
#[rstest]
#[case(server(&["--allow-search"] as &[&str]), true)]
#[case(server(&["--allow-search", "--hidden", ".git,test.html"]), false)]
fn hidden_search_dir(#[case] server: TestServer, #[case] exist: bool) -> Result<(), Error> {
    let resp = reqwest::blocking::get(format!("{}?q={}", server.url(), "test.html"))?;
    assert_eq!(resp.status(), 200);
    let paths = utils::retrive_index_paths(&resp.text()?);
    // Guard against a vacuous pass: when the entries are visible the search
    // must actually return matches, otherwise the loop below checks nothing.
    if exist {
        assert!(!paths.is_empty());
    }
    for p in paths {
        assert_eq!(p.contains("test.html"), exist);
    }
    Ok(())
}

View File

@@ -7,7 +7,7 @@ use rstest::rstest;
#[rstest] #[rstest]
fn get_dir(server: TestServer) -> Result<(), Error> { fn get_dir(server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(server.url())?; let resp = reqwest::blocking::get(server.url())?;
assert_index_resp!(resp); assert_resp_paths!(resp);
Ok(()) Ok(())
} }
@@ -63,19 +63,31 @@ fn head_dir_zip(server: TestServer) -> Result<(), Error> {
} }
#[rstest] #[rstest]
fn get_dir_search(server: TestServer) -> Result<(), Error> { fn get_dir_search(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(format!("{}?q={}", server.url(), "test.html"))?; let resp = reqwest::blocking::get(format!("{}?q={}", server.url(), "test.html"))?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
let paths = utils::retrive_index_paths(&resp.text()?); let paths = utils::retrive_index_paths(&resp.text()?);
assert!(!paths.is_empty()); assert!(!paths.is_empty());
for p in paths { for p in paths {
assert!(p.contains(&"test.html")); assert!(p.contains("test.html"));
} }
Ok(()) Ok(())
} }
#[rstest] #[rstest]
fn head_dir_search(server: TestServer) -> Result<(), Error> { fn get_dir_search2(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(format!("{}?q={}", server.url(), "😀.bin"))?;
assert_eq!(resp.status(), 200);
let paths = utils::retrive_index_paths(&resp.text()?);
assert!(!paths.is_empty());
for p in paths {
assert!(p.contains("😀.bin"));
}
Ok(())
}
#[rstest]
fn head_dir_search(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
let resp = fetch!(b"HEAD", format!("{}?q={}", server.url(), "test.html")).send()?; let resp = fetch!(b"HEAD", format!("{}?q={}", server.url(), "test.html")).send()?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
assert_eq!( assert_eq!(
@@ -105,6 +117,7 @@ fn head_file(server: TestServer) -> Result<(), Error> {
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
assert_eq!(resp.headers().get("content-type").unwrap(), "text/html"); assert_eq!(resp.headers().get("content-type").unwrap(), "text/html");
assert_eq!(resp.headers().get("accept-ranges").unwrap(), "bytes"); assert_eq!(resp.headers().get("accept-ranges").unwrap(), "bytes");
assert!(resp.headers().contains_key("content-disposition"));
assert!(resp.headers().contains_key("etag")); assert!(resp.headers().contains_key("etag"));
assert!(resp.headers().contains_key("last-modified")); assert!(resp.headers().contains_key("last-modified"));
assert!(resp.headers().contains_key("content-length")); assert!(resp.headers().contains_key("content-length"));

45
tests/range.rs Normal file
View File

@@ -0,0 +1,45 @@
mod fixtures;
mod utils;
use fixtures::{server, Error, TestServer};
use headers::HeaderValue;
use rstest::rstest;
#[rstest]
/// A satisfiable `Range: bytes=0-6` request yields 206 with exactly the
/// requested slice of the 18-byte index.html ("This is").
fn get_file_range(server: TestServer) -> Result<(), Error> {
    let target = format!("{}index.html", server.url());
    let range = HeaderValue::from_static("bytes=0-6");
    let resp = fetch!(b"GET", target).header("range", range).send()?;
    assert_eq!(resp.status(), 206);
    assert_eq!(resp.headers().get("content-range").unwrap(), "bytes 0-6/18");
    assert_eq!(resp.headers().get("accept-ranges").unwrap(), "bytes");
    assert_eq!(resp.headers().get("content-length").unwrap(), "7");
    assert_eq!(resp.text()?, "This is");
    Ok(())
}
#[rstest]
/// An end offset past EOF is clamped: bytes 12-20 of the 18-byte file
/// are served as 12-17, still with status 206.
fn get_file_range_beyond(server: TestServer) -> Result<(), Error> {
    let target = format!("{}index.html", server.url());
    let range = HeaderValue::from_static("bytes=12-20");
    let resp = fetch!(b"GET", target).header("range", range).send()?;
    assert_eq!(resp.status(), 206);
    let content_range = resp.headers().get("content-range").unwrap();
    assert_eq!(content_range, "bytes 12-17/18");
    assert_eq!(resp.headers().get("accept-ranges").unwrap(), "bytes");
    assert_eq!(resp.headers().get("content-length").unwrap(), "6");
    assert_eq!(resp.text()?, "x.html");
    Ok(())
}
#[rstest]
/// A range starting at/after EOF is unsatisfiable: 416 plus `bytes */18`.
fn get_file_range_invalid(server: TestServer) -> Result<(), Error> {
    let target = format!("{}index.html", server.url());
    let range = HeaderValue::from_static("bytes=20-");
    let resp = fetch!(b"GET", target).header("range", range).send()?;
    assert_eq!(resp.status(), 416);
    assert_eq!(resp.headers().get("content-range").unwrap(), "bytes */18");
    Ok(())
}

View File

@@ -1,6 +1,7 @@
mod fixtures; mod fixtures;
mod utils;
use fixtures::{server, Error, TestServer, DIR_NO_INDEX}; use fixtures::{server, Error, TestServer, DIR_NO_FOUND, DIR_NO_INDEX, FILES};
use rstest::rstest; use rstest::rstest;
#[rstest] #[rstest]
@@ -12,12 +13,55 @@ fn render_index(#[with(&["--render-index"])] server: TestServer) -> Result<(), E
} }
#[rstest] #[rstest]
fn render_index_404(#[with(&["--render-index"])] server: TestServer) -> Result<(), Error> { fn render_index2(#[with(&["--render-index"])] server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(format!("{}/{}", server.url(), DIR_NO_INDEX))?; let resp = reqwest::blocking::get(format!("{}{}", server.url(), DIR_NO_INDEX))?;
assert_eq!(resp.status(), 404); assert_eq!(resp.status(), 404);
Ok(()) Ok(())
} }
#[rstest]
/// With `--render-try-index`, the root directory serves its index.html
/// content directly instead of the generated listing.
fn render_try_index(#[with(&["--render-try-index"])] server: TestServer) -> Result<(), Error> {
    let resp = reqwest::blocking::get(server.url())?;
    // Assert the status before consuming the body — the sibling render
    // tests all do this; without it a non-200 response with a matching
    // body would pass unnoticed.
    assert_eq!(resp.status(), 200);
    let text = resp.text()?;
    assert_eq!(text, "This is index.html");
    Ok(())
}
#[rstest]
/// A directory without an index.html falls back to the generated listing,
/// which shows every fixture file except index.html.
fn render_try_index2(#[with(&["--render-try-index"])] server: TestServer) -> Result<(), Error> {
    let resp = reqwest::blocking::get(format!("{}{}", server.url(), DIR_NO_INDEX))?;
    // Everything in FILES except index.html should be listed.
    let mut files: Vec<&str> = Vec::new();
    for &name in FILES.iter() {
        if name != "index.html" {
            files.push(name);
        }
    }
    assert_resp_paths!(resp, files);
    Ok(())
}
#[rstest]
/// `?zip` still downloads the directory as a zip archive even when
/// `--render-try-index` is active.
fn render_try_index3(#[with(&["--render-try-index"])] server: TestServer) -> Result<(), Error> {
    let url = format!("{}{}?zip", server.url(), DIR_NO_INDEX);
    let resp = reqwest::blocking::get(url)?;
    assert_eq!(resp.status(), 200);
    let content_type = resp.headers().get("content-type").unwrap();
    assert_eq!(content_type, "application/zip");
    Ok(())
}
#[rstest]
#[case(server(&["--render-try-index"] as &[&str]), false)]
#[case(server(&["--render-try-index", "--allow-search"] as &[&str]), true)]
/// `?q=` under `--render-try-index` only filters the listing when
/// `--allow-search` is also enabled (`searched` per case above).
fn render_try_index4(#[case] server: TestServer, #[case] searched: bool) -> Result<(), Error> {
    let url = format!("{}{}?q={}", server.url(), DIR_NO_INDEX, "😀.bin");
    let resp = reqwest::blocking::get(url)?;
    assert_eq!(resp.status(), 200);
    let body = resp.text()?;
    let paths = utils::retrive_index_paths(&body);
    assert!(!paths.is_empty());
    let all_match = paths.iter().all(|p| p.contains("😀.bin"));
    assert_eq!(all_match, searched);
    Ok(())
}
#[rstest] #[rstest]
fn render_spa(#[with(&["--render-spa"])] server: TestServer) -> Result<(), Error> { fn render_spa(#[with(&["--render-spa"])] server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(server.url())?; let resp = reqwest::blocking::get(server.url())?;
@@ -27,8 +71,8 @@ fn render_spa(#[with(&["--render-spa"])] server: TestServer) -> Result<(), Error
} }
#[rstest] #[rstest]
fn render_spa_no_404(#[with(&["--render-spa"])] server: TestServer) -> Result<(), Error> { fn render_spa2(#[with(&["--render-spa"])] server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(format!("{}/{}", server.url(), DIR_NO_INDEX))?; let resp = reqwest::blocking::get(format!("{}{}", server.url(), DIR_NO_FOUND))?;
let text = resp.text()?; let text = resp.text()?;
assert_eq!(text, "This is index.html"); assert_eq!(text, "This is index.html");
Ok(()) Ok(())

View File

@@ -22,14 +22,14 @@ fn tls_works(#[case] server: TestServer) -> Result<(), Error> {
.danger_accept_invalid_certs(true) .danger_accept_invalid_certs(true)
.build()?; .build()?;
let resp = client.get(server.url()).send()?.error_for_status()?; let resp = client.get(server.url()).send()?.error_for_status()?;
assert_index_resp!(resp); assert_resp_paths!(resp);
Ok(()) Ok(())
} }
/// Wrong path for cert throws error. /// Wrong path for cert throws error.
#[rstest] #[rstest]
fn wrong_path_cert() -> Result<(), Error> { fn wrong_path_cert() -> Result<(), Error> {
Command::cargo_bin("duf")? Command::cargo_bin("dufs")?
.args(&["--tls-cert", "wrong", "--tls-key", "tests/data/key.pem"]) .args(&["--tls-cert", "wrong", "--tls-key", "tests/data/key.pem"])
.assert() .assert()
.failure() .failure()
@@ -41,7 +41,7 @@ fn wrong_path_cert() -> Result<(), Error> {
/// Wrong paths for key throws errors. /// Wrong paths for key throws errors.
#[rstest] #[rstest]
fn wrong_path_key() -> Result<(), Error> { fn wrong_path_key() -> Result<(), Error> {
Command::cargo_bin("duf")? Command::cargo_bin("dufs")?
.args(&["--tls-cert", "tests/data/cert.pem", "--tls-key", "wrong"]) .args(&["--tls-cert", "tests/data/cert.pem", "--tls-key", "wrong"])
.assert() .assert()
.failure() .failure()

View File

@@ -2,9 +2,9 @@ use serde_json::Value;
use std::collections::HashSet; use std::collections::HashSet;
#[macro_export] #[macro_export]
macro_rules! assert_index_resp { macro_rules! assert_resp_paths {
($resp:ident) => { ($resp:ident) => {
assert_index_resp!($resp, self::fixtures::FILES) assert_resp_paths!($resp, self::fixtures::FILES)
}; };
($resp:ident, $files:expr) => { ($resp:ident, $files:expr) => {
assert_eq!($resp.status(), 200); assert_eq!($resp.status(), 200);
@@ -37,12 +37,8 @@ pub fn encode_uri(v: &str) -> String {
fn retrive_index_paths_impl(index: &str) -> Option<HashSet<String>> { fn retrive_index_paths_impl(index: &str) -> Option<HashSet<String>> {
let lines: Vec<&str> = index.lines().collect(); let lines: Vec<&str> = index.lines().collect();
let (i, _) = lines let line = lines.iter().find(|v| v.contains("DATA ="))?;
.iter() let value: Value = line[7..].parse().ok()?;
.enumerate()
.find(|(_, v)| v.contains("const DATA"))?;
let line = lines.get(i + 1)?;
let value: Value = line.parse().ok()?;
let paths = value let paths = value
.get("paths")? .get("paths")?
.as_array()? .as_array()?