Compare commits

..

45 Commits

Author SHA1 Message Date
sigoden
3b3ea718d9 chore: improve readme 2022-09-09 21:43:40 +08:00
sigoden
3debf88da1 chore: improve readme 2022-09-09 21:37:07 +08:00
sigoden
7eaa6f2484 chore: undo hidden arg changes 2022-09-09 21:30:27 +08:00
sigoden
68def1c1d9 chore: update screenshot.png in readme 2022-09-09 21:22:03 +08:00
sigoden
868f4158f5 chore(release): version 0.30.0 2022-09-09 21:04:05 +08:00
sigoden
3063dca0a6 chore: update readme 2022-09-05 10:34:18 +08:00
sigoden
a74e40aee5 feat: add --assets options to override assets (#134)
* feat: add --assets options to override assets

* update readme
2022-09-05 10:30:45 +08:00
sigoden
bde06fef94 chore: refactor clap multiple_occurrences and multiple_values (#130) 2022-08-27 10:30:08 +08:00
sigoden
31c832a742 feat: support sort by name, mtime, size (#128) 2022-08-23 14:24:42 +08:00
Daniel Flannery
9f8171a22f chore: Corrected type in README (#127) 2022-08-17 07:41:02 +08:00
sigoden
0fb9f3b2c8 chore: update readme 2022-08-06 08:30:19 +08:00
sigoden
3ae75d3558 fix: hide path by ext name (#126) 2022-08-06 07:48:34 +08:00
sigoden
dff489398e chore(release): version v0.29.0 2022-08-03 09:05:39 +08:00
sigoden
64e397d18a chore: update --hidden help message 2022-08-03 08:58:52 +08:00
sigoden
cc0014c183 chore: fix typo 2022-08-03 08:51:12 +08:00
sigoden
a489c5647a fix: table row hover highlighting in dark mode (#122) 2022-08-03 07:02:58 +08:00
sigoden
0918fb3fe4 feat: support ecdsa tls cert (#119) 2022-08-02 09:32:11 +08:00
sigoden
14efeb6360 chore: update readme 2022-08-02 07:07:53 +08:00
sigoden
30b8f75bba chore: update deps and remove dependabot 2022-08-02 07:07:33 +08:00
sigoden
a39065beff chore: update readme 2022-08-01 15:12:25 +08:00
sigoden
a493c13734 chore(release): version v0.28.0 2022-08-01 08:47:18 +08:00
sigoden
ae2f878e62 feat: support customize http log format (#116) 2022-07-31 08:27:09 +08:00
sigoden
277d9d22d4 feat(ui): add table row hover (#115) 2022-07-30 08:04:31 +08:00
sigoden
c62926d19c fix(ui): file path contains special charactors (#114) 2022-07-30 07:53:27 +08:00
sigoden
cccbbe9ea4 chore: update deps 2022-07-29 08:54:46 +08:00
sigoden
147048690f chore(release): version v0.27.0 2022-07-25 09:59:32 +08:00
sigoden
9cfd66dab9 feat: adjust digest auth timeout to 1day (#110) 2022-07-21 11:47:47 +08:00
sigoden
b791549ec7 feat: improve hidden to support glob (#108) 2022-07-19 20:37:14 +08:00
sigoden
f148817c52 chore(release): version v0.26.0 2022-07-11 08:54:29 +08:00
sigoden
00ae36d486 chore: improve readme 2022-07-08 22:36:16 +08:00
sigoden
4e823e8bba feat: make --path-prefix works on serving single file (#102) 2022-07-08 19:30:05 +08:00
sigoden
4e84e6c532 fix: cors headers (#100) 2022-07-08 16:18:10 +08:00
sigoden
f49b590a56 chore: update description of --path-prefix 2022-07-07 15:44:25 +08:00
sigoden
cb1f3cddea chore(release): version v0.25.0 2022-07-07 07:51:51 +08:00
sigoden
05dbcfb2df feat: limit the number of concurrent uploads (#98) 2022-07-06 19:17:30 +08:00
sigoden
76e967fa59 feat: add completions (#97) 2022-07-06 12:11:00 +08:00
sigoden
140a360e37 chore: optimize move path default value 2022-07-05 09:16:21 +08:00
sigoden
604cbb7412 feat: check permission on move/copy destination (#93) 2022-07-04 23:25:05 +08:00
sigoden
c6541b1c36 feat: ui supports move folder/file to new path (#92) 2022-07-04 21:20:00 +08:00
sigoden
b6729a3d64 feat: ui supports creating folder (#91) 2022-07-04 20:12:35 +08:00
sigoden
4f1a35de5d chore(release): version v0.24.0 2022-07-03 06:47:49 +08:00
sigoden
2ffdcdf106 feat: allow search with --render-try-index (#88) 2022-07-02 23:25:57 +08:00
sigoden
1e0cdafbcf fix: unexpect stack overflow when searching a lot (#87) 2022-07-02 22:55:22 +08:00
sigoden
0a03941e05 chore: update deps 2022-07-02 11:48:47 +08:00
sigoden
07a7322748 chore: update readme 2022-07-01 21:37:56 +08:00
34 changed files with 1593 additions and 1144 deletions

View File

@@ -1,6 +0,0 @@
version: 2
updates:
- package-ecosystem: "cargo" # See documentation for possible values
directory: "/" # Location of package manifests
schedule:
interval: "monthly"

View File

@@ -2,6 +2,75 @@
All notable changes to this project will be documented in this file. All notable changes to this project will be documented in this file.
## [0.30.0] - 2022-09-05
### Bug Fixes
- Hide path by ext name ([#126](https://github.com/sigoden/dufs/issues/126))
### Features
- Support sort by name, mtime, size ([#128](https://github.com/sigoden/dufs/issues/128))
- Add --assets options to override assets ([#134](https://github.com/sigoden/dufs/issues/134))
## [0.29.0] - 2022-08-03
### Bug Fixes
- Table row hover highlighting in dark mode ([#122](https://github.com/sigoden/dufs/issues/122))
### Features
- Support ecdsa tls cert ([#119](https://github.com/sigoden/dufs/issues/119))
## [0.28.0] - 2022-08-01
### Bug Fixes
- File path contains special charactors ([#114](https://github.com/sigoden/dufs/issues/114))
### Features
- Add table row hover ([#115](https://github.com/sigoden/dufs/issues/115))
- Support customize http log format ([#116](https://github.com/sigoden/dufs/issues/116))
## [0.27.0] - 2022-07-25
### Features
- Improve hidden to support glob ([#108](https://github.com/sigoden/dufs/issues/108))
- Adjust digest auth timeout to 1day ([#110](https://github.com/sigoden/dufs/issues/110))
## [0.26.0] - 2022-07-11
### Bug Fixes
- Cors headers ([#100](https://github.com/sigoden/dufs/issues/100))
### Features
- Make --path-prefix works on serving single file ([#102](https://github.com/sigoden/dufs/issues/102))
## [0.25.0] - 2022-07-06
### Features
- Ui supports creating folder ([#91](https://github.com/sigoden/dufs/issues/91))
- Ui supports move folder/file to new path ([#92](https://github.com/sigoden/dufs/issues/92))
- Check permission on move/copy destination ([#93](https://github.com/sigoden/dufs/issues/93))
- Add completions ([#97](https://github.com/sigoden/dufs/issues/97))
- Limit the number of concurrent uploads ([#98](https://github.com/sigoden/dufs/issues/98))
## [0.24.0] - 2022-07-02
### Bug Fixes
- Unexpect stack overflow when searching a lot ([#87](https://github.com/sigoden/dufs/issues/87))
### Features
- Allow search with --render-try-index ([#88](https://github.com/sigoden/dufs/issues/88))
## [0.23.1] - 2022-06-30 ## [0.23.1] - 2022-06-30
### Bug Fixes ### Bug Fixes

951
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,6 @@
[package] [package]
name = "dufs" name = "dufs"
version = "0.23.1" version = "0.30.0"
edition = "2021" edition = "2021"
authors = ["sigoden <sigoden@gmail.com>"] authors = ["sigoden <sigoden@gmail.com>"]
description = "Dufs is a distinctive utility file server" description = "Dufs is a distinctive utility file server"
@@ -12,6 +12,7 @@ keywords = ["static", "file", "server", "webdav", "cli"]
[dependencies] [dependencies]
clap = { version = "3", default-features = false, features = ["std", "wrap_help"] } clap = { version = "3", default-features = false, features = ["std", "wrap_help"] }
clap_complete = "3"
chrono = "0.4" chrono = "0.4"
tokio = { version = "1", features = ["rt-multi-thread", "macros", "fs", "io-util", "signal"]} tokio = { version = "1", features = ["rt-multi-thread", "macros", "fs", "io-util", "signal"]}
tokio-util = { version = "0.7", features = ["io-util"] } tokio-util = { version = "0.7", features = ["io-util"] }
@@ -21,8 +22,7 @@ serde = { version = "1", features = ["derive"] }
serde_json = "1" serde_json = "1"
futures = "0.3" futures = "0.3"
base64 = "0.13" base64 = "0.13"
async_zip = { git = "https://github.com/sigoden/rs-async-zip", branch = "patch01" } async_zip = { version = "0.0.8", default-features = false, features = ["deflate"] }
async-walkdir = "0.2"
headers = "0.3" headers = "0.3"
mime_guess = "2.0" mime_guess = "2.0"
if-addrs = "0.7" if-addrs = "0.7"
@@ -37,6 +37,9 @@ xml-rs = "0.8"
log = "0.4" log = "0.4"
socket2 = "0.4" socket2 = "0.4"
async-stream = "0.3" async-stream = "0.3"
walkdir = "2.3"
form_urlencoded = "1.0"
alphanumeric-sort = "1.4"
[features] [features]
default = ["tls"] default = ["tls"]
@@ -46,14 +49,13 @@ tls = ["rustls", "rustls-pemfile", "tokio-rustls"]
assert_cmd = "2" assert_cmd = "2"
reqwest = { version = "0.11", features = ["blocking", "multipart", "rustls-tls"], default-features = false } reqwest = { version = "0.11", features = ["blocking", "multipart", "rustls-tls"], default-features = false }
assert_fs = "1" assert_fs = "1"
select = "0.5"
port_check = "0.1" port_check = "0.1"
rstest = "0.15" rstest = "0.15"
regex = "1" regex = "1"
pretty_assertions = "1.2"
url = "2" url = "2"
diqwest = { version = "1", features = ["blocking"] } diqwest = { version = "1", features = ["blocking"] }
predicates = "2" predicates = "2"
indexmap = "1.9"
[profile.release] [profile.release]
lto = true lto = true

176
README.md
View File

@@ -5,7 +5,7 @@
Dufs is a distinctive utility file server that supports static serving, uploading, searching, accessing control, webdav... Dufs is a distinctive utility file server that supports static serving, uploading, searching, accessing control, webdav...
![demo](https://user-images.githubusercontent.com/4012553/174486522-7af350e6-0195-4f4a-8480-d9464fc6452f.png) ![demo](https://user-images.githubusercontent.com/4012553/189362357-b2f7aa6b-9df0-4438-a57c-c8f92850fc4f.png)
## Features ## Features
@@ -30,7 +30,7 @@ cargo install dufs
### With docker ### With docker
``` ```
docker run -v `pwd`:/data -p 5000:5000 --rm -it sigoden/dufs /data docker run -v `pwd`:/data -p 5000:5000 --rm -it sigoden/dufs /data -A
``` ```
### Binaries on macOS, Linux, Windows ### Binaries on macOS, Linux, Windows
@@ -43,16 +43,16 @@ Download from [Github Releases](https://github.com/sigoden/dufs/releases), unzip
Dufs is a distinctive utility file server - https://github.com/sigoden/dufs Dufs is a distinctive utility file server - https://github.com/sigoden/dufs
USAGE: USAGE:
dufs [OPTIONS] [--] [path] dufs [OPTIONS] [--] [root]
ARGS: ARGS:
<path> Specific path to serve [default: .] <root> Specific path to serve [default: .]
OPTIONS: OPTIONS:
-b, --bind <addr>... Specify bind address -b, --bind <addr>... Specify bind address
-p, --port <port> Specify port to listen on [default: 5000] -p, --port <port> Specify port to listen on [default: 5000]
--path-prefix <path> Specify an path prefix --path-prefix <path> Specify a path prefix
--hidden <value> Hide directories from directory listings, separated by `,` --hidden <value> Hide paths from directory listings, separated by `,`
-a, --auth <rule>... Add auth for path -a, --auth <rule>... Add auth for path
--auth-method <value> Select auth method [default: digest] [possible values: basic, digest] --auth-method <value> Select auth method [default: digest] [possible values: basic, digest]
-A, --allow-all Allow all operations -A, --allow-all Allow all operations
@@ -64,8 +64,11 @@ OPTIONS:
--render-index Serve index.html when requesting a directory, returns 404 if not found index.html --render-index Serve index.html when requesting a directory, returns 404 if not found index.html
--render-try-index Serve index.html when requesting a directory, returns directory listing if not found index.html --render-try-index Serve index.html when requesting a directory, returns directory listing if not found index.html
--render-spa Serve SPA(Single Page Application) --render-spa Serve SPA(Single Page Application)
--assets <path> Use custom assets to override builtin assets
--tls-cert <path> Path to an SSL/TLS certificate to serve with HTTPS --tls-cert <path> Path to an SSL/TLS certificate to serve with HTTPS
--tls-key <path> Path to the SSL/TLS certificate's private key --tls-key <path> Path to the SSL/TLS certificate's private key
--log-format <format> Customize http log format
--completions <shell> Print shell completion script for <shell> [possible values: bash, elvish, fish, powershell, zsh]
-h, --help Print help information -h, --help Print help information
-V, --version Print version information -V, --version Print version information
``` ```
@@ -78,7 +81,7 @@ Serve current working directory
dufs dufs
``` ```
Explicitly allow all operations including upload/delete Allow all operations like upload/delete/search...
``` ```
dufs -A dufs -A
@@ -90,7 +93,7 @@ Only allow upload operation
dufs --allow-upload dufs --allow-upload
``` ```
Serve a directory Serve a specific directory
``` ```
dufs Downloads dufs Downloads
@@ -102,18 +105,18 @@ Serve a single file
dufs linux-distro.iso dufs linux-distro.iso
``` ```
Serve index.html when requesting a directory Serve a single-page application like react/vue
```
dufs --render-index
```
Serve SPA(Single Page Application)
``` ```
dufs --render-spa dufs --render-spa
``` ```
Serve a static website with index.html
```
dufs --render-index
```
Require username/password Require username/password
``` ```
@@ -126,12 +129,6 @@ Listen on a specific port
dufs -p 80 dufs -p 80
``` ```
Hide directories from directory listings
```
dufs --hidden .git,.DS_Store
```
Use https Use https
``` ```
@@ -140,6 +137,12 @@ dufs --tls-cert my.crt --tls-key my.key
## API ## API
Upload a file
```
curl -T path-to-file http://127.0.0.1:5000/new-path/path-to-file
```
Download a file Download a file
``` ```
curl http://127.0.0.1:5000/path-to-file curl http://127.0.0.1:5000/path-to-file
@@ -151,40 +154,131 @@ Download a folder as zip file
curl -o path-to-folder.zip http://127.0.0.1:5000/path-to-folder?zip curl -o path-to-folder.zip http://127.0.0.1:5000/path-to-folder?zip
``` ```
Upload a file
```
curl --upload-file path-to-file http://127.0.0.1:5000/path-to-file
```
Delete a file/folder Delete a file/folder
``` ```
curl -X DELETE http://127.0.0.1:5000/path-to-file curl -X DELETE http://127.0.0.1:5000/path-to-file-or-folder
``` ```
## Access Control <details>
<summary><h2>Advanced topics</h2></summary>
### Access Control
Dufs supports path level access control. You can control who can do what on which path with `--auth`/`-a`. Dufs supports path level access control. You can control who can do what on which path with `--auth`/`-a`.
``` ```
dufs -a <path>@<readwrite>[@<readonly>|@*] dufs -a <path>@<readwrite>
dufs -a <path>@<readwrite>@<readonly>
dufs -a <path>@<readwrite>@*
``` ```
- `<path>`: Protected url path - `<path>`: Protected url path
- `<readwrite>`: Account with upload/delete/view/download permission, required - `<readwrite>`: Account with readwrite permissions. If dufs is run with `dufs --allow-all`, the permissions are upload/delete/search/view/download. If dufs is run with `dufs --allow-upload`, the permissions are upload/view/download.
- `<readonly>`: Account with view/download permission, optional - `<readonly>`: Account with readonly permissions. The permissions are search/view/download if dufs allows search, otherwise view/download.
> `*` means `<path>` is public, everyone can view/download it.
For example:
``` ```
dufs -a /@admin:pass1@* -a /ui@designer:pass2 -A dufs -A -a /@admin:admin
``` ```
- All files/folders are public to view/download. `admin` has all permissions for all paths.
- Account `admin:pass1` can upload/delete/view/download any files/folders.
- Account `designer:pass2` can upload/delete/view/download any files/folders in the `ui` folder. ```
dufs -A -a /@admin:admin@guest:guest
```
`guest` has readonly permissions for all paths.
```
dufs -A -a /@admin:admin@*
```
All paths are public; everyone can view/download them.
```
dufs -A -a /@admin:admin -a /user1@user1:pass1 -a /user2@pass2:user2
```
`user1` has all permissions for `/user1*` path.
`user2` has all permissions for `/user2*` path.
```
dufs -a /@admin:admin
```
Since dufs only allows viewing/downloading, `admin` can only view/download files.
### Hide Paths
Dufs supports hiding paths from directory listings via option `--hidden`.
```
dufs --hidden .git,.DS_Store,tmp
```
`--hidden` also supports a glob variant:
- `?` matches any single character
- `*` matches any (possibly empty) sequence of characters
- `**`, `[..]`, `[!..]` is not supported
```sh
dufs --hidden '.*'
dufs --hidden '*.log,*.lock'
```
### Log Format
Dufs supports customizing the http log format with the option `--log-format`.
The log format can use the following variables.
| variable | description |
| ------------ | ------------------------------------------------------------------------- |
| $remote_addr | client address |
| $remote_user | user name supplied with authentication |
| $request | full original request line |
| $status | response status |
| $http_ | arbitrary request header field. examples: $http_user_agent, $http_referer |
The default log format is `'$remote_addr "$request" $status'`.
```
2022-08-06T06:59:31+08:00 INFO - 127.0.0.1 "GET /" 200
```
Disable http log
```
dufs --log-format=''
```
Log user-agent
```
dufs --log-format '$remote_addr "$request" $status $http_user_agent'
```
```
2022-08-06T06:53:55+08:00 INFO - 127.0.0.1 "GET /" 200 Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/104.0.0.0 Safari/537.36
```
Log remote-user
```
dufs --log-format '$remote_addr $remote_user "$request" $status' -a /@admin:admin -a /folder1@user1:pass1
```
```
2022-08-06T07:04:37+08:00 INFO - 127.0.0.1 admin "GET /" 200
```
### Customize UI
Dufs allows users to customize the UI with your own assets.
```
dufs --assets my-assets-dir/
```
Your assets folder must contain an `index.html` file.
`index.html` can use the following placeholder variables to retrieve internal data.
- `__INDEX_DATA__`: directory listing data
- `__ASSERTS_PREFIX__`: assets url prefix
</details>
## License ## License
@@ -192,4 +286,4 @@ Copyright (c) 2022 dufs-developers.
dufs is made available under the terms of either the MIT License or the Apache License 2.0, at your option. dufs is made available under the terms of either the MIT License or the Apache License 2.0, at your option.
See the LICENSE-APACHE and LICENSE-MIT files for license details. See the LICENSE-APACHE and LICENSE-MIT files for license details.

View File

@@ -59,6 +59,15 @@ body {
height: 1.1rem; height: 1.1rem;
} }
.toolbox .control {
cursor: pointer;
padding-left: 0.25em;
}
.upload-file input {
display: none;
}
.searchbar { .searchbar {
display: flex; display: flex;
flex-wrap: nowrap; flex-wrap: nowrap;
@@ -90,15 +99,6 @@ body {
cursor: pointer; cursor: pointer;
} }
.upload-control {
cursor: pointer;
padding-left: 0.25em;
}
.upload-control input {
display: none;
}
.upload-status span { .upload-status span {
width: 70px; width: 70px;
display: inline-block; display: inline-block;
@@ -131,8 +131,21 @@ body {
padding-left: 0.6em; padding-left: 0.6em;
} }
.paths-table thead a {
color: unset;
text-decoration: none;
}
.paths-table thead a > span {
padding-left: 2px;
}
.paths-table tbody tr:hover {
background-color: #fafafa;
}
.paths-table .cell-actions { .paths-table .cell-actions {
width: 60px; width: 75px;
display: flex; display: flex;
padding-left: 0.6em; padding-left: 0.6em;
} }
@@ -175,7 +188,7 @@ body {
} }
.action-btn { .action-btn {
padding-left: 0.4em; padding-right: 0.3em;
} }
.uploaders-table { .uploaders-table {
@@ -227,4 +240,8 @@ body {
.path a { .path a {
color: #3191ff; color: #3191ff;
} }
.paths-table tbody tr:hover {
background-color: #1a1a1a;
}
} }

View File

@@ -4,7 +4,12 @@
<head> <head>
<meta charset="utf-8" /> <meta charset="utf-8" />
<meta name="viewport" content="width=device-width" /> <meta name="viewport" content="width=device-width" />
__SLOT__ <link rel="icon" type="image/x-icon" href="__ASSERTS_PREFIX__favicon.ico">
<link rel="stylesheet" href="__ASSERTS_PREFIX__index.css">
<script>
DATA = __INDEX_DATA__
</script>
<script src="__ASSERTS_PREFIX__index.js"></script>
</head> </head>
<body> <body>
<div class="head"> <div class="head">
@@ -15,12 +20,18 @@
<svg width="16" height="16" viewBox="0 0 16 16"><path d="M.5 9.9a.5.5 0 0 1 .5.5v2.5a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-2.5a.5.5 0 0 1 1 0v2.5a2 2 0 0 1-2 2H2a2 2 0 0 1-2-2v-2.5a.5.5 0 0 1 .5-.5z"/><path d="M7.646 11.854a.5.5 0 0 0 .708 0l3-3a.5.5 0 0 0-.708-.708L8.5 10.293V1.5a.5.5 0 0 0-1 0v8.793L5.354 8.146a.5.5 0 1 0-.708.708l3 3z"/></svg> <svg width="16" height="16" viewBox="0 0 16 16"><path d="M.5 9.9a.5.5 0 0 1 .5.5v2.5a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-2.5a.5.5 0 0 1 1 0v2.5a2 2 0 0 1-2 2H2a2 2 0 0 1-2-2v-2.5a.5.5 0 0 1 .5-.5z"/><path d="M7.646 11.854a.5.5 0 0 0 .708 0l3-3a.5.5 0 0 0-.708-.708L8.5 10.293V1.5a.5.5 0 0 0-1 0v8.793L5.354 8.146a.5.5 0 1 0-.708.708l3 3z"/></svg>
</a> </a>
</div> </div>
<div class="upload-control hidden" title="Upload files"> <div class="control upload-file hidden" title="Upload files">
<label for="file"> <label for="file">
<svg width="16" height="16" viewBox="0 0 16 16"><path d="M.5 9.9a.5.5 0 0 1 .5.5v2.5a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-2.5a.5.5 0 0 1 1 0v2.5a2 2 0 0 1-2 2H2a2 2 0 0 1-2-2v-2.5a.5.5 0 0 1 .5-.5z"/><path d="M7.646 1.146a.5.5 0 0 1 .708 0l3 3a.5.5 0 0 1-.708.708L8.5 2.707V11.5a.5.5 0 0 1-1 0V2.707L5.354 4.854a.5.5 0 1 1-.708-.708l3-3z"/></svg> <svg width="16" height="16" viewBox="0 0 16 16"><path d="M.5 9.9a.5.5 0 0 1 .5.5v2.5a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-2.5a.5.5 0 0 1 1 0v2.5a2 2 0 0 1-2 2H2a2 2 0 0 1-2-2v-2.5a.5.5 0 0 1 .5-.5z"/><path d="M7.646 1.146a.5.5 0 0 1 .708 0l3 3a.5.5 0 0 1-.708.708L8.5 2.707V11.5a.5.5 0 0 1-1 0V2.707L5.354 4.854a.5.5 0 1 1-.708-.708l3-3z"/></svg>
</label> </label>
<input type="file" id="file" name="file" multiple> <input type="file" id="file" name="file" multiple>
</div> </div>
<div class="control new-folder hidden" title="New folder">
<svg width="16" height="16" viewBox="0 0 16 16">
<path d="m.5 3 .04.87a1.99 1.99 0 0 0-.342 1.311l.637 7A2 2 0 0 0 2.826 14H9v-1H2.826a1 1 0 0 1-.995-.91l-.637-7A1 1 0 0 1 2.19 4h11.62a1 1 0 0 1 .996 1.09L14.54 8h1.005l.256-2.819A2 2 0 0 0 13.81 3H9.828a2 2 0 0 1-1.414-.586l-.828-.828A2 2 0 0 0 6.172 1H2.5a2 2 0 0 0-2 2zm5.672-1a1 1 0 0 1 .707.293L7.586 3H2.19c-.24 0-.47.042-.683.12L1.5 2.98a1 1 0 0 1 1-.98h3.672z"/>
<path d="M13.5 10a.5.5 0 0 1 .5.5V12h1.5a.5.5 0 1 1 0 1H14v1.5a.5.5 0 1 1-1 0V13h-1.5a.5.5 0 0 1 0-1H13v-1.5a.5.5 0 0 1 .5-.5z"/>
</svg>
</div>
</div> </div>
<form class="searchbar hidden"> <form class="searchbar hidden">
<div class="icon"> <div class="icon">
@@ -42,12 +53,6 @@
</table> </table>
<table class="paths-table hidden"> <table class="paths-table hidden">
<thead> <thead>
<tr>
<th class="cell-name" colspan="2">Name</th>
<th class="cell-mtime">Last modified</th>
<th class="cell-size">Size</th>
<th class="cell-actions">Actions</th>
</tr>
</thead> </thead>
<tbody> <tbody>
</tbody> </tbody>

View File

@@ -6,17 +6,41 @@
* @property {number} size * @property {number} size
*/ */
// https://stackoverflow.com/a/901144/3642588 /**
const params = new Proxy(new URLSearchParams(window.location.search), { * @typedef {object} DATA
get: (searchParams, prop) => searchParams.get(prop), * @property {string} href
}); * @property {string} uri_prefix
* @property {PathItem[]} paths
* @property {boolean} allow_upload
* @property {boolean} allow_delete
* @property {boolean} allow_search
* @property {boolean} dir_exists
*/
const dirEmptyNote = params.q ? 'No results' : DATA.dir_exists ? 'Empty folder' : 'Folder will be created when a file is uploaded'; /**
* @type {DATA} DATA
*/
var DATA;
/**
* @type {PARAMS}
* @typedef {object} PARAMS
* @property {string} q
* @property {string} sort
* @property {string} order
*/
const PARAMS = Object.fromEntries(new URLSearchParams(window.location.search).entries());
const dirEmptyNote = PARAMS.q ? 'No results' : DATA.dir_exists ? 'Empty folder' : 'Folder will be created when a file is uploaded';
/** /**
* @type Element * @type Element
*/ */
let $pathsTable; let $pathsTable;
/**
* @type Element
*/
let $pathsTableHead;
/** /**
* @type Element * @type Element
*/ */
@@ -29,6 +53,10 @@ let $uploadersTable;
* @type Element * @type Element
*/ */
let $emptyFolder; let $emptyFolder;
/**
* @type Element
*/
let $newFolder;
class Uploader { class Uploader {
/** /**
@@ -49,9 +77,8 @@ class Uploader {
} }
upload() { upload() {
const { file, idx, name } = this; const { idx, name } = this;
const url = getUrl(name); const url = getUrl(name);
const encodedUrl = encodedStr(url);
const encodedName = encodedStr(name); const encodedName = encodedStr(name);
$uploadersTable.insertAdjacentHTML("beforeend", ` $uploadersTable.insertAdjacentHTML("beforeend", `
<tr id="upload${idx}" class="uploader"> <tr id="upload${idx}" class="uploader">
@@ -59,15 +86,22 @@ class Uploader {
${getSvg()} ${getSvg()}
</td> </td>
<td class="path cell-name"> <td class="path cell-name">
<a href="${encodedUrl}">${encodedName}</a> <a href="${url}">${encodedName}</a>
</td> </td>
<td class="cell-status upload-status" id="uploadStatus${idx}"></td> <td class="cell-status upload-status" id="uploadStatus${idx}"></td>
</tr>`); </tr>`);
$uploadersTable.classList.remove("hidden"); $uploadersTable.classList.remove("hidden");
$emptyFolder.classList.add("hidden"); $emptyFolder.classList.add("hidden");
this.$uploadStatus = document.getElementById(`uploadStatus${idx}`); this.$uploadStatus = document.getElementById(`uploadStatus${idx}`);
this.lastUptime = Date.now(); this.$uploadStatus.innerHTML = '-';
Uploader.queues.push(this);
Uploader.runQueue();
}
ajax() {
Uploader.runnings += 1;
const url = getUrl(this.name);
this.lastUptime = Date.now();
const ajax = new XMLHttpRequest(); const ajax = new XMLHttpRequest();
ajax.upload.addEventListener("progress", e => this.progress(e), false); ajax.upload.addEventListener("progress", e => this.progress(e), false);
ajax.addEventListener("readystatechange", () => { ajax.addEventListener("readystatechange", () => {
@@ -82,13 +116,14 @@ class Uploader {
ajax.addEventListener("error", () => this.fail(), false); ajax.addEventListener("error", () => this.fail(), false);
ajax.addEventListener("abort", () => this.fail(), false); ajax.addEventListener("abort", () => this.fail(), false);
ajax.open("PUT", url); ajax.open("PUT", url);
ajax.send(file); ajax.send(this.file);
} }
progress(event) { progress(event) {
let now = Date.now(); const now = Date.now();
let speed = (event.loaded - this.uploaded) / (now - this.lastUptime) * 1000; const speed = (event.loaded - this.uploaded) / (now - this.lastUptime) * 1000;
let [speedValue, speedUnit] = formatSize(speed); const [speedValue, speedUnit] = formatSize(speed);
const speedText = `${speedValue}${speedUnit.toLowerCase()}/s`; const speedText = `${speedValue}${speedUnit.toLowerCase()}/s`;
const progress = formatPercent((event.loaded / event.total) * 100); const progress = formatPercent((event.loaded / event.total) * 100);
const duration = formatDuration((event.total - event.loaded) / speed) const duration = formatDuration((event.total - event.loaded) / speed)
@@ -99,15 +134,34 @@ class Uploader {
complete() { complete() {
this.$uploadStatus.innerHTML = ``; this.$uploadStatus.innerHTML = ``;
Uploader.runnings -= 1;
Uploader.runQueue();
} }
fail() { fail() {
this.$uploadStatus.innerHTML = ``; this.$uploadStatus.innerHTML = ``;
Uploader.runnings -= 1;
Uploader.runQueue();
} }
} }
Uploader.globalIdx = 0; Uploader.globalIdx = 0;
Uploader.runnings = 0;
/**
* @type Uploader[]
*/
Uploader.queues = [];
Uploader.runQueue = () => {
if (Uploader.runnings > 2) return;
let uploader = Uploader.queues.shift();
if (!uploader) return;
uploader.ajax();
}
/** /**
* Add breadcrumb * Add breadcrumb
* @param {string} href * @param {string} href
@@ -129,16 +183,15 @@ function addBreadcrumb(href, uri_prefix) {
if (!path.endsWith("/")) { if (!path.endsWith("/")) {
path += "/"; path += "/";
} }
path += encodeURI(name); path += encodeURIComponent(name);
} }
const encodedPath = encodedStr(path);
const encodedName = encodedStr(name); const encodedName = encodedStr(name);
if (i === 0) { if (i === 0) {
$breadcrumb.insertAdjacentHTML("beforeend", `<a href="${encodedPath}"><svg width="16" height="16" viewBox="0 0 16 16"><path d="M6.5 14.5v-3.505c0-.245.25-.495.5-.495h2c.25 0 .5.25.5.5v3.5a.5.5 0 0 0 .5.5h4a.5.5 0 0 0 .5-.5v-7a.5.5 0 0 0-.146-.354L13 5.793V2.5a.5.5 0 0 0-.5-.5h-1a.5.5 0 0 0-.5.5v1.293L8.354 1.146a.5.5 0 0 0-.708 0l-6 6A.5.5 0 0 0 1.5 7.5v7a.5.5 0 0 0 .5.5h4a.5.5 0 0 0 .5-.5z"/></svg></a>`); $breadcrumb.insertAdjacentHTML("beforeend", `<a href="${path}"><svg width="16" height="16" viewBox="0 0 16 16"><path d="M6.5 14.5v-3.505c0-.245.25-.495.5-.495h2c.25 0 .5.25.5.5v3.5a.5.5 0 0 0 .5.5h4a.5.5 0 0 0 .5-.5v-7a.5.5 0 0 0-.146-.354L13 5.793V2.5a.5.5 0 0 0-.5-.5h-1a.5.5 0 0 0-.5.5v1.293L8.354 1.146a.5.5 0 0 0-.708 0l-6 6A.5.5 0 0 0 1.5 7.5v7a.5.5 0 0 0 .5.5h4a.5.5 0 0 0 .5-.5z"/></svg></a>`);
} else if (i === len - 1) { } else if (i === len - 1) {
$breadcrumb.insertAdjacentHTML("beforeend", `<b>${encodedName}</b>`); $breadcrumb.insertAdjacentHTML("beforeend", `<b>${encodedName}</b>`);
} else { } else {
$breadcrumb.insertAdjacentHTML("beforeend", `<a href="${encodedPath}">${encodedName}</a>`); $breadcrumb.insertAdjacentHTML("beforeend", `<a href="${path}">${encodedName}</a>`);
} }
if (i !== len - 1) { if (i !== len - 1) {
$breadcrumb.insertAdjacentHTML("beforeend", `<span class="separator">/</span>`); $breadcrumb.insertAdjacentHTML("beforeend", `<span class="separator">/</span>`);
@@ -146,6 +199,67 @@ function addBreadcrumb(href, uri_prefix) {
} }
} }
/**
* Render path table thead
*/
function renderPathsTableHead() {
const headerItems = [
{
name: "name",
props: `colspan="2"`,
text: "Name",
},
{
name: "mtime",
props: ``,
text: "Last Modified",
},
{
name: "size",
props: ``,
text: "Size",
}
];
$pathsTableHead.insertAdjacentHTML("beforeend", `
<tr>
${headerItems.map(item => {
let svg = `<svg width="12" height="12" viewBox="0 0 16 16"><path fill-rule="evenodd" d="M11.5 15a.5.5 0 0 0 .5-.5V2.707l3.146 3.147a.5.5 0 0 0 .708-.708l-4-4a.5.5 0 0 0-.708 0l-4 4a.5.5 0 1 0 .708.708L11 2.707V14.5a.5.5 0 0 0 .5.5zm-7-14a.5.5 0 0 1 .5.5v11.793l3.146-3.147a.5.5 0 0 1 .708.708l-4 4a.5.5 0 0 1-.708 0l-4-4a.5.5 0 0 1 .708-.708L4 13.293V1.5a.5.5 0 0 1 .5-.5z"/></svg>`;
let order = "asc";
if (PARAMS.sort === item.name) {
if (PARAMS.order === "asc") {
order = "desc";
svg = `<svg width="12" height="12" viewBox="0 0 16 16"><path fill-rule="evenodd" d="M8 15a.5.5 0 0 0 .5-.5V2.707l3.146 3.147a.5.5 0 0 0 .708-.708l-4-4a.5.5 0 0 0-.708 0l-4 4a.5.5 0 1 0 .708.708L7.5 2.707V14.5a.5.5 0 0 0 .5.5z"/></svg>`
} else {
svg = `<svg width="12" height="12" viewBox="0 0 16 16"><path fill-rule="evenodd" d="M8 1a.5.5 0 0 1 .5.5v11.793l3.146-3.147a.5.5 0 0 1 .708.708l-4 4a.5.5 0 0 1-.708 0l-4-4a.5.5 0 0 1 .708-.708L7.5 13.293V1.5A.5.5 0 0 1 8 1z"/></svg>`
}
}
const qs = new URLSearchParams({...PARAMS, order, sort: item.name }).toString();
const icon = `<span>${svg}</span>`
return `<th class="cell-${item.name}" ${item.props}><a href="?${qs}">${item.text}${icon}</a></th>`
}).join("\n")}
<th class="cell-actions">Actions</th>
</tr>
`);
}
/**
* Render path table tbody
*/
function renderPathsTableBody() {
if (DATA.paths && DATA.paths.length > 0) {
const len = DATA.paths.length;
if (len > 0) {
$pathsTable.classList.remove("hidden");
}
for (let i = 0; i < len; i++) {
addPath(DATA.paths[i], i);
}
} else {
$emptyFolder.textContent = dirEmptyNote;
$emptyFolder.classList.remove("hidden");
}
}
/** /**
* Add pathitem * Add pathitem
* @param {PathItem} file * @param {PathItem} file
@@ -154,35 +268,41 @@ function addBreadcrumb(href, uri_prefix) {
function addPath(file, index) { function addPath(file, index) {
const encodedName = encodedStr(file.name); const encodedName = encodedStr(file.name);
let url = getUrl(file.name) let url = getUrl(file.name)
let encodedUrl = encodedStr(url);
let actionDelete = ""; let actionDelete = "";
let actionDownload = ""; let actionDownload = "";
let actionMove = "";
if (file.path_type.endsWith("Dir")) { if (file.path_type.endsWith("Dir")) {
url += "/"; url += "/";
encodedUrl += "/";
actionDownload = ` actionDownload = `
<div class="action-btn"> <div class="action-btn">
<a href="${encodedUrl}?zip" title="Download folder as a .zip file"> <a href="${url}?zip" title="Download folder as a .zip file">
<svg width="16" height="16" viewBox="0 0 16 16"><path d="M.5 9.9a.5.5 0 0 1 .5.5v2.5a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-2.5a.5.5 0 0 1 1 0v2.5a2 2 0 0 1-2 2H2a2 2 0 0 1-2-2v-2.5a.5.5 0 0 1 .5-.5z"/><path d="M7.646 11.854a.5.5 0 0 0 .708 0l3-3a.5.5 0 0 0-.708-.708L8.5 10.293V1.5a.5.5 0 0 0-1 0v8.793L5.354 8.146a.5.5 0 1 0-.708.708l3 3z"/></svg> <svg width="16" height="16" viewBox="0 0 16 16"><path d="M.5 9.9a.5.5 0 0 1 .5.5v2.5a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-2.5a.5.5 0 0 1 1 0v2.5a2 2 0 0 1-2 2H2a2 2 0 0 1-2-2v-2.5a.5.5 0 0 1 .5-.5z"/><path d="M7.646 11.854a.5.5 0 0 0 .708 0l3-3a.5.5 0 0 0-.708-.708L8.5 10.293V1.5a.5.5 0 0 0-1 0v8.793L5.354 8.146a.5.5 0 1 0-.708.708l3 3z"/></svg>
</a> </a>
</div>`; </div>`;
} else { } else {
actionDownload = ` actionDownload = `
<div class="action-btn" > <div class="action-btn" >
<a href="${encodedUrl}" title="Download file" download> <a href="${url}" title="Download file" download>
<svg width="16" height="16" viewBox="0 0 16 16"><path d="M.5 9.9a.5.5 0 0 1 .5.5v2.5a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-2.5a.5.5 0 0 1 1 0v2.5a2 2 0 0 1-2 2H2a2 2 0 0 1-2-2v-2.5a.5.5 0 0 1 .5-.5z"/><path d="M7.646 11.854a.5.5 0 0 0 .708 0l3-3a.5.5 0 0 0-.708-.708L8.5 10.293V1.5a.5.5 0 0 0-1 0v8.793L5.354 8.146a.5.5 0 1 0-.708.708l3 3z"/></svg> <svg width="16" height="16" viewBox="0 0 16 16"><path d="M.5 9.9a.5.5 0 0 1 .5.5v2.5a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-2.5a.5.5 0 0 1 1 0v2.5a2 2 0 0 1-2 2H2a2 2 0 0 1-2-2v-2.5a.5.5 0 0 1 .5-.5z"/><path d="M7.646 11.854a.5.5 0 0 0 .708 0l3-3a.5.5 0 0 0-.708-.708L8.5 10.293V1.5a.5.5 0 0 0-1 0v8.793L5.354 8.146a.5.5 0 1 0-.708.708l3 3z"/></svg>
</a> </a>
</div>`; </div>`;
} }
if (DATA.allow_delete) { if (DATA.allow_delete) {
if (DATA.allow_upload) {
actionMove = `
<div onclick="movePath(${index})" class="action-btn" id="moveBtn${index}" title="Move to new path">
<svg width="16" height="16" viewBox="0 0 16 16"><path fill-rule="evenodd" d="M1.5 1.5A.5.5 0 0 0 1 2v4.8a2.5 2.5 0 0 0 2.5 2.5h9.793l-3.347 3.346a.5.5 0 0 0 .708.708l4.2-4.2a.5.5 0 0 0 0-.708l-4-4a.5.5 0 0 0-.708.708L13.293 8.3H3.5A1.5 1.5 0 0 1 2 6.8V2a.5.5 0 0 0-.5-.5z"/></svg>
</div>`;
}
actionDelete = ` actionDelete = `
<div onclick="deletePath(${index})" class="action-btn" id="deleteBtn${index}" title="Delete ${encodedName}"> <div onclick="deletePath(${index})" class="action-btn" id="deleteBtn${index}" title="Delete">
<svg width="16" height="16" fill="currentColor"viewBox="0 0 16 16"><path d="M6.854 7.146a.5.5 0 1 0-.708.708L7.293 9l-1.147 1.146a.5.5 0 0 0 .708.708L8 9.707l1.146 1.147a.5.5 0 0 0 .708-.708L8.707 9l1.147-1.146a.5.5 0 0 0-.708-.708L8 8.293 6.854 7.146z"/><path d="M14 14V4.5L9.5 0H4a2 2 0 0 0-2 2v12a2 2 0 0 0 2 2h8a2 2 0 0 0 2-2zM9.5 3A1.5 1.5 0 0 0 11 4.5h2V14a1 1 0 0 1-1 1H4a1 1 0 0 1-1-1V2a1 1 0 0 1 1-1h5.5v2z"/></svg> <svg width="16" height="16" fill="currentColor"viewBox="0 0 16 16"><path d="M6.854 7.146a.5.5 0 1 0-.708.708L7.293 9l-1.147 1.146a.5.5 0 0 0 .708.708L8 9.707l1.146 1.147a.5.5 0 0 0 .708-.708L8.707 9l1.147-1.146a.5.5 0 0 0-.708-.708L8 8.293 6.854 7.146z"/><path d="M14 14V4.5L9.5 0H4a2 2 0 0 0-2 2v12a2 2 0 0 0 2 2h8a2 2 0 0 0 2-2zM9.5 3A1.5 1.5 0 0 0 11 4.5h2V14a1 1 0 0 1-1 1H4a1 1 0 0 1-1-1V2a1 1 0 0 1 1-1h5.5v2z"/></svg>
</div>`; </div>`;
} }
let actionCell = ` let actionCell = `
<td class="cell-actions"> <td class="cell-actions">
${actionDownload} ${actionDownload}
${actionMove}
${actionDelete} ${actionDelete}
</td>` </td>`
@@ -192,7 +312,7 @@ function addPath(file, index) {
${getSvg(file.path_type)} ${getSvg(file.path_type)}
</td> </td>
<td class="path cell-name"> <td class="path cell-name">
<a href="${encodedUrl}">${encodedName}</a> <a href="${url}">${encodedName}</a>
</td> </td>
<td class="cell-mtime">${formatMtime(file.mtime)}</td> <td class="cell-mtime">${formatMtime(file.mtime)}</td>
<td class="cell-size">${formatSize(file.size).join(" ")}</td> <td class="cell-size">${formatSize(file.size).join(" ")}</td>
@@ -201,7 +321,7 @@ function addPath(file, index) {
} }
/** /**
* Delete pathitem * Delete path
* @param {number} index * @param {number} index
* @returns * @returns
*/ */
@@ -231,6 +351,46 @@ async function deletePath(index) {
} }
} }
/**
 * Move a file or folder to a new location via an HTTP MOVE request.
 *
 * Prompts the user for the destination path (relative to the server's
 * uri prefix), issues the MOVE, and navigates to the destination's
 * parent directory on success. Failures are reported via alert().
 *
 * @param {number} index - index into DATA.paths
 * @returns
 */
async function movePath(index) {
  const file = DATA.paths[index];
  if (!file) return;
  const srcUrl = getUrl(file.name);
  const srcUrlObj = new URL(srcUrl);
  const prefix = DATA.uri_prefix.slice(0, -1);
  const currentPath = decodeURIComponent(srcUrlObj.pathname.slice(prefix.length));
  let targetPath = prompt("Enter new path", currentPath);
  if (!targetPath) return;
  if (!targetPath.startsWith("/")) {
    targetPath = "/" + targetPath;
  }
  if (currentPath === targetPath) return;
  // Encode each segment individually so "/" separators survive intact.
  const encodedTarget = targetPath.split("/").map(encodeURIComponent).join("/");
  const destUrl = srcUrlObj.origin + prefix + encodedTarget;
  try {
    const res = await fetch(srcUrl, {
      method: "MOVE",
      headers: {
        "Destination": destUrl,
      }
    });
    if (res.status < 200 || res.status >= 300) {
      throw new Error(await res.text());
    }
    // Jump to the destination's parent directory so the move is visible.
    location.href = destUrl.split("/").slice(0, -1).join("/");
  } catch (err) {
    alert(`Cannot move \`${currentPath}\` to \`${targetPath}\`, ${err.message}`);
  }
}
function dropzone() { function dropzone() {
["drag", "dragstart", "dragend", "dragover", "dragenter", "dragleave", "drop"].forEach(name => { ["drag", "dragstart", "dragend", "dragover", "dragenter", "dragleave", "drop"].forEach(name => {
document.addEventListener(name, e => { document.addEventListener(name, e => {
@@ -255,6 +415,24 @@ function dropzone() {
}); });
} }
/**
 * Create a folder via an HTTP MKCOL request, then navigate into it.
 *
 * On any failure — network error or a non-2xx response — the error
 * is reported via alert(). (Previously a non-2xx response was silently
 * ignored, unlike movePath/deletePath which surface the server's message.)
 *
 * @param {string} name
 */
async function createFolder(name) {
  const url = getUrl(name);
  try {
    const res = await fetch(url, {
      method: "MKCOL",
    });
    if (res.status >= 200 && res.status < 300) {
      location.href = url;
    } else {
      // Surface the server's error body instead of swallowing it.
      throw new Error(await res.text());
    }
  } catch (err) {
    alert(`Cannot create folder \`${name}\`, ${err.message}`);
  }
}
async function addFileEntries(entries, dirs) { async function addFileEntries(entries, dirs) {
for (const entry of entries) { for (const entry of entries) {
if (entry.isFile) { if (entry.isFile) {
@@ -272,7 +450,7 @@ async function addFileEntries(entries, dirs) {
function getUrl(name) { function getUrl(name) {
let url = location.href.split('?')[0]; let url = location.href.split('?')[0];
if (!url.endsWith("/")) url += "/"; if (!url.endsWith("/")) url += "/";
url += encodeURI(name); url += name.split("/").map(encodeURIComponent).join("/");
return url; return url;
} }
@@ -314,9 +492,9 @@ function formatSize(size) {
function formatDuration(seconds) { function formatDuration(seconds) {
seconds = Math.ceil(seconds); seconds = Math.ceil(seconds);
let h = Math.floor(seconds / 3600); const h = Math.floor(seconds / 3600);
let m = Math.floor((seconds - h * 3600) / 60); const m = Math.floor((seconds - h * 3600) / 60);
let s = seconds - h * 3600 - m * 60 const s = seconds - h * 3600 - m * 60
return `${padZero(h, 2)}:${padZero(m, 2)}:${padZero(s, 2)}`; return `${padZero(h, 2)}:${padZero(m, 2)}:${padZero(s, 2)}`;
} }
@@ -337,35 +515,33 @@ function encodedStr(rawStr) {
function ready() { function ready() {
document.title = `Index of ${DATA.href} - Dufs`; document.title = `Index of ${DATA.href} - Dufs`;
$pathsTable = document.querySelector(".paths-table") $pathsTable = document.querySelector(".paths-table")
$pathsTableHead = document.querySelector(".paths-table thead");
$pathsTableBody = document.querySelector(".paths-table tbody"); $pathsTableBody = document.querySelector(".paths-table tbody");
$uploadersTable = document.querySelector(".uploaders-table"); $uploadersTable = document.querySelector(".uploaders-table");
$emptyFolder = document.querySelector(".empty-folder"); $emptyFolder = document.querySelector(".empty-folder");
$newFolder = document.querySelector(".new-folder");
if (DATA.allow_search) { if (DATA.allow_search) {
document.querySelector(".searchbar").classList.remove("hidden"); document.querySelector(".searchbar").classList.remove("hidden");
if (params.q) { if (PARAMS.q) {
document.getElementById('search').value = params.q; document.getElementById('search').value = PARAMS.q;
} }
} }
addBreadcrumb(DATA.href, DATA.uri_prefix); addBreadcrumb(DATA.href, DATA.uri_prefix);
if (Array.isArray(DATA.paths)) { renderPathsTableHead();
const len = DATA.paths.length; renderPathsTableBody();
if (len > 0) {
$pathsTable.classList.remove("hidden");
}
for (let i = 0; i < len; i++) {
addPath(DATA.paths[i], i);
}
if (len == 0) {
$emptyFolder.textContent = dirEmptyNote;
$emptyFolder.classList.remove("hidden");
}
}
if (DATA.allow_upload) { if (DATA.allow_upload) {
dropzone(); dropzone();
document.querySelector(".upload-control").classList.remove("hidden"); if (DATA.allow_delete) {
$newFolder.classList.remove("hidden");
$newFolder.addEventListener("click", () => {
const name = prompt("Enter name of new folder");
if (name) createFolder(name);
});
}
document.querySelector(".upload-file").classList.remove("hidden");
document.getElementById("file").addEventListener("change", e => { document.getElementById("file").addEventListener("change", e => {
const files = e.target.files; const files = e.target.files;
for (let file of files) { for (let file of files) {

View File

@@ -1,4 +1,5 @@
use clap::{AppSettings, Arg, ArgMatches, Command}; use clap::{value_parser, AppSettings, Arg, ArgAction, ArgMatches, Command};
use clap_complete::{generate, Generator, Shell};
#[cfg(feature = "tls")] #[cfg(feature = "tls")]
use rustls::{Certificate, PrivateKey}; use rustls::{Certificate, PrivateKey};
use std::env; use std::env;
@@ -7,11 +8,13 @@ use std::path::{Path, PathBuf};
use crate::auth::AccessControl; use crate::auth::AccessControl;
use crate::auth::AuthMethod; use crate::auth::AuthMethod;
use crate::log_http::{LogHttp, DEFAULT_LOG_FORMAT};
#[cfg(feature = "tls")] #[cfg(feature = "tls")]
use crate::tls::{load_certs, load_private_key}; use crate::tls::{load_certs, load_private_key};
use crate::utils::encode_uri;
use crate::BoxResult; use crate::BoxResult;
fn app() -> Command<'static> { pub fn build_cli() -> Command<'static> {
let app = Command::new(env!("CARGO_CRATE_NAME")) let app = Command::new(env!("CARGO_CRATE_NAME"))
.version(env!("CARGO_PKG_VERSION")) .version(env!("CARGO_PKG_VERSION"))
.author(env!("CARGO_PKG_AUTHORS")) .author(env!("CARGO_PKG_AUTHORS"))
@@ -27,7 +30,8 @@ fn app() -> Command<'static> {
.long("bind") .long("bind")
.help("Specify bind address") .help("Specify bind address")
.multiple_values(true) .multiple_values(true)
.multiple_occurrences(true) .value_delimiter(',')
.action(ArgAction::Append)
.value_name("addr"), .value_name("addr"),
) )
.arg( .arg(
@@ -39,7 +43,7 @@ fn app() -> Command<'static> {
.value_name("port"), .value_name("port"),
) )
.arg( .arg(
Arg::new("path") Arg::new("root")
.default_value(".") .default_value(".")
.allow_invalid_utf8(true) .allow_invalid_utf8(true)
.help("Specific path to serve"), .help("Specific path to serve"),
@@ -48,12 +52,12 @@ fn app() -> Command<'static> {
Arg::new("path-prefix") Arg::new("path-prefix")
.long("path-prefix") .long("path-prefix")
.value_name("path") .value_name("path")
.help("Specify an path prefix"), .help("Specify a path prefix"),
) )
.arg( .arg(
Arg::new("hidden") Arg::new("hidden")
.long("hidden") .long("hidden")
.help("Hide directories from directory listings, separated by `,`") .help("Hide paths from directory listings, separated by `,`")
.value_name("value"), .value_name("value"),
) )
.arg( .arg(
@@ -61,8 +65,9 @@ fn app() -> Command<'static> {
.short('a') .short('a')
.long("auth") .long("auth")
.help("Add auth for path") .help("Add auth for path")
.action(ArgAction::Append)
.multiple_values(true) .multiple_values(true)
.multiple_occurrences(true) .value_delimiter(',')
.value_name("rule"), .value_name("rule"),
) )
.arg( .arg(
@@ -118,6 +123,13 @@ fn app() -> Command<'static> {
Arg::new("render-spa") Arg::new("render-spa")
.long("render-spa") .long("render-spa")
.help("Serve SPA(Single Page Application)"), .help("Serve SPA(Single Page Application)"),
)
.arg(
Arg::new("assets")
.long("assets")
.help("Use custom assets to override builtin assets")
.allow_invalid_utf8(true)
.value_name("path")
); );
#[cfg(feature = "tls")] #[cfg(feature = "tls")]
@@ -135,11 +147,23 @@ fn app() -> Command<'static> {
.help("Path to the SSL/TLS certificate's private key"), .help("Path to the SSL/TLS certificate's private key"),
); );
app app.arg(
Arg::new("log-format")
.long("log-format")
.value_name("format")
.help("Customize http log format"),
)
.arg(
Arg::new("completions")
.long("completions")
.value_name("shell")
.value_parser(value_parser!(Shell))
.help("Print shell completion script for <shell>"),
)
} }
pub fn matches() -> ArgMatches { pub fn print_completions<G: Generator>(gen: G, cmd: &mut Command) {
app().get_matches() generate(gen, cmd, cmd.get_name().to_string(), &mut std::io::stdout());
} }
#[derive(Debug)] #[derive(Debug)]
@@ -150,7 +174,7 @@ pub struct Args {
pub path_is_file: bool, pub path_is_file: bool,
pub path_prefix: String, pub path_prefix: String,
pub uri_prefix: String, pub uri_prefix: String,
pub hidden: String, pub hidden: Vec<String>,
pub auth_method: AuthMethod, pub auth_method: AuthMethod,
pub auth: AccessControl, pub auth: AccessControl,
pub allow_upload: bool, pub allow_upload: bool,
@@ -161,6 +185,8 @@ pub struct Args {
pub render_spa: bool, pub render_spa: bool,
pub render_try_index: bool, pub render_try_index: bool,
pub enable_cors: bool, pub enable_cors: bool,
pub assets_path: Option<PathBuf>,
pub log_http: LogHttp,
#[cfg(feature = "tls")] #[cfg(feature = "tls")]
pub tls: Option<(Vec<Certificate>, PrivateKey)>, pub tls: Option<(Vec<Certificate>, PrivateKey)>,
#[cfg(not(feature = "tls"))] #[cfg(not(feature = "tls"))]
@@ -179,7 +205,7 @@ impl Args {
.map(|v| v.collect()) .map(|v| v.collect())
.unwrap_or_else(|| vec!["0.0.0.0", "::"]); .unwrap_or_else(|| vec!["0.0.0.0", "::"]);
let addrs: Vec<IpAddr> = Args::parse_addrs(&addrs)?; let addrs: Vec<IpAddr> = Args::parse_addrs(&addrs)?;
let path = Args::parse_path(matches.value_of_os("path").unwrap_or_default())?; let path = Args::parse_path(matches.value_of_os("root").unwrap_or_default())?;
let path_is_file = path.metadata()?.is_file(); let path_is_file = path.metadata()?.is_file();
let path_prefix = matches let path_prefix = matches
.value_of("path-prefix") .value_of("path-prefix")
@@ -188,11 +214,11 @@ impl Args {
let uri_prefix = if path_prefix.is_empty() { let uri_prefix = if path_prefix.is_empty() {
"/".to_owned() "/".to_owned()
} else { } else {
format!("/{}/", &path_prefix) format!("/{}/", &encode_uri(&path_prefix))
}; };
let hidden: String = matches let hidden: Vec<String> = matches
.value_of("hidden") .value_of("hidden")
.map(|v| format!(",{},", v)) .map(|v| v.split(',').map(|x| x.to_string()).collect())
.unwrap_or_default(); .unwrap_or_default();
let enable_cors = matches.is_present("enable-cors"); let enable_cors = matches.is_present("enable-cors");
let auth: Vec<&str> = matches let auth: Vec<&str> = matches
@@ -222,6 +248,14 @@ impl Args {
}; };
#[cfg(not(feature = "tls"))] #[cfg(not(feature = "tls"))]
let tls = None; let tls = None;
let log_http: LogHttp = matches
.value_of("log-format")
.unwrap_or(DEFAULT_LOG_FORMAT)
.parse()?;
let assets_path = match matches.value_of_os("assets") {
Some(v) => Some(Args::parse_assets_path(v)?),
None => None,
};
Ok(Args { Ok(Args {
addrs, addrs,
@@ -242,6 +276,8 @@ impl Args {
render_try_index, render_try_index,
render_spa, render_spa,
tls, tls,
log_http,
assets_path,
}) })
} }
@@ -277,4 +313,12 @@ impl Args {
}) })
.map_err(|err| format!("Failed to access path `{}`: {}", path.display(), err,).into()) .map_err(|err| format!("Failed to access path `{}`: {}", path.display(), err,).into())
} }
/// Validate and canonicalize the custom assets directory given via `--assets`.
///
/// The path is resolved with `parse_path` and must contain an `index.html`,
/// because the server substitutes that file for the builtin index page.
///
/// # Errors
/// Returns an error if the path is inaccessible or lacks `index.html`.
fn parse_assets_path<P: AsRef<Path>>(path: P) -> BoxResult<PathBuf> {
    let path = Self::parse_path(path)?;
    // Without index.html the override cannot drive the UI at all.
    if !path.join("index.html").exists() {
        return Err(format!("Path `{}` doesn't contain index.html", path.display()).into());
    }
    Ok(path)
}
} }

View File

@@ -12,6 +12,7 @@ use crate::utils::encode_uri;
use crate::BoxResult; use crate::BoxResult;
const REALM: &str = "DUFS"; const REALM: &str = "DUFS";
const DIGEST_AUTH_TIMEOUT: u32 = 86400;
lazy_static! { lazy_static! {
static ref NONCESTARTHASH: Context = { static ref NONCESTARTHASH: Context = {
@@ -197,6 +198,24 @@ impl AuthMethod {
} }
} }
} }
/// Extract the username from an `Authorization` header value, if any.
///
/// Supports both auth schemes: for Basic, the user is the text before the
/// first `:` in the base64-decoded credentials; for Digest, it is the
/// `username` field of the header's key/value list. Returns `None` when
/// the header does not match this method's scheme or fails to parse.
pub fn get_user(&self, authorization: &HeaderValue) -> Option<String> {
    match self {
        AuthMethod::Basic => {
            let encoded = strip_prefix(authorization.as_bytes(), b"Basic ")?;
            let decoded: Vec<u8> = base64::decode(encoded).ok()?;
            let credentials = std::str::from_utf8(&decoded).ok()?;
            // Credentials are `user:password`; everything before the first colon.
            credentials.split(':').next().map(|user| user.to_string())
        }
        AuthMethod::Digest => {
            let digest_value = strip_prefix(authorization.as_bytes(), b"Digest ")?;
            let fields = to_headermap(digest_value).ok()?;
            let raw = fields.get(b"username".as_ref())?;
            std::str::from_utf8(*raw).ok().map(|user| user.to_string())
        }
    }
}
pub fn validate( pub fn validate(
&self, &self,
authorization: &HeaderValue, authorization: &HeaderValue,
@@ -206,10 +225,9 @@ impl AuthMethod {
) -> Option<()> { ) -> Option<()> {
match self { match self {
AuthMethod::Basic => { AuthMethod::Basic => {
let value: Vec<u8> = let basic_value: Vec<u8> =
base64::decode(strip_prefix(authorization.as_bytes(), b"Basic ").unwrap()) base64::decode(strip_prefix(authorization.as_bytes(), b"Basic ")?).ok()?;
.unwrap(); let parts: Vec<&str> = std::str::from_utf8(&basic_value).ok()?.split(':').collect();
let parts: Vec<&str> = std::str::from_utf8(&value).unwrap().split(':').collect();
if parts[0] != auth_user { if parts[0] != auth_user {
return None; return None;
@@ -228,13 +246,13 @@ impl AuthMethod {
} }
AuthMethod::Digest => { AuthMethod::Digest => {
let digest_value = strip_prefix(authorization.as_bytes(), b"Digest ")?; let digest_value = strip_prefix(authorization.as_bytes(), b"Digest ")?;
let user_vals = to_headermap(digest_value).ok()?; let digest_vals = to_headermap(digest_value).ok()?;
if let (Some(username), Some(nonce), Some(user_response)) = ( if let (Some(username), Some(nonce), Some(user_response)) = (
user_vals digest_vals
.get(b"username".as_ref()) .get(b"username".as_ref())
.and_then(|b| std::str::from_utf8(*b).ok()), .and_then(|b| std::str::from_utf8(*b).ok()),
user_vals.get(b"nonce".as_ref()), digest_vals.get(b"nonce".as_ref()),
user_vals.get(b"response".as_ref()), digest_vals.get(b"response".as_ref()),
) { ) {
match validate_nonce(nonce) { match validate_nonce(nonce) {
Ok(true) => {} Ok(true) => {}
@@ -246,12 +264,12 @@ impl AuthMethod {
let mut ha = Context::new(); let mut ha = Context::new();
ha.consume(method); ha.consume(method);
ha.consume(b":"); ha.consume(b":");
if let Some(uri) = user_vals.get(b"uri".as_ref()) { if let Some(uri) = digest_vals.get(b"uri".as_ref()) {
ha.consume(uri); ha.consume(uri);
} }
let ha = format!("{:x}", ha.compute()); let ha = format!("{:x}", ha.compute());
let mut correct_response = None; let mut correct_response = None;
if let Some(qop) = user_vals.get(b"qop".as_ref()) { if let Some(qop) = digest_vals.get(b"qop".as_ref()) {
if qop == &b"auth".as_ref() || qop == &b"auth-int".as_ref() { if qop == &b"auth".as_ref() || qop == &b"auth-int".as_ref() {
correct_response = Some({ correct_response = Some({
let mut c = Context::new(); let mut c = Context::new();
@@ -259,11 +277,11 @@ impl AuthMethod {
c.consume(b":"); c.consume(b":");
c.consume(nonce); c.consume(nonce);
c.consume(b":"); c.consume(b":");
if let Some(nc) = user_vals.get(b"nc".as_ref()) { if let Some(nc) = digest_vals.get(b"nc".as_ref()) {
c.consume(nc); c.consume(nc);
} }
c.consume(b":"); c.consume(b":");
if let Some(cnonce) = user_vals.get(b"cnonce".as_ref()) { if let Some(cnonce) = digest_vals.get(b"cnonce".as_ref()) {
c.consume(cnonce); c.consume(cnonce);
} }
c.consume(b":"); c.consume(b":");
@@ -317,8 +335,7 @@ fn validate_nonce(nonce: &[u8]) -> Result<bool, ()> {
h.consume(secs_nonce.to_be_bytes()); h.consume(secs_nonce.to_be_bytes());
let h = format!("{:x}", h.compute()); let h = format!("{:x}", h.compute());
if h[..26] == n[8..34] { if h[..26] == n[8..34] {
return Ok(dur < 300); // from the last 5min return Ok(dur < DIGEST_AUTH_TIMEOUT);
//Authentication-Info ?
} }
} }
} }
@@ -340,12 +357,12 @@ fn strip_prefix<'a>(search: &'a [u8], prefix: &[u8]) -> Option<&'a [u8]> {
fn to_headermap(header: &[u8]) -> Result<HashMap<&[u8], &[u8]>, ()> { fn to_headermap(header: &[u8]) -> Result<HashMap<&[u8], &[u8]>, ()> {
let mut sep = Vec::new(); let mut sep = Vec::new();
let mut asign = Vec::new(); let mut assign = Vec::new();
let mut i: usize = 0; let mut i: usize = 0;
let mut esc = false; let mut esc = false;
for c in header { for c in header {
match (c, esc) { match (c, esc) {
(b'=', false) => asign.push(i), (b'=', false) => assign.push(i),
(b',', false) => sep.push(i), (b',', false) => sep.push(i),
(b'"', false) => esc = true, (b'"', false) => esc = true,
(b'"', true) => esc = false, (b'"', true) => esc = false,
@@ -357,7 +374,7 @@ fn to_headermap(header: &[u8]) -> Result<HashMap<&[u8], &[u8]>, ()> {
i = 0; i = 0;
let mut ret = HashMap::new(); let mut ret = HashMap::new();
for (&k, &a) in sep.iter().zip(asign.iter()) { for (&k, &a) in sep.iter().zip(assign.iter()) {
while header[i] == b' ' { while header[i] == b' ' {
i += 1; i += 1;
} }

99
src/log_http.rs Normal file
View File

@@ -0,0 +1,99 @@
use std::{collections::HashMap, str::FromStr, sync::Arc};
use crate::{args::Args, server::Request};
/// Default access-log format: client IP, the request line, and response status.
pub const DEFAULT_LOG_FORMAT: &str = r#"$remote_addr "$request" $status"#;
/// An HTTP access-log formatter, parsed from a `--log-format` string.
#[derive(Debug)]
pub struct LogHttp {
    /// Ordered format elements, emitted in sequence by `log`.
    elems: Vec<LogElement>,
}
/// One piece of a parsed log format string.
#[derive(Debug)]
enum LogElement {
    /// A `$name` placeholder resolved from per-request data (see `LogHttp::data`).
    Variable(String),
    /// A `$http_*` placeholder resolved from a request header
    /// (underscores in the format map to `-` in the header name).
    Header(String),
    /// Verbatim text copied into the log line.
    Literal(String),
}
impl LogHttp {
    /// Collect the per-request values needed to render a log line.
    ///
    /// Fills `$request` (method + uri) and `$remote_user` (taken from the
    /// `authorization` header via the configured auth method) plus any
    /// `$http_*` header values; literals need no data and other variables
    /// (e.g. `$remote_addr`, `$status`) are inserted by the caller.
    pub fn data(&self, req: &Request, args: &Arc<Args>) -> HashMap<String, String> {
        let mut values = HashMap::default();
        for elem in self.elems.iter() {
            match elem {
                LogElement::Variable(name) => match name.as_str() {
                    "request" => {
                        values.insert(name.clone(), format!("{} {}", req.method(), req.uri()));
                    }
                    "remote_user" => {
                        let user = req
                            .headers()
                            .get("authorization")
                            .and_then(|v| args.auth_method.get_user(v));
                        if let Some(user) = user {
                            values.insert(name.clone(), user);
                        }
                    }
                    _ => {}
                },
                LogElement::Header(name) => {
                    if let Some(value) = req.headers().get(name).and_then(|v| v.to_str().ok()) {
                        values.insert(name.clone(), value.to_string());
                    }
                }
                LogElement::Literal(_) => {}
            }
        }
        values
    }
    /// Emit one log line from previously collected `data`.
    ///
    /// Missing values render as `-`. An empty format logs nothing; when
    /// `err` is given the line goes to the error log with the error appended,
    /// otherwise to the info log.
    pub fn log(&self, data: &HashMap<String, String>, err: Option<String>) {
        if self.elems.is_empty() {
            return;
        }
        let output: String = self
            .elems
            .iter()
            .map(|elem| match elem {
                LogElement::Literal(text) => text.as_str(),
                LogElement::Header(name) | LogElement::Variable(name) => {
                    data.get(name).map(|v| v.as_str()).unwrap_or("-")
                }
            })
            .collect();
        match err {
            Some(err) => error!("{} {}", output, err),
            None => info!("{}", output),
        }
    }
}
impl FromStr for LogHttp {
type Err = Box<dyn std::error::Error>;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut elems = vec![];
let mut is_var = false;
let mut cache = String::new();
for c in format!("{} ", s).chars() {
if c == '$' {
if !cache.is_empty() {
elems.push(LogElement::Literal(cache.to_string()));
}
cache.clear();
is_var = true;
} else if is_var && !(c.is_alphanumeric() || c == '_') {
if let Some(value) = cache.strip_prefix("$http_") {
elems.push(LogElement::Header(value.replace('_', "-").to_string()));
} else if let Some(value) = cache.strip_prefix('$') {
elems.push(LogElement::Variable(value.to_string()));
}
cache.clear();
is_var = false;
}
cache.push(c);
}
let cache = cache.trim();
if !cache.is_empty() {
elems.push(LogElement::Literal(cache.to_string()));
}
Ok(Self { elems })
}
}

View File

@@ -1,5 +1,6 @@
mod args; mod args;
mod auth; mod auth;
mod log_http;
mod logger; mod logger;
mod server; mod server;
mod streamer; mod streamer;
@@ -10,14 +11,16 @@ mod utils;
#[macro_use] #[macro_use]
extern crate log; extern crate log;
use crate::args::{matches, Args}; use crate::args::{build_cli, print_completions, Args};
use crate::server::{Request, Server}; use crate::server::{Request, Server};
#[cfg(feature = "tls")] #[cfg(feature = "tls")]
use crate::tls::{TlsAcceptor, TlsStream}; use crate::tls::{TlsAcceptor, TlsStream};
use std::net::{IpAddr, SocketAddr, TcpListener as StdTcpListener}; use std::net::{IpAddr, SocketAddr, TcpListener as StdTcpListener};
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc; use std::sync::Arc;
use clap_complete::Shell;
use futures::future::join_all; use futures::future::join_all;
use tokio::net::TcpListener; use tokio::net::TcpListener;
use tokio::task::JoinHandle; use tokio::task::JoinHandle;
@@ -36,9 +39,17 @@ async fn main() {
async fn run() -> BoxResult<()> { async fn run() -> BoxResult<()> {
logger::init().map_err(|e| format!("Failed to init logger, {}", e))?; logger::init().map_err(|e| format!("Failed to init logger, {}", e))?;
let args = Args::parse(matches())?; let cmd = build_cli();
let matches = cmd.get_matches();
if let Some(generator) = matches.get_one::<Shell>("completions") {
let mut cmd = build_cli();
print_completions(*generator, &mut cmd);
return Ok(());
}
let args = Args::parse(matches)?;
let args = Arc::new(args); let args = Arc::new(args);
let handles = serve(args.clone())?; let running = Arc::new(AtomicBool::new(true));
let handles = serve(args.clone(), running.clone())?;
print_listening(args)?; print_listening(args)?;
tokio::select! { tokio::select! {
@@ -51,20 +62,24 @@ async fn run() -> BoxResult<()> {
Ok(()) Ok(())
}, },
_ = shutdown_signal() => { _ = shutdown_signal() => {
running.store(false, Ordering::SeqCst);
Ok(()) Ok(())
}, },
} }
} }
fn serve(args: Arc<Args>) -> BoxResult<Vec<JoinHandle<Result<(), hyper::Error>>>> { fn serve(
let inner = Arc::new(Server::new(args.clone())); args: Arc<Args>,
running: Arc<AtomicBool>,
) -> BoxResult<Vec<JoinHandle<Result<(), hyper::Error>>>> {
let inner = Arc::new(Server::new(args.clone(), running));
let mut handles = vec![]; let mut handles = vec![];
let port = args.port; let port = args.port;
for ip in args.addrs.iter() { for ip in args.addrs.iter() {
let inner = inner.clone(); let inner = inner.clone();
let incoming = create_addr_incoming(SocketAddr::new(*ip, port)) let incoming = create_addr_incoming(SocketAddr::new(*ip, port))
.map_err(|e| format!("Failed to bind `{}:{}`, {}", ip, port, e))?; .map_err(|e| format!("Failed to bind `{}:{}`, {}", ip, port, e))?;
let serv_func = move |remote_addr: SocketAddr| { let serve_func = move |remote_addr: SocketAddr| {
let inner = inner.clone(); let inner = inner.clone();
async move { async move {
Ok::<_, hyper::Error>(service_fn(move |req: Request| { Ok::<_, hyper::Error>(service_fn(move |req: Request| {
@@ -84,7 +99,7 @@ fn serve(args: Arc<Args>) -> BoxResult<Vec<JoinHandle<Result<(), hyper::Error>>>
let accepter = TlsAcceptor::new(config.clone(), incoming); let accepter = TlsAcceptor::new(config.clone(), incoming);
let new_service = make_service_fn(move |socket: &TlsStream| { let new_service = make_service_fn(move |socket: &TlsStream| {
let remote_addr = socket.remote_addr(); let remote_addr = socket.remote_addr();
serv_func(remote_addr) serve_func(remote_addr)
}); });
let server = tokio::spawn(hyper::Server::builder(accepter).serve(new_service)); let server = tokio::spawn(hyper::Server::builder(accepter).serve(new_service));
handles.push(server); handles.push(server);
@@ -96,7 +111,7 @@ fn serve(args: Arc<Args>) -> BoxResult<Vec<JoinHandle<Result<(), hyper::Error>>>
None => { None => {
let new_service = make_service_fn(move |socket: &AddrStream| { let new_service = make_service_fn(move |socket: &AddrStream| {
let remote_addr = socket.remote_addr(); let remote_addr = socket.remote_addr();
serv_func(remote_addr) serve_func(remote_addr)
}); });
let server = tokio::spawn(hyper::Server::builder(incoming).serve(new_service)); let server = tokio::spawn(hyper::Server::builder(incoming).serve(new_service));
handles.push(server); handles.push(server);

View File

@@ -1,29 +1,31 @@
use crate::streamer::Streamer; use crate::streamer::Streamer;
use crate::utils::{decode_uri, encode_uri, get_file_name, try_get_file_name}; use crate::utils::{decode_uri, encode_uri, get_file_name, glob, try_get_file_name};
use crate::{Args, BoxResult}; use crate::{Args, BoxResult};
use async_walkdir::{Filtering, WalkDir}; use walkdir::WalkDir;
use xml::escape::escape_str_pcdata; use xml::escape::escape_str_pcdata;
use async_zip::write::{EntryOptions, ZipFileWriter}; use async_zip::write::{EntryOptions, ZipFileWriter};
use async_zip::Compression; use async_zip::Compression;
use chrono::{TimeZone, Utc}; use chrono::{TimeZone, Utc};
use futures::stream::StreamExt;
use futures::TryStreamExt; use futures::TryStreamExt;
use headers::{ use headers::{
AcceptRanges, AccessControlAllowCredentials, AccessControlAllowHeaders, AcceptRanges, AccessControlAllowCredentials, AccessControlAllowOrigin, Connection,
AccessControlAllowOrigin, Connection, ContentLength, ContentType, ETag, HeaderMap, ContentLength, ContentType, ETag, HeaderMap, HeaderMapExt, IfModifiedSince, IfNoneMatch,
HeaderMapExt, IfModifiedSince, IfNoneMatch, IfRange, LastModified, Range, IfRange, LastModified, Range,
}; };
use hyper::header::{ use hyper::header::{
HeaderValue, ACCEPT, AUTHORIZATION, CONTENT_DISPOSITION, CONTENT_LENGTH, CONTENT_RANGE, HeaderValue, AUTHORIZATION, CONTENT_DISPOSITION, CONTENT_LENGTH, CONTENT_RANGE, CONTENT_TYPE,
CONTENT_TYPE, ORIGIN, RANGE, WWW_AUTHENTICATE, RANGE, WWW_AUTHENTICATE,
}; };
use hyper::{Body, Method, StatusCode, Uri}; use hyper::{Body, Method, StatusCode, Uri};
use serde::Serialize; use serde::Serialize;
use std::borrow::Cow;
use std::collections::HashMap;
use std::fs::Metadata; use std::fs::Metadata;
use std::io::SeekFrom; use std::io::SeekFrom;
use std::net::SocketAddr; use std::net::SocketAddr;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc; use std::sync::Arc;
use std::time::SystemTime; use std::time::SystemTime;
use tokio::fs::File; use tokio::fs::File;
@@ -45,14 +47,37 @@ const BUF_SIZE: usize = 65536;
pub struct Server { pub struct Server {
args: Arc<Args>, args: Arc<Args>,
assets_prefix: String, assets_prefix: String,
html: Cow<'static, str>,
single_file_req_paths: Vec<String>,
running: Arc<AtomicBool>,
} }
impl Server { impl Server {
pub fn new(args: Arc<Args>) -> Self { pub fn new(args: Arc<Args>, running: Arc<AtomicBool>) -> Self {
let assets_prefix = format!("{}__dufs_v{}_", args.uri_prefix, env!("CARGO_PKG_VERSION")); let assets_prefix = format!("{}__dufs_v{}_", args.uri_prefix, env!("CARGO_PKG_VERSION"));
let single_file_req_paths = if args.path_is_file {
vec![
args.uri_prefix.to_string(),
args.uri_prefix[0..args.uri_prefix.len() - 1].to_string(),
encode_uri(&format!(
"{}{}",
&args.uri_prefix,
get_file_name(&args.path)
)),
]
} else {
vec![]
};
let html = match args.assets_path.as_ref() {
Some(path) => Cow::Owned(std::fs::read_to_string(path.join("index.html")).unwrap()),
None => Cow::Borrowed(INDEX_HTML),
};
Self { Self {
args, args,
running,
single_file_req_paths,
assets_prefix, assets_prefix,
html,
} }
} }
@@ -61,16 +86,17 @@ impl Server {
req: Request, req: Request,
addr: SocketAddr, addr: SocketAddr,
) -> Result<Response, hyper::Error> { ) -> Result<Response, hyper::Error> {
let method = req.method().clone();
let uri = req.uri().clone(); let uri = req.uri().clone();
let assets_prefix = self.assets_prefix.clone(); let assets_prefix = self.assets_prefix.clone();
let enable_cors = self.args.enable_cors; let enable_cors = self.args.enable_cors;
let mut http_log_data = self.args.log_http.data(&req, &self.args);
http_log_data.insert("remote_addr".to_string(), addr.ip().to_string());
let mut res = match self.handle(req).await { let mut res = match self.clone().handle(req).await {
Ok(res) => { Ok(res) => {
let status = res.status().as_u16(); http_log_data.insert("status".to_string(), res.status().as_u16().to_string());
if !uri.path().starts_with(&assets_prefix) { if !uri.path().starts_with(&assets_prefix) {
info!(r#"{} "{} {}" - {}"#, addr.ip(), method, uri, status,); self.args.log_http.log(&http_log_data, None);
} }
res res
} }
@@ -78,8 +104,10 @@ impl Server {
let mut res = Response::default(); let mut res = Response::default();
let status = StatusCode::INTERNAL_SERVER_ERROR; let status = StatusCode::INTERNAL_SERVER_ERROR;
*res.status_mut() = status; *res.status_mut() = status;
let status = status.as_u16(); http_log_data.insert("status".to_string(), status.as_u16().to_string());
error!(r#"{} "{} {}" - {} {}"#, addr.ip(), method, uri, status, err); self.args
.log_http
.log(&http_log_data, Some(err.to_string()));
res res
} }
}; };
@@ -97,7 +125,7 @@ impl Server {
let headers = req.headers(); let headers = req.headers();
let method = req.method().clone(); let method = req.method().clone();
if method == Method::GET && self.handle_embed_assets(req_path, &mut res).await? { if method == Method::GET && self.handle_assets(req_path, headers, &mut res).await? {
return Ok(res); return Ok(res);
} }
@@ -116,8 +144,16 @@ impl Server {
let head_only = method == Method::HEAD; let head_only = method == Method::HEAD;
if self.args.path_is_file { if self.args.path_is_file {
self.handle_send_file(&self.args.path, headers, head_only, &mut res) if self
.await?; .single_file_req_paths
.iter()
.any(|v| v.as_str() == req_path)
{
self.handle_send_file(&self.args.path, headers, head_only, &mut res)
.await?;
} else {
status_not_found(&mut res);
}
return Ok(res); return Ok(res);
} }
@@ -132,6 +168,9 @@ impl Server {
let path = path.as_path(); let path = path.as_path();
let query = req.uri().query().unwrap_or_default(); let query = req.uri().query().unwrap_or_default();
let query_params: HashMap<String, String> = form_urlencoded::parse(query.as_bytes())
.map(|(k, v)| (k.to_string(), v.to_string()))
.collect();
let (is_miss, is_dir, is_file, size) = match fs::metadata(path).await.ok() { let (is_miss, is_dir, is_file, size) = match fs::metadata(path).await.ok() {
Some(meta) => (false, meta.is_dir(), meta.is_file(), meta.len()), Some(meta) => (false, meta.is_dir(), meta.is_file(), meta.len()),
@@ -153,19 +192,33 @@ impl Server {
match method { match method {
Method::GET | Method::HEAD => { Method::GET | Method::HEAD => {
if is_dir { if is_dir {
if render_try_index && query == "zip" { if render_try_index {
self.handle_zip_dir(path, head_only, &mut res).await?; if query_params.contains_key("zip") {
} else if render_index || render_spa || render_try_index { self.handle_zip_dir(path, head_only, &mut res).await?;
self.handle_render_index(path, headers, head_only, &mut res) } else if allow_search && query_params.contains_key("q") {
self.handle_search_dir(path, &query_params, head_only, &mut res)
.await?;
} else {
self.handle_render_index(
path,
&query_params,
headers,
head_only,
&mut res,
)
.await?; .await?;
} else if query == "zip" { }
} else if render_index || render_spa {
self.handle_render_index(path, &query_params, headers, head_only, &mut res)
.await?;
} else if query_params.contains_key("zip") {
self.handle_zip_dir(path, head_only, &mut res).await?; self.handle_zip_dir(path, head_only, &mut res).await?;
} else if allow_search && query.starts_with("q=") { } else if allow_search && query_params.contains_key("q") {
let q = decode_uri(&query[2..]).unwrap_or_default(); self.handle_search_dir(path, &query_params, head_only, &mut res)
self.handle_search_dir(path, &q, head_only, &mut res)
.await?; .await?;
} else { } else {
self.handle_ls_dir(path, true, head_only, &mut res).await?; self.handle_ls_dir(path, true, &query_params, head_only, &mut res)
.await?;
} }
} else if is_file { } else if is_file {
self.handle_send_file(path, headers, head_only, &mut res) self.handle_send_file(path, headers, head_only, &mut res)
@@ -174,7 +227,8 @@ impl Server {
self.handle_render_spa(path, headers, head_only, &mut res) self.handle_render_spa(path, headers, head_only, &mut res)
.await?; .await?;
} else if allow_upload && req_path.ends_with('/') { } else if allow_upload && req_path.ends_with('/') {
self.handle_ls_dir(path, false, head_only, &mut res).await?; self.handle_ls_dir(path, false, &query_params, head_only, &mut res)
.await?;
} else { } else {
status_not_found(&mut res); status_not_found(&mut res);
} }
@@ -228,7 +282,7 @@ impl Server {
} else if is_miss { } else if is_miss {
status_not_found(&mut res); status_not_found(&mut res);
} else { } else {
self.handle_copy(path, headers, &mut res).await? self.handle_copy(path, &req, &mut res).await?
} }
} }
"MOVE" => { "MOVE" => {
@@ -237,7 +291,7 @@ impl Server {
} else if is_miss { } else if is_miss {
status_not_found(&mut res); status_not_found(&mut res);
} else { } else {
self.handle_move(path, headers, &mut res).await? self.handle_move(path, &req, &mut res).await?
} }
} }
"LOCK" => { "LOCK" => {
@@ -307,6 +361,7 @@ impl Server {
&self, &self,
path: &Path, path: &Path,
exist: bool, exist: bool,
query_params: &HashMap<String, String>,
head_only: bool, head_only: bool,
res: &mut Response, res: &mut Response,
) -> BoxResult<()> { ) -> BoxResult<()> {
@@ -320,48 +375,55 @@ impl Server {
} }
} }
}; };
self.send_index(path, paths, exist, head_only, res) self.send_index(path, paths, exist, query_params, head_only, res)
} }
async fn handle_search_dir( async fn handle_search_dir(
&self, &self,
path: &Path, path: &Path,
search: &str, query_params: &HashMap<String, String>,
head_only: bool, head_only: bool,
res: &mut Response, res: &mut Response,
) -> BoxResult<()> { ) -> BoxResult<()> {
let mut paths: Vec<PathItem> = vec![]; let mut paths: Vec<PathItem> = vec![];
let hidden = self.args.hidden.to_string(); let path_buf = path.to_path_buf();
let search = search.to_string(); let hidden = Arc::new(self.args.hidden.to_vec());
let mut walkdir = WalkDir::new(path).filter(move |entry| { let hidden = hidden.clone();
let hidden_cloned = hidden.clone(); let running = self.running.clone();
let search_cloned = search.clone(); let search = query_params.get("q").unwrap().to_lowercase();
async move { let search_paths = tokio::task::spawn_blocking(move || {
let mut it = WalkDir::new(&path_buf).into_iter();
let mut paths: Vec<PathBuf> = vec![];
while let Some(Ok(entry)) = it.next() {
if !running.load(Ordering::SeqCst) {
break;
}
let entry_path = entry.path(); let entry_path = entry.path();
let base_name = get_file_name(&entry_path); let base_name = get_file_name(entry_path);
if is_hidden(&hidden_cloned, base_name) { let file_type = entry.file_type();
return Filtering::IgnoreDir; if is_hidden(&hidden, base_name) {
if file_type.is_dir() {
it.skip_current_dir();
}
continue;
} }
if !base_name if !base_name.to_lowercase().contains(&search) {
.to_lowercase() continue;
.contains(&search_cloned.to_lowercase())
{
return Filtering::Ignore;
} }
if fs::symlink_metadata(entry.path()).await.is_err() { if entry.path().symlink_metadata().is_err() {
return Filtering::Ignore; continue;
} }
Filtering::Continue paths.push(entry_path.to_path_buf());
} }
}); paths
while let Some(entry) = walkdir.next().await { })
if let Ok(entry) = entry { .await?;
if let Ok(Some(item)) = self.to_pathitem(entry.path(), path.to_path_buf()).await { for search_path in search_paths.into_iter() {
paths.push(item); if let Ok(Some(item)) = self.to_pathitem(search_path, path.to_path_buf()).await {
} paths.push(item);
} }
} }
self.send_index(path, paths, true, head_only, res) self.send_index(path, paths, true, query_params, head_only, res)
} }
async fn handle_zip_dir( async fn handle_zip_dir(
@@ -387,8 +449,9 @@ impl Server {
} }
let path = path.to_owned(); let path = path.to_owned();
let hidden = self.args.hidden.clone(); let hidden = self.args.hidden.clone();
let running = self.running.clone();
tokio::spawn(async move { tokio::spawn(async move {
if let Err(e) = zip_dir(&mut writer, &path, &hidden).await { if let Err(e) = zip_dir(&mut writer, &path, &hidden, running).await {
error!("Failed to zip {}, {}", path.display(), e); error!("Failed to zip {}, {}", path.display(), e);
} }
}); });
@@ -400,6 +463,7 @@ impl Server {
async fn handle_render_index( async fn handle_render_index(
&self, &self,
path: &Path, path: &Path,
query_params: &HashMap<String, String>,
headers: &HeaderMap<HeaderValue>, headers: &HeaderMap<HeaderValue>,
head_only: bool, head_only: bool,
res: &mut Response, res: &mut Response,
@@ -414,7 +478,8 @@ impl Server {
self.handle_send_file(&index_path, headers, head_only, res) self.handle_send_file(&index_path, headers, head_only, res)
.await?; .await?;
} else if self.args.render_try_index { } else if self.args.render_try_index {
self.handle_ls_dir(path, true, head_only, res).await?; self.handle_ls_dir(path, true, query_params, head_only, res)
.await?;
} else { } else {
status_not_found(res) status_not_found(res)
} }
@@ -438,29 +503,40 @@ impl Server {
Ok(()) Ok(())
} }
async fn handle_embed_assets(&self, req_path: &str, res: &mut Response) -> BoxResult<bool> { async fn handle_assets(
&self,
req_path: &str,
headers: &HeaderMap<HeaderValue>,
res: &mut Response,
) -> BoxResult<bool> {
if let Some(name) = req_path.strip_prefix(&self.assets_prefix) { if let Some(name) = req_path.strip_prefix(&self.assets_prefix) {
match name { match self.args.assets_path.as_ref() {
"index.js" => { Some(assets_path) => {
*res.body_mut() = Body::from(INDEX_JS); let path = assets_path.join(name);
res.headers_mut().insert( self.handle_send_file(&path, headers, false, res).await?;
"content-type",
HeaderValue::from_static("application/javascript"),
);
}
"index.css" => {
*res.body_mut() = Body::from(INDEX_CSS);
res.headers_mut()
.insert("content-type", HeaderValue::from_static("text/css"));
}
"favicon.ico" => {
*res.body_mut() = Body::from(FAVICON_ICO);
res.headers_mut()
.insert("content-type", HeaderValue::from_static("image/x-icon"));
}
_ => {
return Ok(false);
} }
None => match name {
"index.js" => {
*res.body_mut() = Body::from(INDEX_JS);
res.headers_mut().insert(
"content-type",
HeaderValue::from_static("application/javascript"),
);
}
"index.css" => {
*res.body_mut() = Body::from(INDEX_CSS);
res.headers_mut()
.insert("content-type", HeaderValue::from_static("text/css"));
}
"favicon.ico" => {
*res.body_mut() = Body::from(FAVICON_ICO);
res.headers_mut()
.insert("content-type", HeaderValue::from_static("image/x-icon"));
}
_ => {
status_not_found(res);
}
},
} }
res.headers_mut().insert( res.headers_mut().insert(
"cache-control", "cache-control",
@@ -625,16 +701,10 @@ impl Server {
Ok(()) Ok(())
} }
async fn handle_copy( async fn handle_copy(&self, path: &Path, req: &Request, res: &mut Response) -> BoxResult<()> {
&self, let dest = match self.extract_dest(req, res) {
path: &Path,
headers: &HeaderMap<HeaderValue>,
res: &mut Response,
) -> BoxResult<()> {
let dest = match self.extract_dest(headers) {
Some(dest) => dest, Some(dest) => dest,
None => { None => {
*res.status_mut() = StatusCode::BAD_REQUEST;
return Ok(()); return Ok(());
} }
}; };
@@ -653,16 +723,10 @@ impl Server {
Ok(()) Ok(())
} }
async fn handle_move( async fn handle_move(&self, path: &Path, req: &Request, res: &mut Response) -> BoxResult<()> {
&self, let dest = match self.extract_dest(req, res) {
path: &Path,
headers: &HeaderMap<HeaderValue>,
res: &mut Response,
) -> BoxResult<()> {
let dest = match self.extract_dest(headers) {
Some(dest) => dest, Some(dest) => dest,
None => { None => {
*res.status_mut() = StatusCode::BAD_REQUEST;
return Ok(()); return Ok(());
} }
}; };
@@ -721,10 +785,30 @@ impl Server {
path: &Path, path: &Path,
mut paths: Vec<PathItem>, mut paths: Vec<PathItem>,
exist: bool, exist: bool,
query_params: &HashMap<String, String>,
head_only: bool, head_only: bool,
res: &mut Response, res: &mut Response,
) -> BoxResult<()> { ) -> BoxResult<()> {
paths.sort_unstable(); if let Some(sort) = query_params.get("sort") {
if sort == "name" {
paths.sort_by(|v1, v2| {
alphanumeric_sort::compare_str(v1.name.to_lowercase(), v2.name.to_lowercase())
})
} else if sort == "mtime" {
paths.sort_by(|v1, v2| v1.mtime.cmp(&v2.mtime))
} else if sort == "size" {
paths.sort_by(|v1, v2| v1.size.unwrap_or(0).cmp(&v2.size.unwrap_or(0)))
}
if query_params
.get("order")
.map(|v| v == "desc")
.unwrap_or_default()
{
paths.reverse()
}
} else {
paths.sort_unstable();
}
let href = format!("/{}", normalize_path(path.strip_prefix(&self.args.path)?)); let href = format!("/{}", normalize_path(path.strip_prefix(&self.args.path)?));
let data = IndexData { let data = IndexData {
href, href,
@@ -736,23 +820,10 @@ impl Server {
dir_exists: exist, dir_exists: exist,
}; };
let data = serde_json::to_string(&data).unwrap(); let data = serde_json::to_string(&data).unwrap();
let asset_js = format!("{}index.js", self.assets_prefix); let output = self
let asset_css = format!("{}index.css", self.assets_prefix); .html
let asset_ico = format!("{}favicon.ico", self.assets_prefix); .replace("__ASSERTS_PREFIX__", &self.assets_prefix)
let output = INDEX_HTML.replace( .replace("__INDEX_DATA__", &data);
"__SLOT__",
&format!(
r#"
<link rel="icon" type="image/x-icon" href="{}">
<link rel="stylesheet" href="{}">
<script>
DATA = {}
</script>
<script src="{}"></script>
"#,
asset_ico, asset_css, data, asset_js
),
);
res.headers_mut() res.headers_mut()
.typed_insert(ContentType::from(mime_guess::mime::TEXT_HTML_UTF_8)); .typed_insert(ContentType::from(mime_guess::mime::TEXT_HTML_UTF_8));
res.headers_mut() res.headers_mut()
@@ -781,10 +852,43 @@ DATA = {}
.unwrap_or_default() .unwrap_or_default()
} }
fn extract_dest(&self, headers: &HeaderMap<HeaderValue>) -> Option<PathBuf> { fn extract_dest(&self, req: &Request, res: &mut Response) -> Option<PathBuf> {
let headers = req.headers();
let dest_path = match self.extract_destination_header(headers) {
Some(dest) => dest,
None => {
*res.status_mut() = StatusCode::BAD_REQUEST;
return None;
}
};
let authorization = headers.get(AUTHORIZATION);
let guard_type = self.args.auth.guard(
&dest_path,
req.method(),
authorization,
self.args.auth_method.clone(),
);
if guard_type.is_reject() {
*res.status_mut() = StatusCode::FORBIDDEN;
*res.body_mut() = Body::from("Forbidden");
return None;
}
let dest = match self.extract_path(&dest_path) {
Some(dest) => dest,
None => {
*res.status_mut() = StatusCode::BAD_REQUEST;
return None;
}
};
Some(dest)
}
fn extract_destination_header(&self, headers: &HeaderMap<HeaderValue>) -> Option<String> {
let dest = headers.get("Destination")?.to_str().ok()?; let dest = headers.get("Destination")?.to_str().ok()?;
let uri: Uri = dest.parse().ok()?; let uri: Uri = dest.parse().ok()?;
self.extract_path(uri.path()) Some(uri.path().to_string())
} }
fn extract_path(&self, path: &str) -> Option<PathBuf> { fn extract_path(&self, path: &str) -> Option<PathBuf> {
@@ -969,11 +1073,19 @@ fn add_cors(res: &mut Response) {
.typed_insert(AccessControlAllowOrigin::ANY); .typed_insert(AccessControlAllowOrigin::ANY);
res.headers_mut() res.headers_mut()
.typed_insert(AccessControlAllowCredentials); .typed_insert(AccessControlAllowCredentials);
res.headers_mut().insert(
res.headers_mut().typed_insert( "Access-Control-Allow-Methods",
vec![RANGE, CONTENT_TYPE, ACCEPT, ORIGIN, WWW_AUTHENTICATE] HeaderValue::from_static("GET,HEAD,PUT,OPTIONS,DELETE,PROPFIND,COPY,MOVE"),
.into_iter() );
.collect::<AccessControlAllowHeaders>(), res.headers_mut().insert(
"Access-Control-Allow-Headers",
HeaderValue::from_static("Authorization,Destination,Range"),
);
res.headers_mut().insert(
"Access-Control-Expose-Headers",
HeaderValue::from_static(
"WWW-Authenticate,Content-Range,Accept-Ranges,Content-Disposition",
),
); );
} }
@@ -992,41 +1104,54 @@ fn res_multistatus(res: &mut Response, content: &str) {
)); ));
} }
async fn zip_dir<W: AsyncWrite + Unpin>(writer: &mut W, dir: &Path, hidden: &str) -> BoxResult<()> { async fn zip_dir<W: AsyncWrite + Unpin>(
writer: &mut W,
dir: &Path,
hidden: &[String],
running: Arc<AtomicBool>,
) -> BoxResult<()> {
let mut writer = ZipFileWriter::new(writer); let mut writer = ZipFileWriter::new(writer);
let hidden = hidden.to_string(); let hidden = Arc::new(hidden.to_vec());
let mut walkdir = WalkDir::new(dir).filter(move |entry| { let hidden = hidden.clone();
let hidden = hidden.clone(); let dir_path_buf = dir.to_path_buf();
async move { let zip_paths = tokio::task::spawn_blocking(move || {
let mut it = WalkDir::new(&dir_path_buf).into_iter();
let mut paths: Vec<PathBuf> = vec![];
while let Some(Ok(entry)) = it.next() {
if !running.load(Ordering::SeqCst) {
break;
}
let entry_path = entry.path(); let entry_path = entry.path();
let base_name = get_file_name(&entry_path); let base_name = get_file_name(entry_path);
let file_type = entry.file_type();
if is_hidden(&hidden, base_name) { if is_hidden(&hidden, base_name) {
return Filtering::IgnoreDir; if file_type.is_dir() {
it.skip_current_dir();
}
continue;
} }
let meta = match fs::symlink_metadata(entry.path()).await { if entry.path().symlink_metadata().is_err() {
Ok(meta) => meta, continue;
Err(_) => return Filtering::Ignore,
};
if !meta.is_file() {
return Filtering::Ignore;
} }
Filtering::Continue if !file_type.is_file() {
} continue;
}); }
while let Some(entry) = walkdir.next().await { paths.push(entry_path.to_path_buf());
if let Ok(entry) = entry {
let entry_path = entry.path();
let filename = match entry_path.strip_prefix(dir).ok().and_then(|v| v.to_str()) {
Some(v) => v,
None => continue,
};
let entry_options = EntryOptions::new(filename.to_owned(), Compression::Deflate)
.unix_permissions(0o644);
let mut file = File::open(&entry_path).await?;
let mut file_writer = writer.write_entry_stream(entry_options).await?;
io::copy(&mut file, &mut file_writer).await?;
file_writer.close().await?;
} }
paths
})
.await?;
for zip_path in zip_paths.into_iter() {
let filename = match zip_path.strip_prefix(dir).ok().and_then(|v| v.to_str()) {
Some(v) => v,
None => continue,
};
let entry_options =
EntryOptions::new(filename.to_owned(), Compression::Deflate).unix_permissions(0o644);
let mut file = File::open(&zip_path).await?;
let mut file_writer = writer.write_entry_stream(entry_options).await?;
io::copy(&mut file, &mut file_writer).await?;
file_writer.close().await?;
} }
writer.close().await?; writer.close().await?;
Ok(()) Ok(())
@@ -1087,8 +1212,8 @@ fn status_no_content(res: &mut Response) {
*res.status_mut() = StatusCode::NO_CONTENT; *res.status_mut() = StatusCode::NO_CONTENT;
} }
fn is_hidden(hidden: &str, file_name: &str) -> bool { fn is_hidden(hidden: &[String], file_name: &str) -> bool {
hidden.contains(&format!(",{},", file_name)) hidden.iter().any(|v| glob(v, file_name))
} }
fn set_webdav_headers(res: &mut Response) { fn set_webdav_headers(res: &mut Response) {

View File

@@ -125,9 +125,9 @@ impl Accept for TlsAcceptor {
// Load public certificate from file. // Load public certificate from file.
pub fn load_certs(filename: &str) -> Result<Vec<Certificate>, Box<dyn std::error::Error>> { pub fn load_certs(filename: &str) -> Result<Vec<Certificate>, Box<dyn std::error::Error>> {
// Open certificate file. // Open certificate file.
let certfile = fs::File::open(&filename) let cert_file = fs::File::open(&filename)
.map_err(|e| format!("Failed to access `{}`, {}", &filename, e))?; .map_err(|e| format!("Failed to access `{}`, {}", &filename, e))?;
let mut reader = io::BufReader::new(certfile); let mut reader = io::BufReader::new(cert_file);
// Load and return certificate. // Load and return certificate.
let certs = rustls_pemfile::certs(&mut reader).map_err(|_| "Failed to load certificate")?; let certs = rustls_pemfile::certs(&mut reader).map_err(|_| "Failed to load certificate")?;
@@ -139,17 +139,18 @@ pub fn load_certs(filename: &str) -> Result<Vec<Certificate>, Box<dyn std::error
// Load private key from file. // Load private key from file.
pub fn load_private_key(filename: &str) -> Result<PrivateKey, Box<dyn std::error::Error>> { pub fn load_private_key(filename: &str) -> Result<PrivateKey, Box<dyn std::error::Error>> {
// Open keyfile. let key_file = fs::File::open(&filename)
let keyfile = fs::File::open(&filename)
.map_err(|e| format!("Failed to access `{}`, {}", &filename, e))?; .map_err(|e| format!("Failed to access `{}`, {}", &filename, e))?;
let mut reader = io::BufReader::new(keyfile); let mut reader = io::BufReader::new(key_file);
// Load and return a single private key. // Load and return a single private key.
let keys = rustls_pemfile::read_all(&mut reader) let keys = rustls_pemfile::read_all(&mut reader)
.map_err(|e| format!("There was a problem with reading private key: {:?}", e))? .map_err(|e| format!("There was a problem with reading private key: {:?}", e))?
.into_iter() .into_iter()
.find_map(|item| match item { .find_map(|item| match item {
rustls_pemfile::Item::RSAKey(key) | rustls_pemfile::Item::PKCS8Key(key) => Some(key), rustls_pemfile::Item::RSAKey(key)
| rustls_pemfile::Item::PKCS8Key(key)
| rustls_pemfile::Item::ECKey(key) => Some(key),
_ => None, _ => None,
}) })
.ok_or("No supported private key in file")?; .ok_or("No supported private key in file")?;

View File

@@ -23,3 +23,64 @@ pub fn try_get_file_name(path: &Path) -> BoxResult<&str> {
.and_then(|v| v.to_str()) .and_then(|v| v.to_str())
.ok_or_else(|| format!("Failed to get file name of `{}`", path.display()).into()) .ok_or_else(|| format!("Failed to get file name of `{}`", path.display()).into())
} }
pub fn glob(source: &str, target: &str) -> bool {
let ss: Vec<char> = source.chars().collect();
let mut iter = target.chars();
let mut i = 0;
'outer: while i < ss.len() {
let s = ss[i];
match s {
'*' => match ss.get(i + 1) {
Some(s_next) => {
for t in iter.by_ref() {
if t == *s_next {
i += 2;
continue 'outer;
}
}
return false;
}
None => return true,
},
'?' => match iter.next() {
Some(_) => {
i += 1;
continue;
}
None => return false,
},
_ => match iter.next() {
Some(t) => {
if s == t {
i += 1;
continue;
}
return false;
}
None => return false,
},
}
}
iter.next().is_none()
}
#[test]
fn test_glob_key() {
assert!(glob("", ""));
assert!(glob(".*", ".git"));
assert!(glob("abc", "abc"));
assert!(glob("a*c", "abc"));
assert!(glob("a?c", "abc"));
assert!(glob("a*c", "abbc"));
assert!(glob("*c", "abc"));
assert!(glob("a*", "abc"));
assert!(glob("?c", "bc"));
assert!(glob("a?", "ab"));
assert!(!glob("abc", "adc"));
assert!(!glob("abc", "abcd"));
assert!(!glob("a?c", "abbc"));
assert!(!glob("*.log", "log"));
assert!(glob("*.log", ".log"));
assert!(glob("*.log", "a.log"));
}

View File

@@ -64,7 +64,7 @@ fn allow_upload_delete_can_override(#[with(&["-A"])] server: TestServer) -> Resu
fn allow_search(#[with(&["--allow-search"])] server: TestServer) -> Result<(), Error> { fn allow_search(#[with(&["--allow-search"])] server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(format!("{}?q={}", server.url(), "test.html"))?; let resp = reqwest::blocking::get(format!("{}?q={}", server.url(), "test.html"))?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
let paths = utils::retrive_index_paths(&resp.text()?); let paths = utils::retrieve_index_paths(&resp.text()?);
assert!(!paths.is_empty()); assert!(!paths.is_empty());
for p in paths { for p in paths {
assert!(p.contains("test.html")); assert!(p.contains("test.html"));

View File

@@ -1,11 +1,10 @@
//! Run file server with different args
mod fixtures; mod fixtures;
mod utils; mod utils;
use assert_cmd::prelude::*; use fixtures::{server, Error, TestServer};
use assert_fs::fixture::TempDir;
use fixtures::{port, server, tmpdir, wait_for_port, Error, TestServer};
use rstest::rstest; use rstest::rstest;
use std::process::{Command, Stdio};
#[rstest] #[rstest]
fn path_prefix_index(#[with(&["--path-prefix", "xyz"])] server: TestServer) -> Result<(), Error> { fn path_prefix_index(#[with(&["--path-prefix", "xyz"])] server: TestServer) -> Result<(), Error> {
@@ -31,22 +30,3 @@ fn path_prefix_propfind(
assert!(text.contains("<D:href>/xyz/</D:href>")); assert!(text.contains("<D:href>/xyz/</D:href>"));
Ok(()) Ok(())
} }
#[rstest]
#[case("index.html")]
fn serve_single_file(tmpdir: TempDir, port: u16, #[case] file: &str) -> Result<(), Error> {
let mut child = Command::cargo_bin("dufs")?
.arg(tmpdir.path().join(file))
.arg("-p")
.arg(port.to_string())
.stdout(Stdio::piped())
.spawn()?;
wait_for_port(port);
let resp = reqwest::blocking::get(format!("http://localhost:{}/index.html", port))?;
assert_eq!(resp.text()?, "This is index.html");
child.kill()?;
Ok(())
}

View File

@@ -1,8 +1,11 @@
mod fixtures; mod fixtures;
mod utils; mod utils;
use fixtures::{server, Error, TestServer}; use assert_cmd::prelude::*;
use assert_fs::fixture::TempDir;
use fixtures::{port, server, tmpdir, wait_for_port, Error, TestServer, DIR_ASSETS};
use rstest::rstest; use rstest::rstest;
use std::process::{Command, Stdio};
#[rstest] #[rstest]
fn assets(server: TestServer) -> Result<(), Error> { fn assets(server: TestServer) -> Result<(), Error> {
@@ -91,3 +94,29 @@ fn asset_js_with_prefix(
); );
Ok(()) Ok(())
} }
#[rstest]
fn assets_override(tmpdir: TempDir, port: u16) -> Result<(), Error> {
let mut child = Command::cargo_bin("dufs")?
.arg(tmpdir.path())
.arg("-p")
.arg(port.to_string())
.arg("--assets")
.arg(tmpdir.join(DIR_ASSETS))
.stdout(Stdio::piped())
.spawn()?;
wait_for_port(port);
let url = format!("http://localhost:{}", port);
let resp = reqwest::blocking::get(&url)?;
assert!(resp.text()?.starts_with(&format!(
"/__dufs_v{}_index.js;DATA",
env!("CARGO_PKG_VERSION")
)));
let resp = reqwest::blocking::get(&url)?;
assert_resp_paths!(resp);
child.kill()?;
Ok(())
}

View File

@@ -95,3 +95,29 @@ fn auth_basic(
assert_eq!(resp.status(), 201); assert_eq!(resp.status(), 201);
Ok(()) Ok(())
} }
#[rstest]
fn auth_webdav_move(
#[with(&["--auth", "/@user:pass@*", "--auth", "/dira@user3:pass3", "-A"])] server: TestServer,
) -> Result<(), Error> {
let origin_url = format!("{}dira/test.html", server.url());
let new_url = format!("{}test2.html", server.url());
let resp = fetch!(b"MOVE", &origin_url)
.header("Destination", &new_url)
.send_with_digest_auth("user3", "pass3")?;
assert_eq!(resp.status(), 403);
Ok(())
}
#[rstest]
fn auth_webdav_copy(
#[with(&["--auth", "/@user:pass@*", "--auth", "/dira@user3:pass3", "-A"])] server: TestServer,
) -> Result<(), Error> {
let origin_url = format!("{}dira/test.html", server.url());
let new_url = format!("{}test2.html", server.url());
let resp = fetch!(b"COPY", &origin_url)
.header("Destination", &new_url)
.send_with_digest_auth("user3", "pass3")?;
assert_eq!(resp.status(), 403);
Ok(())
}

32
tests/cli.rs Normal file
View File

@@ -0,0 +1,32 @@
//! Run cli with different args, not starting a server
mod fixtures;
use assert_cmd::prelude::*;
use clap::ValueEnum;
use clap_complete::Shell;
use fixtures::Error;
use std::process::Command;
#[test]
/// Show help and exit.
fn help_shows() -> Result<(), Error> {
Command::cargo_bin("dufs")?.arg("-h").assert().success();
Ok(())
}
#[test]
/// Print completions and exit.
fn print_completions() -> Result<(), Error> {
// let shell_enums = EnumValueParser::<Shell>::new();
for shell in Shell::value_variants() {
Command::cargo_bin("dufs")?
.arg("--completions")
.arg(shell.to_string())
.assert()
.success();
}
Ok(())
}

View File

@@ -7,31 +7,27 @@ use rstest::rstest;
#[rstest] #[rstest]
fn cors(#[with(&["--enable-cors"])] server: TestServer) -> Result<(), Error> { fn cors(#[with(&["--enable-cors"])] server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(server.url())?; let resp = reqwest::blocking::get(server.url())?;
assert_eq!( assert_eq!(
resp.headers().get("access-control-allow-origin").unwrap(), resp.headers().get("access-control-allow-origin").unwrap(),
"*" "*"
); );
assert_eq!( assert_eq!(
resp.headers().get("access-control-allow-headers").unwrap(), resp.headers()
"range, content-type, accept, origin, www-authenticate" .get("access-control-allow-credentials")
.unwrap(),
"true"
); );
Ok(())
}
#[rstest]
fn cors_options(#[with(&["--enable-cors"])] server: TestServer) -> Result<(), Error> {
let resp = fetch!(b"OPTIONS", server.url()).send()?;
assert_eq!( assert_eq!(
resp.headers().get("access-control-allow-origin").unwrap(), resp.headers().get("access-control-allow-methods").unwrap(),
"*" "GET,HEAD,PUT,OPTIONS,DELETE,PROPFIND,COPY,MOVE"
); );
assert_eq!( assert_eq!(
resp.headers().get("access-control-allow-headers").unwrap(), resp.headers().get("access-control-allow-headers").unwrap(),
"range, content-type, accept, origin, www-authenticate" "Authorization,Destination,Range"
);
assert_eq!(
resp.headers().get("access-control-expose-headers").unwrap(),
"WWW-Authenticate,Content-Range,Accept-Ranges,Content-Disposition"
); );
Ok(()) Ok(())
} }

11
tests/data/cert_ecdsa.pem Normal file
View File

@@ -0,0 +1,11 @@
-----BEGIN CERTIFICATE-----
MIIBfTCCASOgAwIBAgIUfrAUHXIfeM54OLnTIUD9xT6FIwkwCgYIKoZIzj0EAwIw
FDESMBAGA1UEAwwJbG9jYWxob3N0MB4XDTIyMDgwMjAxMjQ1NFoXDTMyMDczMDAx
MjQ1NFowFDESMBAGA1UEAwwJbG9jYWxob3N0MFkwEwYHKoZIzj0CAQYIKoZIzj0D
AQcDQgAEW4tBe0jF2wYSLCvdreb0izR/8sgKNKkbe4xPyA9uNEbtTk58eoO3944R
JPT6S5wRTHFpF0BJhQRfiuW4K2EUcaNTMFEwHQYDVR0OBBYEFEebUDkiMJoV2d5W
8o+6p4DauHFFMB8GA1UdIwQYMBaAFEebUDkiMJoV2d5W8o+6p4DauHFFMA8GA1Ud
EwEB/wQFMAMBAf8wCgYIKoZIzj0EAwIDSAAwRQIhAPJvmzqaq/S5yYxeB4se8k2z
6pnVNxrTT2CqdPD8Z+7rAiBZAyU+5+KbQq3aZsmuNUx+YOqTDMkaUR/nd/tjnnOX
gA==
-----END CERTIFICATE-----

View File

@@ -1,3 +1,5 @@
#!/usr/bin/env bash #!/usr/bin/env bash
openssl req -subj '/CN=localhost' -x509 -newkey rsa:4096 -keyout key_pkcs8.pem -out cert.pem -nodes -days 3650 openssl req -subj '/CN=localhost' -x509 -newkey rsa:4096 -keyout key_pkcs8.pem -out cert.pem -nodes -days 3650
openssl rsa -in key_pkcs8.pem -out key_pkcs1.pem openssl rsa -in key_pkcs8.pem -out key_pkcs1.pem
openssl ecparam -name prime256v1 -genkey -noout -out key_ecdsa.pem
openssl req -subj '/CN=localhost' -x509 -key key_ecdsa.pem -out cert_ecdsa.pem -nodes -days 3650

5
tests/data/key_ecdsa.pem Normal file
View File

@@ -0,0 +1,5 @@
-----BEGIN EC PRIVATE KEY-----
MHcCAQEEILOQ44lHqD4w12HJKlZJ+Y3u91eUKjabu3UKPSahhC89oAoGCCqGSM49
AwEHoUQDQgAEW4tBe0jF2wYSLCvdreb0izR/8sgKNKkbe4xPyA9uNEbtTk58eoO3
944RJPT6S5wRTHFpF0BJhQRfiuW4K2EUcQ==
-----END EC PRIVATE KEY-----

View File

@@ -15,11 +15,11 @@ pub type Error = Box<dyn std::error::Error>;
#[allow(dead_code)] #[allow(dead_code)]
pub static FILES: &[&str] = &["test.txt", "test.html", "index.html", "😀.bin"]; pub static FILES: &[&str] = &["test.txt", "test.html", "index.html", "😀.bin"];
/// Directory names for testing diretory don't exist /// Directory names for testing directory don't exist
#[allow(dead_code)] #[allow(dead_code)]
pub static DIR_NO_FOUND: &str = "dir-no-found/"; pub static DIR_NO_FOUND: &str = "dir-no-found/";
/// Directory names for testing diretory don't have index.html /// Directory names for testing directory don't have index.html
#[allow(dead_code)] #[allow(dead_code)]
pub static DIR_NO_INDEX: &str = "dir-no-index/"; pub static DIR_NO_INDEX: &str = "dir-no-index/";
@@ -27,9 +27,13 @@ pub static DIR_NO_INDEX: &str = "dir-no-index/";
#[allow(dead_code)] #[allow(dead_code)]
pub static DIR_GIT: &str = ".git/"; pub static DIR_GIT: &str = ".git/";
/// Directory names for testings assets override
#[allow(dead_code)]
pub static DIR_ASSETS: &str = "dir-assets/";
/// Directory names for testing purpose /// Directory names for testing purpose
#[allow(dead_code)] #[allow(dead_code)]
pub static DIRECTORIES: &[&str] = &["dira/", "dirb/", "dirc/", DIR_NO_INDEX, DIR_GIT]; pub static DIRECTORIES: &[&str] = &["dira/", "dirb/", "dirc/", DIR_NO_INDEX, DIR_GIT, DIR_ASSETS];
/// Test fixture which creates a temporary directory with a few files and directories inside. /// Test fixture which creates a temporary directory with a few files and directories inside.
/// The directories also contain files. /// The directories also contain files.
@@ -44,14 +48,21 @@ pub fn tmpdir() -> TempDir {
.expect("Couldn't write to file"); .expect("Couldn't write to file");
} }
for directory in DIRECTORIES { for directory in DIRECTORIES {
for file in FILES { if *directory == DIR_ASSETS {
if *directory == DIR_NO_INDEX && *file == "index.html" {
continue;
}
tmpdir tmpdir
.child(format!("{}{}", directory, file)) .child(format!("{}{}", directory, "index.html"))
.write_str(&format!("This is {}{}", directory, file)) .write_str("__ASSERTS_PREFIX__index.js;DATA = __INDEX_DATA__")
.expect("Couldn't write to file"); .expect("Couldn't write to file");
} else {
for file in FILES {
if *directory == DIR_NO_INDEX && *file == "index.html" {
continue;
}
tmpdir
.child(format!("{}{}", directory, file))
.write_str(&format!("This is {}{}", directory, file))
.expect("Couldn't write to file");
}
} }
} }
@@ -93,34 +104,6 @@ where
TestServer::new(port, tmpdir, child, is_tls) TestServer::new(port, tmpdir, child, is_tls)
} }
/// Same as `server()` but ignore stderr
#[fixture]
#[allow(dead_code)]
pub fn server_no_stderr<I>(#[default(&[] as &[&str])] args: I) -> TestServer
where
I: IntoIterator + Clone,
I::Item: AsRef<std::ffi::OsStr>,
{
let port = port();
let tmpdir = tmpdir();
let child = Command::cargo_bin("dufs")
.expect("Couldn't find test binary")
.arg(tmpdir.path())
.arg("-p")
.arg(port.to_string())
.args(args.clone())
.stdout(Stdio::null())
.stderr(Stdio::null())
.spawn()
.expect("Couldn't run test binary");
let is_tls = args
.into_iter()
.any(|x| x.as_ref().to_str().unwrap().contains("tls"));
wait_for_port(port);
TestServer::new(port, tmpdir, child, is_tls)
}
/// Wait a max of 1s for the port to become available. /// Wait a max of 1s for the port to become available.
pub fn wait_for_port(port: u16) { pub fn wait_for_port(port: u16) {
let start_wait = Instant::now(); let start_wait = Instant::now();

View File

@@ -10,12 +10,26 @@ use rstest::rstest;
fn hidden_get_dir(#[case] server: TestServer, #[case] exist: bool) -> Result<(), Error> { fn hidden_get_dir(#[case] server: TestServer, #[case] exist: bool) -> Result<(), Error> {
let resp = reqwest::blocking::get(server.url())?; let resp = reqwest::blocking::get(server.url())?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
let paths = utils::retrive_index_paths(&resp.text()?); let paths = utils::retrieve_index_paths(&resp.text()?);
assert!(paths.contains("dira/"));
assert_eq!(paths.contains(".git/"), exist); assert_eq!(paths.contains(".git/"), exist);
assert_eq!(paths.contains("index.html"), exist); assert_eq!(paths.contains("index.html"), exist);
Ok(()) Ok(())
} }
#[rstest]
#[case(server(&[] as &[&str]), true)]
#[case(server(&["--hidden", "*.html"]), false)]
fn hidden_get_dir2(#[case] server: TestServer, #[case] exist: bool) -> Result<(), Error> {
    // Hiding by glob (`--hidden *.html`) must remove matching files from the
    // directory index while leaving ordinary directories visible.
    let resp = reqwest::blocking::get(server.url())?;
    assert_eq!(resp.status(), 200);
    let body = resp.text()?;
    let listed = utils::retrieve_index_paths(&body);
    assert!(listed.contains("dira/"));
    for name in ["index.html", "test.html"] {
        assert_eq!(listed.contains(name), exist);
    }
    Ok(())
}
#[rstest] #[rstest]
#[case(server(&[] as &[&str]), true)] #[case(server(&[] as &[&str]), true)]
#[case(server(&["--hidden", ".git,index.html"]), false)] #[case(server(&["--hidden", ".git,index.html"]), false)]
@@ -23,6 +37,7 @@ fn hidden_propfind_dir(#[case] server: TestServer, #[case] exist: bool) -> Resul
let resp = fetch!(b"PROPFIND", server.url()).send()?; let resp = fetch!(b"PROPFIND", server.url()).send()?;
assert_eq!(resp.status(), 207); assert_eq!(resp.status(), 207);
let body = resp.text()?; let body = resp.text()?;
assert!(body.contains("<D:href>/dira/</D:href>"));
assert_eq!(body.contains("<D:href>/.git/</D:href>"), exist); assert_eq!(body.contains("<D:href>/.git/</D:href>"), exist);
assert_eq!(body.contains("<D:href>/index.html</D:href>"), exist); assert_eq!(body.contains("<D:href>/index.html</D:href>"), exist);
Ok(()) Ok(())
@@ -34,7 +49,7 @@ fn hidden_propfind_dir(#[case] server: TestServer, #[case] exist: bool) -> Resul
fn hidden_search_dir(#[case] server: TestServer, #[case] exist: bool) -> Result<(), Error> { fn hidden_search_dir(#[case] server: TestServer, #[case] exist: bool) -> Result<(), Error> {
let resp = reqwest::blocking::get(format!("{}?q={}", server.url(), "test.html"))?; let resp = reqwest::blocking::get(format!("{}?q={}", server.url(), "test.html"))?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
let paths = utils::retrive_index_paths(&resp.text()?); let paths = utils::retrieve_index_paths(&resp.text()?);
for p in paths { for p in paths {
assert_eq!(p.contains("test.html"), exist); assert_eq!(p.contains("test.html"), exist);
} }

View File

@@ -66,7 +66,7 @@ fn head_dir_zip(server: TestServer) -> Result<(), Error> {
fn get_dir_search(#[with(&["-A"])] server: TestServer) -> Result<(), Error> { fn get_dir_search(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(format!("{}?q={}", server.url(), "test.html"))?; let resp = reqwest::blocking::get(format!("{}?q={}", server.url(), "test.html"))?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
let paths = utils::retrive_index_paths(&resp.text()?); let paths = utils::retrieve_index_paths(&resp.text()?);
assert!(!paths.is_empty()); assert!(!paths.is_empty());
for p in paths { for p in paths {
assert!(p.contains("test.html")); assert!(p.contains("test.html"));
@@ -78,7 +78,7 @@ fn get_dir_search(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
fn get_dir_search2(#[with(&["-A"])] server: TestServer) -> Result<(), Error> { fn get_dir_search2(#[with(&["-A"])] server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(format!("{}?q={}", server.url(), "😀.bin"))?; let resp = reqwest::blocking::get(format!("{}?q={}", server.url(), "😀.bin"))?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
let paths = utils::retrive_index_paths(&resp.text()?); let paths = utils::retrieve_index_paths(&resp.text()?);
assert!(!paths.is_empty()); assert!(!paths.is_empty());
for p in paths { for p in paths {
assert!(p.contains("😀.bin")); assert!(p.contains("😀.bin"));

78
tests/log_http.rs Normal file
View File

@@ -0,0 +1,78 @@
mod fixtures;
mod utils;
use diqwest::blocking::WithDigestAuth;
use fixtures::{port, tmpdir, wait_for_port, Error};
use assert_cmd::prelude::*;
use assert_fs::fixture::TempDir;
use rstest::rstest;
use std::io::Read;
use std::process::{Command, Stdio};
#[rstest]
#[case(&["-a", "/@user:pass", "--log-format", "$remote_user"], false)]
#[case(&["-a", "/@user:pass", "--log-format", "$remote_user", "--auth-method", "basic"], true)]
fn log_remote_user(
    tmpdir: TempDir,
    port: u16,
    #[case] args: &[&str],
    #[case] is_basic: bool,
) -> Result<(), Error> {
    // Start a dufs instance whose access-log format prints only $remote_user,
    // capturing its stdout so we can inspect the log line it emits.
    let mut server = Command::cargo_bin("dufs")?
        .arg(tmpdir.path())
        .arg("-p")
        .arg(port.to_string())
        .args(args)
        .stdout(Stdio::piped())
        .spawn()?;
    wait_for_port(port);

    // Authenticate with whichever scheme the case selects (basic vs digest).
    let request = fetch!(b"GET", &format!("http://localhost:{}", port));
    let resp = if is_basic {
        request.basic_auth("user", Some("pass")).send()?
    } else {
        request.send_with_digest_auth("user", "pass")?
    };
    assert_eq!(resp.status(), 200);

    // The most recent log line must end with the authenticated user name.
    let pipe = server.stdout.as_mut().expect("Failed to get stdout");
    let mut captured = [0u8; 1000];
    let n = pipe.read(&mut captured)?;
    let logged = std::str::from_utf8(&captured[..n])?;
    assert!(logged.lines().last().unwrap().ends_with("user"));
    server.kill()?;
    Ok(())
}
#[rstest]
#[case(&["--log-format", ""])]
fn no_log(tmpdir: TempDir, port: u16, #[case] args: &[&str]) -> Result<(), Error> {
    // An empty --log-format should disable per-request logging entirely:
    // after serving a request, the last line captured on stdout stays empty.
    let mut child = Command::cargo_bin("dufs")?
        .arg(tmpdir.path())
        .arg("-p")
        .arg(port.to_string())
        .args(args)
        .stdout(Stdio::piped())
        .spawn()?;
    wait_for_port(port);
    let stdout = child.stdout.as_mut().expect("Failed to get stdout");
    let resp = fetch!(b"GET", &format!("http://localhost:{}", port)).send()?;
    assert_eq!(resp.status(), 200);
    let mut buf = [0; 1000];
    let buf_len = stdout.read(&mut buf)?;
    let output = std::str::from_utf8(&buf[0..buf_len])?;
    assert_eq!(output.lines().last().unwrap(), "");
    // Fix: terminate the spawned server so it doesn't outlive the test and
    // keep the port bound; log_remote_user already does this.
    child.kill()?;
    Ok(())
}

View File

@@ -50,6 +50,18 @@ fn render_try_index3(#[with(&["--render-try-index"])] server: TestServer) -> Res
Ok(()) Ok(())
} }
#[rstest]
#[case(server(&["--render-try-index"] as &[&str]), false)]
#[case(server(&["--render-try-index", "--allow-search"] as &[&str]), true)]
fn render_try_index4(#[case] server: TestServer, #[case] searched: bool) -> Result<(), Error> {
    // A ?q= search against a directory lacking index.html filters the listing
    // only when --allow-search is enabled; otherwise all entries come back.
    let target = format!("{}{}?q={}", server.url(), DIR_NO_INDEX, "😀.bin");
    let resp = reqwest::blocking::get(target)?;
    assert_eq!(resp.status(), 200);
    let body = resp.text()?;
    let paths = utils::retrieve_index_paths(&body);
    assert!(!paths.is_empty());
    let all_match = paths.iter().all(|entry| entry.contains("😀.bin"));
    assert_eq!(all_match, searched);
    Ok(())
}
#[rstest] #[rstest]
fn render_spa(#[with(&["--render-spa"])] server: TestServer) -> Result<(), Error> { fn render_spa(#[with(&["--render-spa"])] server: TestServer) -> Result<(), Error> {
let resp = reqwest::blocking::get(server.url())?; let resp = reqwest::blocking::get(server.url())?;

60
tests/single_file.rs Normal file
View File

@@ -0,0 +1,60 @@
//! Run file server with different args
mod fixtures;
mod utils;
use assert_cmd::prelude::*;
use assert_fs::fixture::TempDir;
use fixtures::{port, tmpdir, wait_for_port, Error};
use rstest::rstest;
use std::process::{Command, Stdio};
#[rstest]
#[case("index.html")]
fn single_file(tmpdir: TempDir, port: u16, #[case] file: &str) -> Result<(), Error> {
let mut child = Command::cargo_bin("dufs")?
.arg(tmpdir.path().join(file))
.arg("-p")
.arg(port.to_string())
.stdout(Stdio::piped())
.spawn()?;
wait_for_port(port);
let resp = reqwest::blocking::get(format!("http://localhost:{}", port))?;
assert_eq!(resp.text()?, "This is index.html");
let resp = reqwest::blocking::get(format!("http://localhost:{}/", port))?;
assert_eq!(resp.text()?, "This is index.html");
let resp = reqwest::blocking::get(format!("http://localhost:{}/index.html", port))?;
assert_eq!(resp.text()?, "This is index.html");
child.kill()?;
Ok(())
}
#[rstest]
#[case("index.html")]
fn path_prefix_single_file(tmpdir: TempDir, port: u16, #[case] file: &str) -> Result<(), Error> {
let mut child = Command::cargo_bin("dufs")?
.arg(tmpdir.path().join(file))
.arg("-p")
.arg(port.to_string())
.arg("--path-prefix")
.arg("xyz")
.stdout(Stdio::piped())
.spawn()?;
wait_for_port(port);
let resp = reqwest::blocking::get(format!("http://localhost:{}/xyz", port))?;
assert_eq!(resp.text()?, "This is index.html");
let resp = reqwest::blocking::get(format!("http://localhost:{}/xyz/", port))?;
assert_eq!(resp.text()?, "This is index.html");
let resp = reqwest::blocking::get(format!("http://localhost:{}/xyz/index.html", port))?;
assert_eq!(resp.text()?, "This is index.html");
let resp = reqwest::blocking::get(format!("http://localhost:{}", port))?;
assert_eq!(resp.status(), 404);
child.kill()?;
Ok(())
}

29
tests/sort.rs Normal file
View File

@@ -0,0 +1,29 @@
mod fixtures;
mod utils;
use fixtures::{server, Error, TestServer};
use rstest::rstest;
#[rstest]
fn ls_dir_sort_by_name(server: TestServer) -> Result<(), Error> {
    // The ascending name-sorted listing must be the exact reverse of the
    // descending one.
    let base = server.url();
    let asc_body = reqwest::blocking::get(format!("{}?sort=name&order=asc", base))?.text()?;
    let ascending = self::utils::retrieve_index_paths(&asc_body);
    let desc_body = reqwest::blocking::get(format!("{}?sort=name&order=desc", base))?.text()?;
    let mut descending = self::utils::retrieve_index_paths(&desc_body);
    descending.reverse();
    assert_eq!(ascending, descending);
    Ok(())
}
#[rstest]
fn search_dir_sort_by_name(server: TestServer) -> Result<(), Error> {
    // Search results honour sort/order too: ascending must mirror descending.
    let base = server.url();
    let asc_body =
        reqwest::blocking::get(format!("{}?q={}&sort=name&order=asc", base, "test.html"))?.text()?;
    let ascending = self::utils::retrieve_index_paths(&asc_body);
    let desc_body =
        reqwest::blocking::get(format!("{}?q={}&sort=name&order=desc", base, "test.html"))?.text()?;
    let mut descending = self::utils::retrieve_index_paths(&desc_body);
    descending.reverse();
    assert_eq!(ascending, descending);
    Ok(())
}

View File

@@ -20,7 +20,7 @@ fn default_not_allow_symlink(server: TestServer, tmpdir: TempDir) -> Result<(),
let resp = reqwest::blocking::get(format!("{}{}/index.html", server.url(), dir))?; let resp = reqwest::blocking::get(format!("{}{}/index.html", server.url(), dir))?;
assert_eq!(resp.status(), 404); assert_eq!(resp.status(), 404);
let resp = reqwest::blocking::get(server.url())?; let resp = reqwest::blocking::get(server.url())?;
let paths = utils::retrive_index_paths(&resp.text()?); let paths = utils::retrieve_index_paths(&resp.text()?);
assert!(!paths.is_empty()); assert!(!paths.is_empty());
assert!(!paths.contains(&format!("{}/", dir))); assert!(!paths.contains(&format!("{}/", dir)));
Ok(()) Ok(())
@@ -39,7 +39,7 @@ fn allow_symlink(
let resp = reqwest::blocking::get(format!("{}{}/index.html", server.url(), dir))?; let resp = reqwest::blocking::get(format!("{}{}/index.html", server.url(), dir))?;
assert_eq!(resp.status(), 200); assert_eq!(resp.status(), 200);
let resp = reqwest::blocking::get(server.url())?; let resp = reqwest::blocking::get(server.url())?;
let paths = utils::retrive_index_paths(&resp.text()?); let paths = utils::retrieve_index_paths(&resp.text()?);
assert!(!paths.is_empty()); assert!(!paths.is_empty());
assert!(paths.contains(&format!("{}/", dir))); assert!(paths.contains(&format!("{}/", dir)));
Ok(()) Ok(())

View File

@@ -17,6 +17,10 @@ use rstest::rstest;
"--tls-cert", "tests/data/cert.pem", "--tls-cert", "tests/data/cert.pem",
"--tls-key", "tests/data/key_pkcs1.pem", "--tls-key", "tests/data/key_pkcs1.pem",
]))] ]))]
#[case(server(&[
"--tls-cert", "tests/data/cert_ecdsa.pem",
"--tls-key", "tests/data/key_ecdsa.pem",
]))]
fn tls_works(#[case] server: TestServer) -> Result<(), Error> { fn tls_works(#[case] server: TestServer) -> Result<(), Error> {
let client = ClientBuilder::new() let client = ClientBuilder::new()
.danger_accept_invalid_certs(true) .danger_accept_invalid_certs(true)

View File

@@ -1,5 +1,5 @@
use indexmap::IndexSet;
use serde_json::Value; use serde_json::Value;
use std::collections::HashSet;
#[macro_export] #[macro_export]
macro_rules! assert_resp_paths { macro_rules! assert_resp_paths {
@@ -9,7 +9,7 @@ macro_rules! assert_resp_paths {
($resp:ident, $files:expr) => { ($resp:ident, $files:expr) => {
assert_eq!($resp.status(), 200); assert_eq!($resp.status(), 200);
let body = $resp.text()?; let body = $resp.text()?;
let paths = self::utils::retrive_index_paths(&body); let paths = self::utils::retrieve_index_paths(&body);
assert!(!paths.is_empty()); assert!(!paths.is_empty());
for file in $files { for file in $files {
assert!(paths.contains(&file.to_string())); assert!(paths.contains(&file.to_string()));
@@ -25,8 +25,8 @@ macro_rules! fetch {
} }
#[allow(dead_code)] #[allow(dead_code)]
pub fn retrive_index_paths(index: &str) -> HashSet<String> { pub fn retrieve_index_paths(index: &str) -> IndexSet<String> {
retrive_index_paths_impl(index).unwrap_or_default() retrieve_index_paths_impl(index).unwrap_or_default()
} }
#[allow(dead_code)] #[allow(dead_code)]
@@ -35,10 +35,11 @@ pub fn encode_uri(v: &str) -> String {
parts.join("/") parts.join("/")
} }
fn retrive_index_paths_impl(index: &str) -> Option<HashSet<String>> { fn retrieve_index_paths_impl(index: &str) -> Option<IndexSet<String>> {
let lines: Vec<&str> = index.lines().collect(); let lines: Vec<&str> = index.lines().collect();
let line = lines.iter().find(|v| v.contains("DATA ="))?; let line = lines.iter().find(|v| v.contains("DATA ="))?;
let value: Value = line[7..].parse().ok()?; let line_col = line.find("DATA =").unwrap() + 6;
let value: Value = line[line_col..].parse().ok()?;
let paths = value let paths = value
.get("paths")? .get("paths")?
.as_array()? .as_array()?