vw_small

Hardened fork of Vaultwarden (https://github.com/dani-garcia/vaultwarden) with fewer features.
git clone https://git.philomathiclife.com/repos/vw_small

commit 2421d49d9a64cf235f0c885a6063a3bb3628bdda
parent 1db37bf3d06543c890612ff88193813035763034
Author: Olivier Martin <olivier.martin@illogika.com>
Date:   Fri, 16 Apr 2021 14:29:28 -0400

Merge branch 'master' of github.com:dani-garcia/bitwarden_rs into 2fa_enforcement

# Conflicts:
#	src/db/models/org_policy.rs
#	src/db/models/organization.rs
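
The conflicted files hold the organization-policy models that the 2fa_enforcement branch builds on. For context, the call-site shape those models back looks like this; the snippet is copied from the src/api/core/ciphers.rs hunk further down in this diff and relies on crate-local types (CipherData, OrgPolicy, EmptyResult, the err! macro), so it is an excerpt rather than a standalone program:

    /// Reject saving a cipher to the personal vault when the PersonalOwnership
    /// org policy applies to the user.
    fn enforce_personal_ownership_policy(data: &CipherData, headers: &Headers, conn: &DbConn) -> EmptyResult {
        if data.OrganizationId.is_none() {
            let user_uuid = &headers.user.uuid;
            let policy_type = OrgPolicyType::PersonalOwnership;
            if OrgPolicy::is_applicable_to_user(user_uuid, policy_type, conn) {
                err!("Due to an Enterprise Policy, you are restricted from saving items to your personal vault.")
            }
        }
        Ok(())
    }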

Diffstat:
M.github/workflows/build.yml | 11++++++++++-
MCargo.lock | 229++++++++++++++++++++++++++++++++++++++++++++++---------------------------------
MCargo.toml | 4++--
MREADME.md | 6+++---
Mbuild.rs | 12+++++++-----
Mdocker/Dockerfile.j2 | 4++--
Mrust-toolchain | 4++--
Mrustfmt.toml | 5+++++
Msrc/api/admin.rs | 78++++++++++++++++++++++++++++++++++--------------------------------------------
Msrc/api/core/accounts.rs | 10+---------
Msrc/api/core/ciphers.rs | 125+++++++++++++++++++++++++++++++++++++++----------------------------------------
Msrc/api/core/folders.rs | 10+---------
Msrc/api/core/mod.rs | 26+++++++-------------------
Msrc/api/core/organizations.rs | 58+++++++++++++++++++++++++++++-----------------------------
Msrc/api/core/sends.rs | 23+++--------------------
Msrc/api/core/two_factor/authenticator.rs | 25+++++--------------------
Msrc/api/core/two_factor/duo.rs | 19++++++++++++-------
Msrc/api/core/two_factor/email.rs | 9+++------
Msrc/api/core/two_factor/mod.rs | 8+-------
Msrc/api/core/two_factor/u2f.rs | 24++++--------------------
Msrc/api/icons.rs | 48++++++++++++++++++++++++++++++++++++------------
Msrc/api/identity.rs | 47++++++++++++++++++++++-------------------------
Msrc/api/mod.rs | 6+++---
Msrc/api/notifications.rs | 32+++++++++++++++-----------------
Msrc/api/web.rs | 8++++++--
Msrc/auth.rs | 33++++++++++++++++++++-------------
Msrc/config.rs | 25+++++++------------------
Msrc/crypto.rs | 4+---
Msrc/db/mod.rs | 17++++-------------
Msrc/db/models/attachment.rs | 1-
Msrc/db/models/cipher.rs | 37++++++++++++++-----------------------
Msrc/db/models/collection.rs | 31+++++++++++++++----------------
Msrc/db/models/favorite.rs | 24++++++++++++------------
Msrc/db/models/folder.rs | 1-
Msrc/db/models/mod.rs | 5++---
Msrc/db/models/org_policy.rs | 9++++-----
Msrc/db/models/organization.rs | 31++++++++++++++-----------------
Msrc/db/models/user.rs | 12+++++++-----
Msrc/error.rs | 10+++-------
Msrc/mail.rs | 26++++++++++----------------
Msrc/main.rs | 64++++++++++++++++++++++++++++++++++++----------------------------
Msrc/static/templates/admin/settings.hbs | 6+++++-
Msrc/util.rs | 47+++++++++++++++++++++++++++++++----------------
43 files changed, 594 insertions(+), 620 deletions(-)

diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml @@ -89,7 +89,7 @@ jobs: with: profile: minimal target: ${{ matrix.target-triple }} - components: clippy + components: clippy, rustfmt # End Uses the rust-toolchain file to determine version @@ -111,6 +111,15 @@ jobs: # End Run cargo clippy + # Run cargo fmt + - name: '`cargo fmt`' + uses: actions-rs/cargo@v1 + with: + command: fmt + args: --all -- --check + # End Run cargo fmt + + # Build the binary - name: '`cargo build --release --features ${{ matrix.features }} --target ${{ matrix.target-triple }}`' uses: actions-rs/cargo@v1 diff --git a/Cargo.lock b/Cargo.lock @@ -471,9 +471,9 @@ version = "0.3.0" source = "git+https://github.com/SergioBenitez/Devise.git?rev=e58b3ac9a#e58b3ac9afc3b6ff10a8aaf02a3e768a8f530089" dependencies = [ "bitflags", - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", "quote 1.0.9", - "syn 1.0.65", + "syn 1.0.69", ] [[package]] @@ -499,9 +499,9 @@ version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "45f5098f628d02a7a0f68ddba586fb61e80edec3bdc1be3b921f4ceec60858d3" dependencies = [ - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", "quote 1.0.9", - "syn 1.0.65", + "syn 1.0.69", ] [[package]] @@ -564,6 +564,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77c90badedccf4105eca100756a0b1289e191f6fcbdadd3cee1d2f614f97da8f" [[package]] +name = "either" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" + +[[package]] name = "encoding_rs" version = "0.8.28" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -680,9 +686,9 @@ dependencies = [ [[package]] name = "futures" -version = "0.3.13" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f55667319111d593ba876406af7c409c0ebb44dc4be6132a783ccf163ea14c1" +checksum = "a9d5813545e459ad3ca1bff9915e9ad7f1a47dc6a91b627ce321d5863b7dd253" dependencies = [ "futures-channel", "futures-core", @@ -695,9 +701,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.13" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c2dd2df839b57db9ab69c2c9d8f3e8c81984781937fe2807dc6dcf3b2ad2939" +checksum = "ce79c6a52a299137a6013061e0cf0e688fce5d7f1bc60125f520912fdb29ec25" dependencies = [ "futures-core", "futures-sink", @@ -705,15 +711,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.13" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15496a72fabf0e62bdc3df11a59a3787429221dd0710ba8ef163d6f7a9112c94" +checksum = "098cd1c6dda6ca01650f1a37a794245eb73181d0d4d4e955e2f3c37db7af1815" [[package]] name = "futures-executor" -version = "0.3.13" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "891a4b7b96d84d5940084b2a37632dd65deeae662c114ceaa2c879629c9c0ad1" +checksum = "10f6cb7042eda00f0049b1d2080aa4b93442997ee507eb3828e8bd7577f94c9d" dependencies = [ "futures-core", "futures-task", @@ -722,39 +728,39 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.13" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71c2c65c57704c32f5241c1223167c2c3294fd34ac020c807ddbe6db287ba59" +checksum = "365a1a1fb30ea1c03a830fdb2158f5236833ac81fa0ad12fe35b29cddc35cb04" [[package]] name = "futures-macro" -version = 
"0.3.13" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea405816a5139fb39af82c2beb921d52143f556038378d6db21183a5c37fbfb7" +checksum = "668c6733a182cd7deb4f1de7ba3bf2120823835b3bcfbeacf7d2c4a773c1bb8b" dependencies = [ "proc-macro-hack", - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", "quote 1.0.9", - "syn 1.0.65", + "syn 1.0.69", ] [[package]] name = "futures-sink" -version = "0.3.13" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85754d98985841b7d4f5e8e6fbfa4a4ac847916893ec511a2917ccd8525b8bb3" +checksum = "5c5629433c555de3d82861a7a4e3794a4c40040390907cfbfd7143a92a426c23" [[package]] name = "futures-task" -version = "0.3.13" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa189ef211c15ee602667a6fcfe1c1fd9e07d42250d2156382820fba33c9df80" +checksum = "ba7aa51095076f3ba6d9a1f702f74bd05ec65f555d70d2033d55ba8d69f581bc" [[package]] name = "futures-util" -version = "0.3.13" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1812c7ab8aedf8d6f2701a43e1243acdbcc2b36ab26e2ad421eb99ac963d96d1" +checksum = "3c144ad54d60f23927f0a6b6d816e4271278b64f005ad65e4e35291d2de9c025" dependencies = [ "futures-channel", "futures-core", @@ -923,16 +929,16 @@ dependencies = [ "log 0.4.14", "mac", "markup5ever", - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", "quote 1.0.9", - "syn 1.0.65", + "syn 1.0.69", ] [[package]] name = "http" -version = "0.2.3" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7245cd7449cc792608c3c8a9eaf69bd4eabbabf802713748fd739c98b82f0747" +checksum = "527e8c9ac747e28542699a951517aa9a6945af506cd1f2e1b53a576c17b6cc11" dependencies = [ "bytes 1.0.1", "fnv", @@ -952,9 +958,9 @@ dependencies = [ [[package]] name = "httparse" -version = "1.3.5" +version = "1.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "615caabe2c3160b313d52ccc905335f4ed5f10881dd63dc5699d47e90be85691" +checksum = "bc35c995b9d93ec174cf9a27d425c7892722101e14993cd227fdb51d70cf9589" [[package]] name = "httpdate" @@ -1121,9 +1127,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.49" +version = "0.3.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc15e39392125075f60c95ba416f5381ff6c3a948ff02ab12464715adf56c821" +checksum = "2d99f9e3e84b8f67f846ef5b4cbbc3b1c29f6c759fcbce6f01aa0e73d932a24c" dependencies = [ "wasm-bindgen", ] @@ -1194,9 +1200,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.91" +version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8916b1f6ca17130ec6568feccee27c156ad12037880833a3b842a823236502e7" +checksum = "9385f66bf6105b241aa65a61cb923ef20efc665cb9f9bb50ac2f0c4b7f378d41" [[package]] name = "libsqlite3-sys" @@ -1211,9 +1217,9 @@ dependencies = [ [[package]] name = "lock_api" -version = "0.4.2" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd96ffd135b2fd7b973ac026d28085defbe8983df057ced3eb4f2130b0831312" +checksum = "5a3c91c24eae6777794bb1997ad98bbb87daf92890acab859f7eaa4320333176" dependencies = [ "scopeguard", ] @@ -1317,9 +1323,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9753f12909fd8d923f75ae5c3258cae1ed3c8ec052e1b38c93c21a6d157f789c" dependencies = [ "migrations_internals", - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", "quote 1.0.9", - "syn 
1.0.65", + "syn 1.0.69", ] [[package]] @@ -1544,9 +1550,9 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "876a53fff98e03a936a674b29568b0e605f06b29372c2489ff4de23f1949743d" dependencies = [ - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", "quote 1.0.9", - "syn 1.0.65", + "syn 1.0.69", ] [[package]] @@ -1819,9 +1825,9 @@ checksum = "99b8db626e31e5b81787b9783425769681b347011cc59471e33ea46d2ea0cf55" dependencies = [ "pest", "pest_meta", - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", "quote 1.0.9", - "syn 1.0.65", + "syn 1.0.69", ] [[package]] @@ -1894,9 +1900,9 @@ version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a490329918e856ed1b083f244e3bfe2d8c4f336407e4ea9e1a9f479ff09049e5" dependencies = [ - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", "quote 1.0.9", - "syn 1.0.65", + "syn 1.0.69", ] [[package]] @@ -1961,9 +1967,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.24" +version = "1.0.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71" +checksum = "a152013215dca273577e18d2bf00fa862b89b24169fb78c4c95aeb07992c9cec" dependencies = [ "unicode-xid 0.2.1", ] @@ -1995,14 +2001,14 @@ version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3d0b9745dc2debf507c8422de05d7226cc1f0644216dfdfead988f9b1ab32a7" dependencies = [ - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", ] [[package]] name = "quoted_printable" -version = "0.4.2" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47b080c5db639b292ac79cbd34be0cfc5d36694768d8341109634d90b86930e2" +checksum = "1238256b09923649ec89b08104c4dfe9f6cb2fea734a5db5384e44916d59e9c5" [[package]] name = "r2d2" @@ -2151,9 +2157,9 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.2.5" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94341e4e44e24f6b591b59e47a8a027df12e008d73fd5672dbea9cc22f4507d9" +checksum = "8270314b5ccceb518e7e578952f0b72b88222d02e8f77f5ecf7abbb673539041" dependencies = [ "bitflags", ] @@ -2186,9 +2192,9 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf12057f289428dbf5c591c74bf10392e4a8003f993405a902f20117019022d4" +checksum = "2296f2fac53979e8ccbc4a1136b25dcefd37be9ed7e4a1f6b05a6029c84ff124" dependencies = [ "async-compression", "base64 0.13.0", @@ -2213,6 +2219,7 @@ dependencies = [ "serde_urlencoded", "tokio", "tokio-native-tls", + "tokio-socks", "tokio-util", "url 2.2.1", "wasm-bindgen", @@ -2401,9 +2408,9 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" [[package]] name = "sct" -version = "0.6.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3042af939fca8c3453b7af0f1c66e533a15a86169e39de2657310ade8f98d3c" +checksum = "b362b83898e0e69f38515b82ee15aa80636befe47c3b6d3d89a911e78fc228ce" dependencies = [ "ring", "untrusted", @@ -2462,9 +2469,9 @@ version = "1.0.125" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b093b7a2bb58203b5da3056c05b4ec1fed827dcfdb37347a8841695263b3d06d" dependencies = [ - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", "quote 1.0.9", - "syn 1.0.65", + "syn 1.0.69", ] [[package]] @@ -2642,11 +2649,11 @@ version = "0.5.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "c87a60a40fccc84bef0652345bbbbbe20a605bf5d0ce81719fc476f5c03b50ef" dependencies = [ - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", "quote 1.0.9", "serde", "serde_derive", - "syn 1.0.65", + "syn 1.0.69", ] [[package]] @@ -2656,13 +2663,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "58fa5ff6ad0d98d1ffa8cb115892b6e69d67799f6763e162a1c9db421dc22e11" dependencies = [ "base-x", - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", "quote 1.0.9", "serde", "serde_derive", "serde_json", "sha1", - "syn 1.0.65", + "syn 1.0.69", ] [[package]] @@ -2692,7 +2699,7 @@ checksum = "f24c8e5e19d22a726626f1a5e16fe15b132dcf21d10177fa5a45ce7962996b97" dependencies = [ "phf_generator", "phf_shared", - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", "quote 1.0.9", ] @@ -2715,11 +2722,11 @@ dependencies = [ [[package]] name = "syn" -version = "1.0.65" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3a1d708c221c5a612956ef9f75b37e454e88d1f7b899fbd3a18d4252012d663" +checksum = "48fe99c6bd8b1cc636890bcc071842de909d902c81ac7dab53ba33c421ab8ffb" dependencies = [ - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", "quote 1.0.9", "unicode-xid 0.2.1", ] @@ -2768,6 +2775,26 @@ dependencies = [ ] [[package]] +name = "thiserror" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0f4a65597094d4483ddaed134f409b2cb7c1beccf25201a9f73c719254fa98e" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7765189610d8241a44529806d6fd1f2e0a08734313a35d5b3a556f92b381f3c0" +dependencies = [ + "proc-macro2 1.0.26", + "quote 1.0.9", + "syn 1.0.69", +] + +[[package]] name = "threadpool" version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -2819,17 +2846,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5c3be1edfad6027c69f5491cf4cb310d1a71ecd6af742788c6ff8bced86b8fa" dependencies = [ "proc-macro-hack", - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", "quote 1.0.9", "standback", - "syn 1.0.65", + "syn 1.0.69", ] [[package]] name = "tinyvec" -version = "1.1.1" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "317cca572a0e89c3ce0ca1f1bdc9369547fe318a683418e42ac8f59d14701023" +checksum = "5b5220f05bb7de7f3f53c7c065e1199b3172696fe2db9f9c4d8ad9b4ee74c342" dependencies = [ "tinyvec_macros", ] @@ -2842,9 +2869,9 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" [[package]] name = "tokio" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "134af885d758d645f0f0505c9a8b3f9bf8a348fd822e112ab5248138348f1722" +checksum = "83f0c8e7c0addab50b663055baf787d0af7f413a46e6e7fb9559a4e4db7137a5" dependencies = [ "autocfg", "bytes 1.0.1", @@ -2866,10 +2893,22 @@ dependencies = [ ] [[package]] +name = "tokio-socks" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51165dfa029d2a65969413a6cc96f354b86b464498702f174a4efa13608fd8c0" +dependencies = [ + "either", + "futures-util", + "thiserror", + "tokio", +] + +[[package]] name = "tokio-util" -version = "0.6.5" +version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5143d049e85af7fbc36f5454d990e62c2df705b3589f123b71f441b6b59f443f" +checksum = "940a12c99365c31ea8dd9ba04ec1be183ffe4920102bb7122c2f515437601e8e" dependencies = [ "bytes 1.0.1", "futures-core", @@ -2913,9 +2952,9 @@ version = "0.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c42e6fa53307c8a17e4ccd4dc81cf5ec38db9209f59b222210375b54ee40d1e2" dependencies = [ - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", "quote 1.0.9", - "syn 1.0.65", + "syn 1.0.69", ] [[package]] @@ -3003,9 +3042,9 @@ dependencies = [ [[package]] name = "unicode-bidi" -version = "0.3.4" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5" +checksum = "eeb8be209bb1c96b7c177c7420d26e04eccacb0eeae6b980e35fcb74678107e0" dependencies = [ "matches", ] @@ -3128,9 +3167,9 @@ checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" [[package]] name = "wasm-bindgen" -version = "0.2.72" +version = "0.2.73" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fe8f61dba8e5d645a4d8132dc7a0a66861ed5e1045d2c0ed940fab33bac0fbe" +checksum = "83240549659d187488f91f33c0f8547cbfef0b2088bc470c116d1d260ef623d9" dependencies = [ "cfg-if 1.0.0", "serde", @@ -3140,24 +3179,24 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.72" +version = "0.2.73" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "046ceba58ff062da072c7cb4ba5b22a37f00a302483f7e2a6cdc18fedbdc1fd3" +checksum = "ae70622411ca953215ca6d06d3ebeb1e915f0f6613e3b495122878d7ebec7dae" dependencies = [ "bumpalo", "lazy_static", "log 0.4.14", - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", "quote 1.0.9", - "syn 1.0.65", + "syn 1.0.69", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.22" +version = "0.4.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73157efb9af26fb564bb59a009afd1c7c334a44db171d280690d0c3faaec3468" +checksum = "81b8b767af23de6ac18bf2168b690bed2902743ddf0fb39252e36f9e2bfc63ea" dependencies = [ "cfg-if 1.0.0", "js-sys", @@ -3167,9 +3206,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.72" +version = "0.2.73" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ef9aa01d36cda046f797c57959ff5f3c615c9cc63997a8d545831ec7976819b" +checksum = "3e734d91443f177bfdb41969de821e15c516931c3c3db3d318fa1b68975d0f6f" dependencies = [ "quote 1.0.9", "wasm-bindgen-macro-support", @@ -3177,28 +3216,28 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.72" +version = "0.2.73" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96eb45c1b2ee33545a813a92dbb53856418bf7eb54ab34f7f7ff1448a5b3735d" +checksum = "d53739ff08c8a68b0fdbcd54c372b8ab800b1449ab3c9d706503bc7dd1621b2c" dependencies = [ - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", "quote 1.0.9", - "syn 1.0.65", + "syn 1.0.69", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.72" +version = "0.2.73" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7148f4696fb4960a346eaa60bbfb42a1ac4ebba21f750f75fc1375b098d5ffa" +checksum = "d9a543ae66aa233d14bb765ed9af4a33e81b8b58d1584cf1b47ff8cd0b9e4489" [[package]] name = "web-sys" -version = "0.3.49" +version = "0.3.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"59fe19d70f5dacc03f6e46777213facae5ac3801575d56ca6cbd4c93dcd12310" +checksum = "a905d57e488fec8861446d3393670fb50d27a262344013181c2cdf9fff5481be" dependencies = [ "js-sys", "wasm-bindgen", diff --git a/Cargo.toml b/Cargo.toml @@ -32,7 +32,7 @@ rocket = { version = "0.5.0-dev", features = ["tls"], default-features = false } rocket_contrib = "0.5.0-dev" # HTTP client -reqwest = { version = "0.11.2", features = ["blocking", "json", "gzip", "brotli"] } +reqwest = { version = "0.11.3", features = ["blocking", "json", "gzip", "brotli", "socks"] } # multipart/form-data support multipart = { version = "0.17.1", features = ["server"], default-features = false } @@ -102,7 +102,7 @@ num-traits = "0.2.14" num-derive = "0.3.3" # Email libraries -tracing = { version = "0.1", features = ["log"] } # Needed to have lettre trace logging used when SMTP_DEBUG is enabled. +tracing = { version = "0.1.25", features = ["log"] } # Needed to have lettre trace logging used when SMTP_DEBUG is enabled. lettre = { version = "0.10.0-beta.3", features = ["smtp-transport", "builder", "serde", "native-tls", "hostname", "tracing"], default-features = false } newline-converter = "0.2.0" diff --git a/README.md b/README.md @@ -60,10 +60,10 @@ Thanks for your contribution to the project! <table> <tr> <td align="center"> - <a href="https://github.com/netDpay"> - <img src="https://avatars.githubusercontent.com/u/77323954?s=75&v=4" width="75px;" alt="netDpay"/> + <a href="https://github.com/netdadaltd"> + <img src="https://avatars.githubusercontent.com/u/77323954?s=75&v=4" width="75px;" alt="netdadaltd"/> <br /> - <sub><b>netDpay</b></sub> + <sub><b>netDada Ltd.</b></sub> </a> </td> </tr> diff --git a/build.rs b/build.rs @@ -1,7 +1,7 @@ -use std::process::Command; use std::env; +use std::process::Command; -fn main() { +fn main() { // This allow using #[cfg(sqlite)] instead of #[cfg(feature = "sqlite")], which helps when trying to add them through macros #[cfg(feature = "sqlite")] println!("cargo:rustc-cfg=sqlite"); @@ -11,8 +11,10 @@ fn main() { println!("cargo:rustc-cfg=postgresql"); #[cfg(not(any(feature = "sqlite", feature = "mysql", feature = "postgresql")))] - compile_error!("You need to enable one DB backend. To build with previous defaults do: cargo build --features sqlite"); - + compile_error!( + "You need to enable one DB backend. To build with previous defaults do: cargo build --features sqlite" + ); + if let Ok(version) = env::var("BWRS_VERSION") { println!("cargo:rustc-env=BWRS_VERSION={}", version); println!("cargo:rustc-env=CARGO_PKG_VERSION={}", version); @@ -61,7 +63,7 @@ fn read_git_info() -> Result<(), std::io::Error> { } else { format!("{}-{}", last_tag, rev_short) }; - + println!("cargo:rustc-env=BWRS_VERSION={}", version); println!("cargo:rustc-env=CARGO_PKG_VERSION={}", version); diff --git a/docker/Dockerfile.j2 b/docker/Dockerfile.j2 @@ -1,10 +1,10 @@ # This file was generated using a Jinja2 template. # Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles. 
-{% set build_stage_base_image = "rust:1.50" %} +{% set build_stage_base_image = "rust:1.51" %} {% if "alpine" in target_file %} {% if "amd64" in target_file %} -{% set build_stage_base_image = "clux/muslrust:nightly-2021-02-22" %} +{% set build_stage_base_image = "clux/muslrust:nightly-2021-04-14" %} {% set runtime_stage_base_image = "alpine:3.13" %} {% set package_arch_target = "x86_64-unknown-linux-musl" %} {% elif "armv7" in target_file %} diff --git a/rust-toolchain b/rust-toolchain @@ -1 +1 @@ -nightly-2021-02-22 -\ No newline at end of file +nightly-2021-04-14 +\ No newline at end of file diff --git a/rustfmt.toml b/rustfmt.toml @@ -1,2 +1,7 @@ version = "Two" +edition = "2018" max_width = 120 +newline_style = "Unix" +use_small_heuristics = "Off" +struct_lit_single_line = false +overflow_delimited_expr = true diff --git a/src/api/admin.rs b/src/api/admin.rs @@ -3,7 +3,6 @@ use serde::de::DeserializeOwned; use serde_json::Value; use std::{env, time::Duration}; -use reqwest::{blocking::Client, header::USER_AGENT}; use rocket::{ http::{Cookie, Cookies, SameSite}, request::{self, FlashMessage, Form, FromRequest, Outcome, Request}, @@ -19,7 +18,7 @@ use crate::{ db::{backup_database, get_sql_server_version, models::*, DbConn, DbConnType}, error::{Error, MapResult}, mail, - util::{format_naive_datetime_local, get_display_size, is_running_in_docker}, + util::{format_naive_datetime_local, get_display_size, get_reqwest_client, is_running_in_docker}, CONFIG, }; @@ -64,11 +63,8 @@ static DB_TYPE: Lazy<&str> = Lazy::new(|| { .unwrap_or("Unknown") }); -static CAN_BACKUP: Lazy<bool> = Lazy::new(|| { - DbConnType::from_url(&CONFIG.database_url()) - .map(|t| t == DbConnType::sqlite) - .unwrap_or(false) -}); +static CAN_BACKUP: Lazy<bool> = + Lazy::new(|| DbConnType::from_url(&CONFIG.database_url()).map(|t| t == DbConnType::sqlite).unwrap_or(false)); #[get("/")] fn admin_disabled() -> &'static str { @@ -141,7 +137,12 @@ fn admin_url(referer: Referer) -> String { fn admin_login(flash: Option<FlashMessage>) -> ApiResult<Html<String>> { // If there is an error, show it let msg = flash.map(|msg| format!("{}: {}", msg.name(), msg.msg())); - let json = json!({"page_content": "admin/login", "version": VERSION, "error": msg, "urlpath": CONFIG.domain_path()}); + let json = json!({ + "page_content": "admin/login", + "version": VERSION, + "error": msg, + "urlpath": CONFIG.domain_path() + }); // Return the page let text = CONFIG.render_template(BASE_TEMPLATE, &json)?; @@ -165,10 +166,7 @@ fn post_admin_login( // If the token is invalid, redirect to login page if !_validate_token(&data.token) { error!("Invalid admin token. 
IP: {}", ip.ip); - Err(Flash::error( - Redirect::to(admin_url(referer)), - "Invalid admin token, please try again.", - )) + Err(Flash::error(Redirect::to(admin_url(referer)), "Invalid admin token, please try again.")) } else { // If the token received is valid, generate JWT and save it as a cookie let claims = generate_admin_claims(); @@ -328,7 +326,8 @@ fn get_users_json(_token: AdminToken, conn: DbConn) -> Json<Value> { fn users_overview(_token: AdminToken, conn: DbConn) -> ApiResult<Html<String>> { let users = User::get_all(&conn); let dt_fmt = "%Y-%m-%d %H:%M:%S %Z"; - let users_json: Vec<Value> = users.iter() + let users_json: Vec<Value> = users + .iter() .map(|u| { let mut usr = u.to_json(&conn); usr["cipher_count"] = json!(Cipher::count_owned_by_user(&u.uuid, &conn)); @@ -338,7 +337,7 @@ fn users_overview(_token: AdminToken, conn: DbConn) -> ApiResult<Html<String>> { usr["created_at"] = json!(format_naive_datetime_local(&u.created_at, dt_fmt)); usr["last_active"] = match u.last_active(&conn) { Some(dt) => json!(format_naive_datetime_local(&dt, dt_fmt)), - None => json!("Never") + None => json!("Never"), }; usr }) @@ -423,7 +422,6 @@ fn update_user_org_type(data: Json<UserOrgTypeData>, _token: AdminToken, conn: D user_to_edit.save(&conn) } - #[post("/users/update_revision")] fn update_revision_users(_token: AdminToken, conn: DbConn) -> EmptyResult { User::update_all_revisions(&conn) @@ -432,7 +430,8 @@ fn update_revision_users(_token: AdminToken, conn: DbConn) -> EmptyResult { #[get("/organizations/overview")] fn organizations_overview(_token: AdminToken, conn: DbConn) -> ApiResult<Html<String>> { let organizations = Organization::get_all(&conn); - let organizations_json: Vec<Value> = organizations.iter() + let organizations_json: Vec<Value> = organizations + .iter() .map(|o| { let mut org = o.to_json(); org["user_count"] = json!(UserOrganization::count_by_org(&o.uuid, &conn)); @@ -469,26 +468,15 @@ struct GitCommit { } fn get_github_api<T: DeserializeOwned>(url: &str) -> Result<T, Error> { - let github_api = Client::builder().build()?; + let github_api = get_reqwest_client(); - Ok(github_api - .get(url) - .timeout(Duration::from_secs(10)) - .header(USER_AGENT, "Bitwarden_RS") - .send()? - .error_for_status()? - .json::<T>()?) + Ok(github_api.get(url).timeout(Duration::from_secs(10)).send()?.error_for_status()?.json::<T>()?) 
} fn has_http_access() -> bool { - let http_access = Client::builder().build().unwrap(); - - match http_access - .head("https://github.com/dani-garcia/bitwarden_rs") - .timeout(Duration::from_secs(10)) - .header(USER_AGENT, "Bitwarden_RS") - .send() - { + let http_access = get_reqwest_client(); + + match http_access.head("https://github.com/dani-garcia/bitwarden_rs").timeout(Duration::from_secs(10)).send() { Ok(r) => r.status().is_success(), _ => false, } @@ -501,17 +489,16 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu use std::net::ToSocketAddrs; // Get current running versions - let web_vault_version: WebVaultVersion = match read_file_string(&format!("{}/{}", CONFIG.web_vault_folder(), "bwrs-version.json")) { - Ok(s) => serde_json::from_str(&s)?, - _ => { - match read_file_string(&format!("{}/{}", CONFIG.web_vault_folder(), "version.json")) { + let web_vault_version: WebVaultVersion = + match read_file_string(&format!("{}/{}", CONFIG.web_vault_folder(), "bwrs-version.json")) { + Ok(s) => serde_json::from_str(&s)?, + _ => match read_file_string(&format!("{}/{}", CONFIG.web_vault_folder(), "version.json")) { Ok(s) => serde_json::from_str(&s)?, - _ => { - WebVaultVersion{version: String::from("Version file missing")} + _ => WebVaultVersion { + version: String::from("Version file missing"), }, - } - }, - }; + }, + }; // Execute some environment checks let running_within_docker = is_running_in_docker(); @@ -531,7 +518,8 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu // TODO: Maybe we need to cache this using a LazyStatic or something. Github only allows 60 requests per hour, and we use 3 here already. let (latest_release, latest_commit, latest_web_build) = if has_http_access { ( - match get_github_api::<GitRelease>("https://api.github.com/repos/dani-garcia/bitwarden_rs/releases/latest") { + match get_github_api::<GitRelease>("https://api.github.com/repos/dani-garcia/bitwarden_rs/releases/latest") + { Ok(r) => r.tag_name, _ => "-".to_string(), }, @@ -547,7 +535,9 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu if running_within_docker { "-".to_string() } else { - match get_github_api::<GitRelease>("https://api.github.com/repos/dani-garcia/bw_web_builds/releases/latest") { + match get_github_api::<GitRelease>( + "https://api.github.com/repos/dani-garcia/bw_web_builds/releases/latest", + ) { Ok(r) => r.tag_name.trim_start_matches('v').to_string(), _ => "-".to_string(), } @@ -559,7 +549,7 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu let ip_header_name = match &ip_header.0 { Some(h) => h, - _ => "" + _ => "", }; let diagnostics_json = json!({ diff --git a/src/api/core/accounts.rs b/src/api/core/accounts.rs @@ -320,15 +320,7 @@ fn post_rotatekey(data: JsonUpcase<KeyData>, headers: Headers, conn: DbConn, nt: err!("The cipher is not owned by the user") } - update_cipher_from_data( - &mut saved_cipher, - cipher_data, - &headers, - false, - &conn, - &nt, - UpdateType::CipherUpdate, - )? + update_cipher_from_data(&mut saved_cipher, cipher_data, &headers, false, &conn, &nt, UpdateType::CipherUpdate)? 
} // Update user data diff --git a/src/api/core/ciphers.rs b/src/api/core/ciphers.rs @@ -100,24 +100,18 @@ fn sync(data: Form<SyncData>, headers: Headers, conn: DbConn) -> Json<Value> { let folders_json: Vec<Value> = folders.iter().map(Folder::to_json).collect(); let collections = Collection::find_by_user_uuid(&headers.user.uuid, &conn); - let collections_json: Vec<Value> = collections.iter() - .map(|c| c.to_json_details(&headers.user.uuid, &conn)) - .collect(); + let collections_json: Vec<Value> = + collections.iter().map(|c| c.to_json_details(&headers.user.uuid, &conn)).collect(); let policies = OrgPolicy::find_by_user(&headers.user.uuid, &conn); let policies_json: Vec<Value> = policies.iter().map(OrgPolicy::to_json).collect(); let ciphers = Cipher::find_by_user_visible(&headers.user.uuid, &conn); - let ciphers_json: Vec<Value> = ciphers - .iter() - .map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn)) - .collect(); + let ciphers_json: Vec<Value> = + ciphers.iter().map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn)).collect(); let sends = Send::find_by_user(&headers.user.uuid, &conn); - let sends_json: Vec<Value> = sends - .iter() - .map(|s| s.to_json()) - .collect(); + let sends_json: Vec<Value> = sends.iter().map(|s| s.to_json()).collect(); let domains_json = if data.exclude_domains { Value::Null @@ -142,10 +136,8 @@ fn sync(data: Form<SyncData>, headers: Headers, conn: DbConn) -> Json<Value> { fn get_ciphers(headers: Headers, conn: DbConn) -> Json<Value> { let ciphers = Cipher::find_by_user_visible(&headers.user.uuid, &conn); - let ciphers_json: Vec<Value> = ciphers - .iter() - .map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn)) - .collect(); + let ciphers_json: Vec<Value> = + ciphers.iter().map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn)).collect(); Json(json!({ "Data": ciphers_json, @@ -246,7 +238,7 @@ fn post_ciphers_create(data: JsonUpcase<ShareCipherData>, headers: Headers, conn // Check if there are one more more collections selected when this cipher is part of an organization. // err if this is not the case before creating an empty cipher. - if data.Cipher.OrganizationId.is_some() && data.CollectionIds.is_empty() { + if data.Cipher.OrganizationId.is_some() && data.CollectionIds.is_empty() { err!("You must select at least one collection."); } @@ -288,17 +280,12 @@ fn post_ciphers(data: JsonUpcase<CipherData>, headers: Headers, conn: DbConn, nt /// allowed to delete or share such ciphers to an org, however. /// /// Ref: https://bitwarden.com/help/article/policies/#personal-ownership -fn enforce_personal_ownership_policy( - data: &CipherData, - headers: &Headers, - conn: &DbConn -) -> EmptyResult { +fn enforce_personal_ownership_policy(data: &CipherData, headers: &Headers, conn: &DbConn) -> EmptyResult { if data.OrganizationId.is_none() { let user_uuid = &headers.user.uuid; let policy_type = OrgPolicyType::PersonalOwnership; if OrgPolicy::is_applicable_to_user(user_uuid, policy_type, conn) { - err!("Due to an Enterprise Policy, you are restricted from \ - saving items to your personal vault.") + err!("Due to an Enterprise Policy, you are restricted from saving items to your personal vault.") } } Ok(()) @@ -317,11 +304,12 @@ pub fn update_cipher_from_data( // Check that the client isn't updating an existing cipher with stale data. 
if let Some(dt) = data.LastKnownRevisionDate { - match NaiveDateTime::parse_from_str(&dt, "%+") { // ISO 8601 format - Err(err) => - warn!("Error parsing LastKnownRevisionDate '{}': {}", dt, err), - Ok(dt) if cipher.updated_at.signed_duration_since(dt).num_seconds() > 1 => - err!("The client copy of this cipher is out of date. Resync the client and try again."), + match NaiveDateTime::parse_from_str(&dt, "%+") { + // ISO 8601 format + Err(err) => warn!("Error parsing LastKnownRevisionDate '{}': {}", dt, err), + Ok(dt) if cipher.updated_at.signed_duration_since(dt).num_seconds() > 1 => { + err!("The client copy of this cipher is out of date. Resync the client and try again.") + } Ok(_) => (), } } @@ -394,12 +382,9 @@ pub fn update_cipher_from_data( // But, we at least know we do not need to store and return this specific key. fn _clean_cipher_data(mut json_data: Value) -> Value { if json_data.is_array() { - json_data.as_array_mut() - .unwrap() - .iter_mut() - .for_each(|ref mut f| { - f.as_object_mut().unwrap().remove("Response"); - }); + json_data.as_array_mut().unwrap().iter_mut().for_each(|ref mut f| { + f.as_object_mut().unwrap().remove("Response"); + }); }; json_data } @@ -421,13 +406,13 @@ pub fn update_cipher_from_data( data["Uris"] = _clean_cipher_data(data["Uris"].clone()); } data - }, + } None => err!("Data missing"), }; cipher.name = data.Name; cipher.notes = data.Notes; - cipher.fields = data.Fields.map(|f| _clean_cipher_data(f).to_string() ); + cipher.fields = data.Fields.map(|f| _clean_cipher_data(f).to_string()); cipher.data = type_data.to_string(); cipher.password_history = data.PasswordHistory.map(|f| f.to_string()); @@ -602,11 +587,8 @@ fn post_collections_admin( } let posted_collections: HashSet<String> = data.CollectionIds.iter().cloned().collect(); - let current_collections: HashSet<String> = cipher - .get_collections(&headers.user.uuid, &conn) - .iter() - .cloned() - .collect(); + let current_collections: HashSet<String> = + cipher.get_collections(&headers.user.uuid, &conn).iter().cloned().collect(); for collection in posted_collections.symmetric_difference(&current_collections) { match Collection::find_by_uuid(&collection, &conn) { @@ -842,24 +824,25 @@ fn post_attachment( let file_name = HEXLOWER.encode(&crypto::get_random(vec![0; 10])); let path = base_path.join(&file_name); - let size = match field.data.save().memory_threshold(0).size_limit(size_limit).with_path(path.clone()) { - SaveResult::Full(SavedData::File(_, size)) => size as i32, - SaveResult::Full(other) => { - std::fs::remove_file(path).ok(); - error = Some(format!("Attachment is not a file: {:?}", other)); - return; - } - SaveResult::Partial(_, reason) => { - std::fs::remove_file(path).ok(); - error = Some(format!("Attachment size limit exceeded with this file: {:?}", reason)); - return; - } - SaveResult::Error(e) => { - std::fs::remove_file(path).ok(); - error = Some(format!("Error: {:?}", e)); - return; - } - }; + let size = + match field.data.save().memory_threshold(0).size_limit(size_limit).with_path(path.clone()) { + SaveResult::Full(SavedData::File(_, size)) => size as i32, + SaveResult::Full(other) => { + std::fs::remove_file(path).ok(); + error = Some(format!("Attachment is not a file: {:?}", other)); + return; + } + SaveResult::Partial(_, reason) => { + std::fs::remove_file(path).ok(); + error = Some(format!("Attachment size limit exceeded with this file: {:?}", reason)); + return; + } + SaveResult::Error(e) => { + std::fs::remove_file(path).ok(); + error = Some(format!("Error: {:?}", e)); + 
return; + } + }; let mut attachment = Attachment::new(file_name, cipher.uuid.clone(), name, size); attachment.akey = attachment_key.clone(); @@ -994,12 +977,22 @@ fn delete_cipher_selected_admin(data: JsonUpcase<Value>, headers: Headers, conn: } #[post("/ciphers/delete-admin", data = "<data>")] -fn delete_cipher_selected_post_admin(data: JsonUpcase<Value>, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { +fn delete_cipher_selected_post_admin( + data: JsonUpcase<Value>, + headers: Headers, + conn: DbConn, + nt: Notify, +) -> EmptyResult { delete_cipher_selected_post(data, headers, conn, nt) } #[put("/ciphers/delete-admin", data = "<data>")] -fn delete_cipher_selected_put_admin(data: JsonUpcase<Value>, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { +fn delete_cipher_selected_put_admin( + data: JsonUpcase<Value>, + headers: Headers, + conn: DbConn, + nt: Notify, +) -> EmptyResult { delete_cipher_selected_put(data, headers, conn, nt) } @@ -1150,7 +1143,13 @@ fn _delete_cipher_by_uuid(uuid: &str, headers: &Headers, conn: &DbConn, soft_del Ok(()) } -fn _delete_multiple_ciphers(data: JsonUpcase<Value>, headers: Headers, conn: DbConn, soft_delete: bool, nt: Notify) -> EmptyResult { +fn _delete_multiple_ciphers( + data: JsonUpcase<Value>, + headers: Headers, + conn: DbConn, + soft_delete: bool, + nt: Notify, +) -> EmptyResult { let data: Value = data.into_inner().data; let uuids = match data.get("Ids") { @@ -1202,7 +1201,7 @@ fn _restore_multiple_ciphers(data: JsonUpcase<Value>, headers: &Headers, conn: & for uuid in uuids { match _restore_cipher_by_uuid(uuid, headers, conn, nt) { Ok(json) => ciphers.push(json.into_inner()), - err => return err + err => return err, } } diff --git a/src/api/core/folders.rs b/src/api/core/folders.rs @@ -8,15 +8,7 @@ use crate::{ }; pub fn routes() -> Vec<rocket::Route> { - routes![ - get_folders, - get_folder, - post_folders, - post_folder, - put_folder, - delete_folder_post, - delete_folder, - ] + routes![get_folders, get_folder, post_folders, post_folder, put_folder, delete_folder_post, delete_folder,] } #[get("/folders")] diff --git a/src/api/core/mod.rs b/src/api/core/mod.rs @@ -2,21 +2,15 @@ mod accounts; mod ciphers; mod folders; mod organizations; -pub mod two_factor; mod sends; +pub mod two_factor; pub use ciphers::purge_trashed_ciphers; pub use sends::purge_sends; pub fn routes() -> Vec<Route> { - let mut mod_routes = routes![ - clear_device_token, - put_device_token, - get_eq_domains, - post_eq_domains, - put_eq_domains, - hibp_breach, - ]; + let mut mod_routes = + routes![clear_device_token, put_device_token, get_eq_domains, post_eq_domains, put_eq_domains, hibp_breach,]; let mut routes = Vec::new(); routes.append(&mut accounts::routes()); @@ -33,9 +27,9 @@ pub fn routes() -> Vec<Route> { // // Move this somewhere else // +use rocket::response::Response; use rocket::Route; use rocket_contrib::json::Json; -use rocket::response::Response; use serde_json::Value; use crate::{ @@ -43,6 +37,7 @@ use crate::{ auth::Headers, db::DbConn, error::Error, + util::get_reqwest_client, }; #[put("/devices/identifier/<uuid>/clear-token")] @@ -147,22 +142,15 @@ fn put_eq_domains(data: JsonUpcase<EquivDomainData>, headers: Headers, conn: DbC #[get("/hibp/breach?<username>")] fn hibp_breach(username: String) -> JsonResult { - let user_agent = "Bitwarden_RS"; let url = format!( "https://haveibeenpwned.com/api/v3/breachedaccount/{}?truncateResponse=false&includeUnverified=false", username ); - use reqwest::{blocking::Client, header::USER_AGENT}; - if 
let Some(api_key) = crate::CONFIG.hibp_api_key() { - let hibp_client = Client::builder().build()?; + let hibp_client = get_reqwest_client(); - let res = hibp_client - .get(&url) - .header(USER_AGENT, user_agent) - .header("hibp-api-key", api_key) - .send()?; + let res = hibp_client.get(&url).header("hibp-api-key", api_key).send()?; // If we get a 404, return a 404, it means no breached accounts if res.status() == 404 { diff --git a/src/api/core/organizations.rs b/src/api/core/organizations.rs @@ -5,7 +5,7 @@ use serde_json::Value; use crate::{ api::{EmptyResult, JsonResult, JsonUpcase, JsonUpcaseVec, Notify, NumberOrString, PasswordData, UpdateType}, - auth::{decode_invite, AdminHeaders, Headers, OwnerHeaders, ManagerHeaders, ManagerHeadersLoose}, + auth::{decode_invite, AdminHeaders, Headers, ManagerHeaders, ManagerHeadersLoose, OwnerHeaders}, db::{models::*, DbConn}, mail, CONFIG, }; @@ -333,7 +333,12 @@ fn post_organization_collection_delete_user( } #[delete("/organizations/<org_id>/collections/<col_id>")] -fn delete_organization_collection(org_id: String, col_id: String, _headers: ManagerHeaders, conn: DbConn) -> EmptyResult { +fn delete_organization_collection( + org_id: String, + col_id: String, + _headers: ManagerHeaders, + conn: DbConn, +) -> EmptyResult { match Collection::find_by_uuid(&col_id, &conn) { None => err!("Collection not found"), Some(collection) => { @@ -426,9 +431,7 @@ fn put_collection_users( continue; } - CollectionUser::save(&user.user_uuid, &coll_id, - d.ReadOnly, d.HidePasswords, - &conn)?; + CollectionUser::save(&user.user_uuid, &coll_id, d.ReadOnly, d.HidePasswords, &conn)?; } Ok(()) @@ -443,10 +446,8 @@ struct OrgIdData { #[get("/ciphers/organization-details?<data..>")] fn get_org_details(data: Form<OrgIdData>, headers: Headers, conn: DbConn) -> Json<Value> { let ciphers = Cipher::find_by_org(&data.organization_id, &conn); - let ciphers_json: Vec<Value> = ciphers - .iter() - .map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn)) - .collect(); + let ciphers_json: Vec<Value> = + ciphers.iter().map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn)).collect(); Json(json!({ "Data": ciphers_json, @@ -544,9 +545,7 @@ fn send_invite(org_id: String, data: JsonUpcase<InviteData>, headers: AdminHeade match Collection::find_by_uuid_and_org(&col.Id, &org_id, &conn) { None => err!("Collection not found in Organization"), Some(collection) => { - CollectionUser::save(&user.uuid, &collection.uuid, - col.ReadOnly, col.HidePasswords, - &conn)?; + CollectionUser::save(&user.uuid, &collection.uuid, col.ReadOnly, col.HidePasswords, &conn)?; } } } @@ -814,9 +813,13 @@ fn edit_user( match Collection::find_by_uuid_and_org(&col.Id, &org_id, &conn) { None => err!("Collection not found in Organization"), Some(collection) => { - CollectionUser::save(&user_to_edit.user_uuid, &collection.uuid, - col.ReadOnly, col.HidePasswords, - &conn)?; + CollectionUser::save( + &user_to_edit.user_uuid, + &collection.uuid, + col.ReadOnly, + col.HidePasswords, + &conn, + )?; } } } @@ -912,16 +915,8 @@ fn post_org_import( .into_iter() .map(|cipher_data| { let mut cipher = Cipher::new(cipher_data.Type, cipher_data.Name.clone()); - update_cipher_from_data( - &mut cipher, - cipher_data, - &headers, - false, - &conn, - &nt, - UpdateType::CipherCreate, - ) - .ok(); + update_cipher_from_data(&mut cipher, cipher_data, &headers, false, &conn, &nt, UpdateType::CipherCreate) + .ok(); cipher }) .collect(); @@ -1002,7 +997,13 @@ struct PolicyData { } #[put("/organizations/<org_id>/policies/<pol_type>", 
data = "<data>")] -fn put_policy(org_id: String, pol_type: i32, data: Json<PolicyData>, _headers: AdminHeaders, conn: DbConn) -> JsonResult { +fn put_policy( + org_id: String, + pol_type: i32, + data: Json<PolicyData>, + _headers: AdminHeaders, + conn: DbConn, +) -> JsonResult { let data: PolicyData = data.into_inner(); let pol_type_enum = match OrgPolicyType::from_i32(pol_type) { @@ -1160,8 +1161,7 @@ fn import(org_id: String, data: JsonUpcase<OrgImportData>, headers: Headers, con // If user is not part of the organization, but it exists } else if UserOrganization::find_by_email_and_org(&user_data.Email, &org_id, &conn).is_none() { - if let Some (user) = User::find_by_mail(&user_data.Email, &conn) { - + if let Some(user) = User::find_by_mail(&user_data.Email, &conn) { let user_org_status = if CONFIG.mail_enabled() { UserOrgStatus::Invited as i32 } else { @@ -1197,7 +1197,7 @@ fn import(org_id: String, data: JsonUpcase<OrgImportData>, headers: Headers, con // If this flag is enabled, any user that isn't provided in the Users list will be removed (by default they will be kept unless they have Deleted == true) if data.OverwriteExisting { for user_org in UserOrganization::find_by_org_and_type(&org_id, UserOrgType::User as i32, &conn) { - if let Some (user_email) = User::find_by_uuid(&user_org.user_uuid, &conn).map(|u| u.email) { + if let Some(user_email) = User::find_by_uuid(&user_org.user_uuid, &conn).map(|u| u.email) { if !data.Users.iter().any(|u| u.Email == user_email) { user_org.delete(&conn)?; } diff --git a/src/api/core/sends.rs b/src/api/core/sends.rs @@ -16,15 +16,7 @@ use crate::{ const SEND_INACCESSIBLE_MSG: &str = "Send does not exist or is no longer available"; pub fn routes() -> Vec<rocket::Route> { - routes![ - post_send, - post_send_file, - post_access, - post_access_file, - put_send, - delete_send, - put_remove_password - ] + routes![post_send, post_send_file, post_access, post_access_file, put_send, delete_send, put_remove_password] } pub fn purge_sends(pool: DbPool) { @@ -171,13 +163,7 @@ fn post_send_file(data: Data, content_type: &ContentType, headers: Headers, conn None => err!("No model entry present"), }; - let size = match data_entry - .data - .save() - .memory_threshold(0) - .size_limit(size_limit) - .with_path(&file_path) - { + let size = match data_entry.data.save().memory_threshold(0).size_limit(size_limit).with_path(&file_path) { SaveResult::Full(SavedData::File(_, size)) => size as i32, SaveResult::Full(other) => { std::fs::remove_file(&file_path).ok(); @@ -198,10 +184,7 @@ fn post_send_file(data: Data, content_type: &ContentType, headers: Headers, conn if let Some(o) = data_value.as_object_mut() { o.insert(String::from("Id"), Value::String(file_id)); o.insert(String::from("Size"), Value::Number(size.into())); - o.insert( - String::from("SizeName"), - Value::String(crate::util::get_display_size(size)), - ); + o.insert(String::from("SizeName"), Value::String(crate::util::get_display_size(size))); } send.data = serde_json::to_string(&data_value)?; diff --git a/src/api/core/two_factor/authenticator.rs b/src/api/core/two_factor/authenticator.rs @@ -17,11 +17,7 @@ use crate::{ pub use crate::config::CONFIG; pub fn routes() -> Vec<Route> { - routes![ - generate_authenticator, - activate_authenticator, - activate_authenticator_put, - ] + routes![generate_authenticator, activate_authenticator, activate_authenticator_put,] } #[post("/two-factor/get-authenticator", data = "<data>")] @@ -141,7 +137,7 @@ pub fn validate_totp_code(user_uuid: &str, totp_code: u64, secret: 
&str, ip: &Cl // The amount of steps back and forward in time // Also check if we need to disable time drifted TOTP codes. // If that is the case, we set the steps to 0 so only the current TOTP is valid. - let steps: i64 = if CONFIG.authenticator_disable_time_drift() { 0 } else { 1 }; + let steps = !CONFIG.authenticator_disable_time_drift() as i64; for step in -steps..=steps { let time_step = current_timestamp / 30i64 + step; @@ -163,22 +159,11 @@ pub fn validate_totp_code(user_uuid: &str, totp_code: u64, secret: &str, ip: &Cl twofactor.save(&conn)?; return Ok(()); } else if generated == totp_code && time_step <= twofactor.last_used as i64 { - warn!( - "This or a TOTP code within {} steps back and forward has already been used!", - steps - ); - err!(format!( - "Invalid TOTP code! Server time: {} IP: {}", - current_time.format("%F %T UTC"), - ip.ip - )); + warn!("This or a TOTP code within {} steps back and forward has already been used!", steps); + err!(format!("Invalid TOTP code! Server time: {} IP: {}", current_time.format("%F %T UTC"), ip.ip)); } } // Else no valide code received, deny access - err!(format!( - "Invalid TOTP code! Server time: {} IP: {}", - current_time.format("%F %T UTC"), - ip.ip - )); + err!(format!("Invalid TOTP code! Server time: {} IP: {}", current_time.format("%F %T UTC"), ip.ip)); } diff --git a/src/api/core/two_factor/duo.rs b/src/api/core/two_factor/duo.rs @@ -12,6 +12,7 @@ use crate::{ DbConn, }, error::MapResult, + util::get_reqwest_client, CONFIG, }; @@ -59,7 +60,11 @@ impl DuoData { ik.replace_range(digits.., replaced); sk.replace_range(digits.., replaced); - Self { host, ik, sk } + Self { + host, + ik, + sk, + } } } @@ -185,9 +190,7 @@ fn activate_duo_put(data: JsonUpcase<EnableDuoData>, headers: Headers, conn: DbC } fn duo_api_request(method: &str, path: &str, params: &str, data: &DuoData) -> EmptyResult { - const AGENT: &str = "bitwarden_rs:Duo/1.0 (Rust)"; - - use reqwest::{blocking::Client, header::*, Method}; + use reqwest::{header, Method}; use std::str::FromStr; // https://duo.com/docs/authapi#api-details @@ -199,11 +202,13 @@ fn duo_api_request(method: &str, path: &str, params: &str, data: &DuoData) -> Em let m = Method::from_str(method).unwrap_or_default(); - Client::new() + let client = get_reqwest_client(); + + client .request(m, &url) .basic_auth(username, Some(password)) - .header(USER_AGENT, AGENT) - .header(DATE, date) + .header(header::USER_AGENT, "bitwarden_rs:Duo/1.0 (Rust)") + .header(header::DATE, date) .send()? .error_for_status()?; diff --git a/src/api/core/two_factor/email.rs b/src/api/core/two_factor/email.rs @@ -125,11 +125,7 @@ fn send_email(data: JsonUpcase<SendEmailData>, headers: Headers, conn: DbConn) - let twofactor_data = EmailTokenData::new(data.Email, generated_token); // Uses EmailVerificationChallenge as type to show that it's not verified yet. 
- let twofactor = TwoFactor::new( - user.uuid, - TwoFactorType::EmailVerificationChallenge, - twofactor_data.to_json(), - ); + let twofactor = TwoFactor::new(user.uuid, TwoFactorType::EmailVerificationChallenge, twofactor_data.to_json()); twofactor.save(&conn)?; mail::send_token(&twofactor_data.email, &twofactor_data.last_token.map_res("Token is empty")?)?; @@ -186,7 +182,8 @@ fn email(data: JsonUpcase<EmailData>, headers: Headers, conn: DbConn) -> JsonRes /// Validate the email code when used as TwoFactor token mechanism pub fn validate_email_code_str(user_uuid: &str, token: &str, data: &str, conn: &DbConn) -> EmptyResult { let mut email_data = EmailTokenData::from_json(&data)?; - let mut twofactor = TwoFactor::find_by_user_and_type(&user_uuid, TwoFactorType::Email as i32, &conn).map_res("Two factor not found")?; + let mut twofactor = TwoFactor::find_by_user_and_type(&user_uuid, TwoFactorType::Email as i32, &conn) + .map_res("Two factor not found")?; let issued_token = match &email_data.last_token { Some(t) => t, _ => err!("No token available"), diff --git a/src/api/core/two_factor/mod.rs b/src/api/core/two_factor/mod.rs @@ -21,13 +21,7 @@ pub mod u2f; pub mod yubikey; pub fn routes() -> Vec<Route> { - let mut routes = routes![ - get_twofactor, - get_recover, - recover, - disable_twofactor, - disable_twofactor_put, - ]; + let mut routes = routes![get_twofactor, get_recover, recover, disable_twofactor, disable_twofactor_put,]; routes.append(&mut authenticator::routes()); routes.append(&mut duo::routes()); diff --git a/src/api/core/two_factor/u2f.rs b/src/api/core/two_factor/u2f.rs @@ -28,13 +28,7 @@ static APP_ID: Lazy<String> = Lazy::new(|| format!("{}/app-id.json", &CONFIG.dom static U2F: Lazy<U2f> = Lazy::new(|| U2f::new(APP_ID.clone())); pub fn routes() -> Vec<Route> { - routes![ - generate_u2f, - generate_u2f_challenge, - activate_u2f, - activate_u2f_put, - delete_u2f, - ] + routes![generate_u2f, generate_u2f_challenge, activate_u2f, activate_u2f_put, delete_u2f,] } #[post("/two-factor/get-u2f", data = "<data>")] @@ -161,10 +155,7 @@ fn activate_u2f(data: JsonUpcase<EnableU2FData>, headers: Headers, conn: DbConn) let response: RegisterResponseCopy = serde_json::from_str(&data.DeviceResponse)?; - let error_code = response - .error_code - .clone() - .map_or("0".into(), NumberOrString::into_string); + let error_code = response.error_code.clone().map_or("0".into(), NumberOrString::into_string); if error_code != "0" { err!("Error registering U2F token") @@ -300,20 +291,13 @@ fn _old_parse_registrations(registations: &str) -> Vec<Registration> { let regs: Vec<Value> = serde_json::from_str(registations).expect("Can't parse Registration data"); - regs.into_iter() - .map(|r| serde_json::from_value(r).unwrap()) - .map(|Helper(r)| r) - .collect() + regs.into_iter().map(|r| serde_json::from_value(r).unwrap()).map(|Helper(r)| r).collect() } pub fn generate_u2f_login(user_uuid: &str, conn: &DbConn) -> ApiResult<U2fSignRequest> { let challenge = _create_u2f_challenge(user_uuid, TwoFactorType::U2fLoginChallenge, conn); - let registrations: Vec<_> = get_u2f_registrations(user_uuid, conn)? 
- .1 - .into_iter() - .map(|r| r.reg) - .collect(); + let registrations: Vec<_> = get_u2f_registrations(user_uuid, conn)?.1.into_iter().map(|r| r.reg).collect(); if registrations.is_empty() { err!("No U2F devices registered") diff --git a/src/api/icons.rs b/src/api/icons.rs @@ -12,7 +12,11 @@ use regex::Regex; use reqwest::{blocking::Client, blocking::Response, header, Url}; use rocket::{http::ContentType, http::Cookie, response::Content, Route}; -use crate::{error::Error, util::Cached, CONFIG}; +use crate::{ + error::Error, + util::{get_reqwest_client_builder, Cached}, + CONFIG, +}; pub fn routes() -> Vec<Route> { routes![icon] @@ -25,14 +29,19 @@ static CLIENT: Lazy<Client> = Lazy::new(|| { default_headers.insert(header::ACCEPT_LANGUAGE, header::HeaderValue::from_static("en-US,en;q=0.8")); default_headers.insert(header::CACHE_CONTROL, header::HeaderValue::from_static("no-cache")); default_headers.insert(header::PRAGMA, header::HeaderValue::from_static("no-cache")); - default_headers.insert(header::ACCEPT, header::HeaderValue::from_static("text/html,application/xhtml+xml,application/xml; q=0.9,image/webp,image/apng,*/*;q=0.8")); + default_headers.insert( + header::ACCEPT, + header::HeaderValue::from_static( + "text/html,application/xhtml+xml,application/xml; q=0.9,image/webp,image/apng,*/*;q=0.8", + ), + ); // Reuse the client between requests - Client::builder() + get_reqwest_client_builder() .timeout(Duration::from_secs(CONFIG.icon_download_timeout())) .default_headers(default_headers) .build() - .unwrap() + .expect("Failed to build icon client") }); // Build Regex only once since this takes a lot of time. @@ -49,13 +58,16 @@ fn icon(domain: String) -> Cached<Content<Vec<u8>>> { if !is_valid_domain(&domain) { warn!("Invalid domain: {}", domain); - return Cached::ttl(Content(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()), CONFIG.icon_cache_negttl()); + return Cached::ttl( + Content(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()), + CONFIG.icon_cache_negttl(), + ); } match get_icon(&domain) { Some((icon, icon_type)) => { Cached::ttl(Content(ContentType::new("image", icon_type), icon), CONFIG.icon_cache_ttl()) - }, + } _ => Cached::ttl(Content(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()), CONFIG.icon_cache_negttl()), } } @@ -77,7 +89,10 @@ fn is_valid_domain(domain: &str) -> bool { || domain.starts_with('-') || domain.ends_with('-') { - debug!("Domain validation error: '{}' is either empty, contains '..', starts with an '.', starts or ends with a '-'", domain); + debug!( + "Domain validation error: '{}' is either empty, contains '..', starts with an '.', starts or ends with a '-'", + domain + ); return false; } else if domain.len() > 255 { debug!("Domain validation error: '{}' exceeds 255 characters", domain); @@ -255,7 +270,7 @@ fn get_icon(domain: &str) -> Option<(Vec<u8>, String)> { } if let Some(icon) = get_cached_icon(&path) { - let icon_type = match get_icon_type(&icon) { + let icon_type = match get_icon_type(&icon) { Some(x) => x, _ => "x-icon", }; @@ -338,12 +353,20 @@ struct Icon { impl Icon { const fn new(priority: u8, href: String) -> Self { - Self { href, priority } + Self { + href, + priority, + } } } fn get_favicons_node(node: &std::rc::Rc<markup5ever_rcdom::Node>, icons: &mut Vec<Icon>, url: &Url) { - if let markup5ever_rcdom::NodeData::Element { name, attrs, .. } = &node.data { + if let markup5ever_rcdom::NodeData::Element { + name, + attrs, + .. 
+ } = &node.data + { if name.local.as_ref() == "link" { let mut has_rel = false; let mut href = None; @@ -354,7 +377,8 @@ fn get_favicons_node(node: &std::rc::Rc<markup5ever_rcdom::Node>, icons: &mut Ve let attr_name = attr.name.local.as_ref(); let attr_value = attr.value.as_ref(); - if attr_name == "rel" && ICON_REL_REGEX.is_match(attr_value) && !ICON_REL_BLACKLIST.is_match(attr_value) { + if attr_name == "rel" && ICON_REL_REGEX.is_match(attr_value) && !ICON_REL_BLACKLIST.is_match(attr_value) + { has_rel = true; } else if attr_name == "href" { href = Some(attr_value); @@ -683,6 +707,6 @@ fn get_icon_type(bytes: &[u8]) -> Option<&'static str> { [82, 73, 70, 70, ..] => Some("webp"), [255, 216, 255, ..] => Some("jpeg"), [66, 77, ..] => Some("bmp"), - _ => None + _ => None, } } diff --git a/src/api/identity.rs b/src/api/identity.rs @@ -88,34 +88,28 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult let username = data.username.as_ref().unwrap(); let user = match User::find_by_mail(username, &conn) { Some(user) => user, - None => err!( - "Username or password is incorrect. Try again", - format!("IP: {}. Username: {}.", ip.ip, username) - ), + None => err!("Username or password is incorrect. Try again", format!("IP: {}. Username: {}.", ip.ip, username)), }; // Check password let password = data.password.as_ref().unwrap(); if !user.check_valid_password(password) { - err!( - "Username or password is incorrect. Try again", - format!("IP: {}. Username: {}.", ip.ip, username) - ) + err!("Username or password is incorrect. Try again", format!("IP: {}. Username: {}.", ip.ip, username)) } // Check if the user is disabled if !user.enabled { - err!( - "This user has been disabled", - format!("IP: {}. Username: {}.", ip.ip, username) - ) + err!("This user has been disabled", format!("IP: {}. Username: {}.", ip.ip, username)) } let now = Local::now(); if user.verified_at.is_none() && CONFIG.mail_enabled() && CONFIG.signups_verify() { let now = now.naive_utc(); - if user.last_verifying_at.is_none() || now.signed_duration_since(user.last_verifying_at.unwrap()).num_seconds() > CONFIG.signups_verify_resend_time() as i64 { + if user.last_verifying_at.is_none() + || now.signed_duration_since(user.last_verifying_at.unwrap()).num_seconds() + > CONFIG.signups_verify_resend_time() as i64 + { let resend_limit = CONFIG.signups_verify_resend_limit() as i32; if resend_limit == 0 || user.login_verify_count < resend_limit { // We want to send another email verification if we require signups to verify @@ -135,10 +129,7 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult } // We still want the login to fail until they actually verified the email address - err!( - "Please verify your email before trying again.", - format!("IP: {}. Username: {}.", ip.ip, username) - ) + err!("Please verify your email before trying again.", format!("IP: {}. 
Username: {}.", ip.ip, username)) } let (mut device, new_device) = get_device(&data, &conn, &user); @@ -169,7 +160,7 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult "Key": user.akey, "PrivateKey": user.private_key, //"TwoFactorToken": "11122233333444555666777888999" - + "Kdf": user.client_kdf_type, "KdfIterations": user.client_kdf_iter, "ResetMasterPassword": false,// TODO: Same as above @@ -236,9 +227,7 @@ fn twofactor_auth( None => err_json!(_json_err_twofactor(&twofactor_ids, user_uuid, conn)?, "2FA token not provided"), }; - let selected_twofactor = twofactors - .into_iter() - .find(|tf| tf.atype == selected_id && tf.enabled); + let selected_twofactor = twofactors.into_iter().find(|tf| tf.atype == selected_id && tf.enabled); use crate::api::core::two_factor as _tf; use crate::crypto::ct_eq; @@ -247,18 +236,26 @@ fn twofactor_auth( let mut remember = data.two_factor_remember.unwrap_or(0); match TwoFactorType::from_i32(selected_id) { - Some(TwoFactorType::Authenticator) => _tf::authenticator::validate_totp_code_str(user_uuid, twofactor_code, &selected_data?, ip, conn)?, + Some(TwoFactorType::Authenticator) => { + _tf::authenticator::validate_totp_code_str(user_uuid, twofactor_code, &selected_data?, ip, conn)? + } Some(TwoFactorType::U2f) => _tf::u2f::validate_u2f_login(user_uuid, twofactor_code, conn)?, Some(TwoFactorType::YubiKey) => _tf::yubikey::validate_yubikey_login(twofactor_code, &selected_data?)?, - Some(TwoFactorType::Duo) => _tf::duo::validate_duo_login(data.username.as_ref().unwrap(), twofactor_code, conn)?, - Some(TwoFactorType::Email) => _tf::email::validate_email_code_str(user_uuid, twofactor_code, &selected_data?, conn)?, + Some(TwoFactorType::Duo) => { + _tf::duo::validate_duo_login(data.username.as_ref().unwrap(), twofactor_code, conn)? + } + Some(TwoFactorType::Email) => { + _tf::email::validate_email_code_str(user_uuid, twofactor_code, &selected_data?, conn)? 
+ } Some(TwoFactorType::Remember) => { match device.twofactor_remember { Some(ref code) if !CONFIG.disable_2fa_remember() && ct_eq(code, twofactor_code) => { remember = 1; // Make sure we also return the token here, otherwise it will only remember the first time } - _ => err_json!(_json_err_twofactor(&twofactor_ids, user_uuid, conn)?, "2FA Remember token not provided"), + _ => { + err_json!(_json_err_twofactor(&twofactor_ids, user_uuid, conn)?, "2FA Remember token not provided") + } } } _ => err!("Invalid two factor provider"), diff --git a/src/api/mod.rs b/src/api/mod.rs @@ -55,9 +55,9 @@ impl NumberOrString { use std::num::ParseIntError as PIE; match self { NumberOrString::Number(n) => Ok(n), - NumberOrString::String(s) => s - .parse() - .map_err(|e: PIE| crate::Error::new("Can't convert to number", e.to_string())), + NumberOrString::String(s) => { + s.parse().map_err(|e: PIE| crate::Error::new("Can't convert to number", e.to_string())) + } } } } diff --git a/src/api/notifications.rs b/src/api/notifications.rs @@ -4,12 +4,7 @@ use rocket::Route; use rocket_contrib::json::Json; use serde_json::Value as JsonValue; -use crate::{ - api::EmptyResult, - auth::Headers, - db::DbConn, - Error, CONFIG, -}; +use crate::{api::EmptyResult, auth::Headers, db::DbConn, Error, CONFIG}; pub fn routes() -> Vec<Route> { routes![negotiate, websockets_err] @@ -19,12 +14,16 @@ static SHOW_WEBSOCKETS_MSG: AtomicBool = AtomicBool::new(true); #[get("/hub")] fn websockets_err() -> EmptyResult { - if CONFIG.websocket_enabled() && SHOW_WEBSOCKETS_MSG.compare_exchange(true, false, Ordering::Relaxed, Ordering::Relaxed).is_ok() { - err!(" + if CONFIG.websocket_enabled() + && SHOW_WEBSOCKETS_MSG.compare_exchange(true, false, Ordering::Relaxed, Ordering::Relaxed).is_ok() + { + err!( + " ########################################################### '/notifications/hub' should be proxied to the websocket server or notifications won't work. Go to the Wiki for more info, or disable WebSockets setting WEBSOCKET_ENABLED=false. 
- ###########################################################################################\n") + ###########################################################################################\n" + ) } else { Err(Error::empty()) } @@ -204,9 +203,7 @@ impl Handler for WsHandler { let handler_insert = self.out.clone(); let handler_update = self.out.clone(); - self.users - .map - .upsert(user_uuid, || vec![handler_insert], |ref mut v| v.push(handler_update)); + self.users.map.upsert(user_uuid, || vec![handler_insert], |ref mut v| v.push(handler_update)); // Schedule a ping to keep the connection alive self.out.timeout(PING_MS, PING) @@ -216,7 +213,11 @@ impl Handler for WsHandler { if let Message::Text(text) = msg.clone() { let json = &text[..text.len() - 1]; // Remove last char - if let Ok(InitialMessage { protocol, version }) = from_str::<InitialMessage>(json) { + if let Ok(InitialMessage { + protocol, + version, + }) = from_str::<InitialMessage>(json) + { if &protocol == "messagepack" && version == 1 { return self.out.send(&INITIAL_RESPONSE[..]); // Respond to initial message } @@ -295,10 +296,7 @@ impl WebSocketUsers { // NOTE: The last modified date needs to be updated before calling these methods pub fn send_user_update(&self, ut: UpdateType, user: &User) { let data = create_update( - vec![ - ("UserId".into(), user.uuid.clone().into()), - ("Date".into(), serialize_date(user.updated_at)), - ], + vec![("UserId".into(), user.uuid.clone().into()), ("Date".into(), serialize_date(user.updated_at))], ut, ); diff --git a/src/api/web.rs b/src/api/web.rs @@ -83,11 +83,15 @@ fn static_files(filename: String) -> Result<Content<&'static [u8]>, Error> { "hibp.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/hibp.png"))), "bootstrap.css" => Ok(Content(ContentType::CSS, include_bytes!("../static/scripts/bootstrap.css"))), - "bootstrap-native.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/bootstrap-native.js"))), + "bootstrap-native.js" => { + Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/bootstrap-native.js"))) + } "identicon.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/identicon.js"))), "datatables.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/datatables.js"))), "datatables.css" => Ok(Content(ContentType::CSS, include_bytes!("../static/scripts/datatables.css"))), - "jquery-3.5.1.slim.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/jquery-3.5.1.slim.js"))), + "jquery-3.5.1.slim.js" => { + Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/jquery-3.5.1.slim.js"))) + } _ => err!(format!("Static file not found: {}", filename)), } } diff --git a/src/auth.rs b/src/auth.rs @@ -223,10 +223,9 @@ use crate::db::{ }; pub struct Host { - pub host: String + pub host: String, } - impl<'a, 'r> FromRequest<'a, 'r> for Host { type Error = &'static str; @@ -261,7 +260,9 @@ impl<'a, 'r> FromRequest<'a, 'r> for Host { format!("{}://{}", protocol, host) }; - Outcome::Success(Host { host }) + Outcome::Success(Host { + host, + }) } } @@ -317,10 +318,8 @@ impl<'a, 'r> FromRequest<'a, 'r> for Headers { }; if user.security_stamp != claims.sstamp { - if let Some(stamp_exception) = user - .stamp_exception - .as_deref() - .and_then(|s| serde_json::from_str::<UserStampException>(s).ok()) + if let Some(stamp_exception) = + user.stamp_exception.as_deref().and_then(|s| serde_json::from_str::<UserStampException>(s).ok()) { let current_route = match 
request.route().and_then(|r| r.name) { Some(name) => name, @@ -338,7 +337,11 @@ impl<'a, 'r> FromRequest<'a, 'r> for Headers { } } - Outcome::Success(Headers { host, device, user }) + Outcome::Success(Headers { + host, + device, + user, + }) } } @@ -506,7 +509,11 @@ impl<'a, 'r> FromRequest<'a, 'r> for ManagerHeaders { }; if !headers.org_user.has_full_access() { - match CollectionUser::find_by_collection_and_user(&col_id, &headers.org_user.user_uuid, &conn) { + match CollectionUser::find_by_collection_and_user( + &col_id, + &headers.org_user.user_uuid, + &conn, + ) { Some(_) => (), None => err_handler!("The current user isn't a manager for this collection"), } @@ -636,10 +643,10 @@ impl<'a, 'r> FromRequest<'a, 'r> for ClientIp { None }; - let ip = ip - .or_else(|| req.remote().map(|r| r.ip())) - .unwrap_or_else(|| "0.0.0.0".parse().unwrap()); + let ip = ip.or_else(|| req.remote().map(|r| r.ip())).unwrap_or_else(|| "0.0.0.0".parse().unwrap()); - Outcome::Success(ClientIp { ip }) + Outcome::Success(ClientIp { + ip, + }) } } diff --git a/src/config.rs b/src/config.rs @@ -527,10 +527,7 @@ fn validate_config(cfg: &ConfigItems) -> Result<(), Error> { let limit = 256; if cfg.database_max_conns < 1 || cfg.database_max_conns > limit { - err!(format!( - "`DATABASE_MAX_CONNS` contains an invalid value. Ensure it is between 1 and {}.", - limit, - )); + err!(format!("`DATABASE_MAX_CONNS` contains an invalid value. Ensure it is between 1 and {}.", limit,)); } let dom = cfg.domain.to_lowercase(); @@ -872,9 +869,7 @@ fn case_helper<'reg, 'rc>( rc: &mut RenderContext<'reg, 'rc>, out: &mut dyn Output, ) -> HelperResult { - let param = h - .param(0) - .ok_or_else(|| RenderError::new("Param not found for helper \"case\""))?; + let param = h.param(0).ok_or_else(|| RenderError::new("Param not found for helper \"case\""))?; let value = param.value().clone(); if h.params().iter().skip(1).any(|x| x.value() == &value) { @@ -891,21 +886,15 @@ fn js_escape_helper<'reg, 'rc>( _rc: &mut RenderContext<'reg, 'rc>, out: &mut dyn Output, ) -> HelperResult { - let param = h - .param(0) - .ok_or_else(|| RenderError::new("Param not found for helper \"js_escape\""))?; + let param = h.param(0).ok_or_else(|| RenderError::new("Param not found for helper \"js_escape\""))?; - let no_quote = h - .param(1) - .is_some(); + let no_quote = h.param(1).is_some(); - let value = param - .value() - .as_str() - .ok_or_else(|| RenderError::new("Param for helper \"js_escape\" is not a String"))?; + let value = + param.value().as_str().ok_or_else(|| RenderError::new("Param for helper \"js_escape\" is not a String"))?; let mut escaped_value = value.replace('\\', "").replace('\'', "\\x22").replace('\"', "\\x27"); - if ! no_quote { + if !no_quote { escaped_value = format!("&quot;{}&quot;", escaped_value); } diff --git a/src/crypto.rs b/src/crypto.rs @@ -47,9 +47,7 @@ pub fn get_random_64() -> Vec<u8> { pub fn get_random(mut array: Vec<u8>) -> Vec<u8> { use ring::rand::{SecureRandom, SystemRandom}; - SystemRandom::new() - .fill(&mut array) - .expect("Error generating random values"); + SystemRandom::new().fill(&mut array).expect("Error generating random values"); array } diff --git a/src/db/mod.rs b/src/db/mod.rs @@ -23,7 +23,6 @@ pub mod __mysql_schema; #[path = "schemas/postgresql/schema.rs"] pub mod __postgresql_schema; - // This is used to generate the main DbConn and DbPool enums, which contain one variant for each database supported macro_rules! 
generate_connections { ( $( $name:ident: $ty:ty ),+ ) => { @@ -108,7 +107,6 @@ impl DbConnType { } } - #[macro_export] macro_rules! db_run { // Same for all dbs @@ -154,7 +152,6 @@ macro_rules! db_run { }; } - pub trait FromDb { type Output; #[allow(clippy::wrong_self_convention)] @@ -239,7 +236,6 @@ pub fn backup_database(conn: &DbConn) -> Result<(), Error> { Ok(()) } - /// Get the SQL Server version pub fn get_sql_server_version(conn: &DbConn) -> String { db_run! {@raw conn: @@ -292,8 +288,7 @@ mod sqlite_migrations { use diesel::{Connection, RunQueryDsl}; // Make sure the database is up to date (create if it doesn't exist, or run the migrations) - let connection = - diesel::sqlite::SqliteConnection::establish(&crate::CONFIG.database_url())?; + let connection = diesel::sqlite::SqliteConnection::establish(&crate::CONFIG.database_url())?; // Disable Foreign Key Checks during migration // Scoped to a connection. @@ -303,9 +298,7 @@ mod sqlite_migrations { // Turn on WAL in SQLite if crate::CONFIG.enable_db_wal() { - diesel::sql_query("PRAGMA journal_mode=wal") - .execute(&connection) - .expect("Failed to turn on WAL"); + diesel::sql_query("PRAGMA journal_mode=wal").execute(&connection).expect("Failed to turn on WAL"); } embedded_migrations::run_with_output(&connection, &mut std::io::stdout())?; @@ -321,8 +314,7 @@ mod mysql_migrations { pub fn run_migrations() -> Result<(), super::Error> { use diesel::{Connection, RunQueryDsl}; // Make sure the database is up to date (create if it doesn't exist, or run the migrations) - let connection = - diesel::mysql::MysqlConnection::establish(&crate::CONFIG.database_url())?; + let connection = diesel::mysql::MysqlConnection::establish(&crate::CONFIG.database_url())?; // Disable Foreign Key Checks during migration // Scoped to a connection/session. @@ -343,8 +335,7 @@ mod postgresql_migrations { pub fn run_migrations() -> Result<(), super::Error> { use diesel::{Connection, RunQueryDsl}; // Make sure the database is up to date (create if it doesn't exist, or run the migrations) - let connection = - diesel::pg::PgConnection::establish(&crate::CONFIG.database_url())?; + let connection = diesel::pg::PgConnection::establish(&crate::CONFIG.database_url())?; // Disable Foreign Key Checks during migration // FIXME: Per https://www.postgresql.org/docs/12/sql-set-constraints.html, diff --git a/src/db/models/attachment.rs b/src/db/models/attachment.rs @@ -59,7 +59,6 @@ use crate::error::MapResult; /// Database methods impl Attachment { - pub fn save(&self, conn: &DbConn) -> EmptyResult { db_run! 
{ conn: sqlite, mysql { diff --git a/src/db/models/cipher.rs b/src/db/models/cipher.rs @@ -4,14 +4,7 @@ use serde_json::Value; use crate::CONFIG; use super::{ - Attachment, - CollectionCipher, - Favorite, - FolderCipher, - Organization, - User, - UserOrgStatus, - UserOrgType, + Attachment, CollectionCipher, Favorite, FolderCipher, Organization, User, UserOrgStatus, UserOrgType, UserOrganization, }; @@ -93,16 +86,16 @@ impl Cipher { }; let fields_json = self.fields.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null); - let password_history_json = self.password_history.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null); + let password_history_json = + self.password_history.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null); - let (read_only, hide_passwords) = - match self.get_access_restrictions(&user_uuid, conn) { - Some((ro, hp)) => (ro, hp), - None => { - error!("Cipher ownership assertion failure"); - (true, true) - }, - }; + let (read_only, hide_passwords) = match self.get_access_restrictions(&user_uuid, conn) { + Some((ro, hp)) => (ro, hp), + None => { + error!("Cipher ownership assertion failure"); + (true, true) + } + }; // Get the type_data or a default to an empty json object '{}'. // If not passing an empty object, mobile clients will crash. @@ -197,12 +190,10 @@ impl Cipher { None => { // Belongs to Organization, need to update affected users if let Some(ref org_uuid) = self.organization_uuid { - UserOrganization::find_by_cipher_and_org(&self.uuid, &org_uuid, conn) - .iter() - .for_each(|user_org| { - User::update_uuid_revision(&user_org.user_uuid, conn); - user_uuids.push(user_org.user_uuid.clone()) - }); + UserOrganization::find_by_cipher_and_org(&self.uuid, &org_uuid, conn).iter().for_each(|user_org| { + User::update_uuid_revision(&user_org.user_uuid, conn); + user_uuids.push(user_org.user_uuid.clone()) + }); } } }; diff --git a/src/db/models/collection.rs b/src/db/models/collection.rs @@ -1,6 +1,6 @@ use serde_json::Value; -use super::{Organization, UserOrgStatus, UserOrgType, UserOrganization, User, Cipher}; +use super::{Cipher, Organization, User, UserOrgStatus, UserOrgType, UserOrganization}; db_object! 
{ #[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)] @@ -127,11 +127,9 @@ impl Collection { } pub fn update_users_revision(&self, conn: &DbConn) { - UserOrganization::find_by_collection_and_org(&self.uuid, &self.org_uuid, conn) - .iter() - .for_each(|user_org| { - User::update_uuid_revision(&user_org.user_uuid, conn); - }); + UserOrganization::find_by_collection_and_org(&self.uuid, &self.org_uuid, conn).iter().for_each(|user_org| { + User::update_uuid_revision(&user_org.user_uuid, conn); + }); } pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option<Self> { @@ -170,10 +168,7 @@ impl Collection { } pub fn find_by_organization_and_user_uuid(org_uuid: &str, user_uuid: &str, conn: &DbConn) -> Vec<Self> { - Self::find_by_user_uuid(user_uuid, conn) - .into_iter() - .filter(|c| c.org_uuid == org_uuid) - .collect() + Self::find_by_user_uuid(user_uuid, conn).into_iter().filter(|c| c.org_uuid == org_uuid).collect() } pub fn find_by_organization(org_uuid: &str, conn: &DbConn) -> Vec<Self> { @@ -284,7 +279,13 @@ impl CollectionUser { }} } - pub fn save(user_uuid: &str, collection_uuid: &str, read_only: bool, hide_passwords: bool, conn: &DbConn) -> EmptyResult { + pub fn save( + user_uuid: &str, + collection_uuid: &str, + read_only: bool, + hide_passwords: bool, + conn: &DbConn, + ) -> EmptyResult { User::update_uuid_revision(&user_uuid, conn); db_run! { conn: @@ -374,11 +375,9 @@ impl CollectionUser { } pub fn delete_all_by_collection(collection_uuid: &str, conn: &DbConn) -> EmptyResult { - CollectionUser::find_by_collection(&collection_uuid, conn) - .iter() - .for_each(|collection| { - User::update_uuid_revision(&collection.user_uuid, conn); - }); + CollectionUser::find_by_collection(&collection_uuid, conn).iter().for_each(|collection| { + User::update_uuid_revision(&collection.user_uuid, conn); + }); db_run! { conn: { diesel::delete(users_collections::table.filter(users_collections::collection_uuid.eq(collection_uuid))) diff --git a/src/db/models/favorite.rs b/src/db/models/favorite.rs @@ -20,7 +20,7 @@ use crate::error::MapResult; impl Favorite { // Returns whether the specified cipher is a favorite of the specified user. pub fn is_favorite(cipher_uuid: &str, user_uuid: &str, conn: &DbConn) -> bool { - db_run!{ conn: { + db_run! { conn: { let query = favorites::table .filter(favorites::cipher_uuid.eq(cipher_uuid)) .filter(favorites::user_uuid.eq(user_uuid)) @@ -36,19 +36,19 @@ impl Favorite { match (old, new) { (false, true) => { User::update_uuid_revision(user_uuid, &conn); - db_run!{ conn: { - diesel::insert_into(favorites::table) - .values(( - favorites::user_uuid.eq(user_uuid), - favorites::cipher_uuid.eq(cipher_uuid), - )) - .execute(conn) - .map_res("Error adding favorite") - }} + db_run! { conn: { + diesel::insert_into(favorites::table) + .values(( + favorites::user_uuid.eq(user_uuid), + favorites::cipher_uuid.eq(cipher_uuid), + )) + .execute(conn) + .map_res("Error adding favorite") + }} } (true, false) => { User::update_uuid_revision(user_uuid, &conn); - db_run!{ conn: { + db_run! { conn: { diesel::delete( favorites::table .filter(favorites::user_uuid.eq(user_uuid)) @@ -59,7 +59,7 @@ impl Favorite { }} } // Otherwise, the favorite status is already what it should be. - _ => Ok(()) + _ => Ok(()), } } diff --git a/src/db/models/folder.rs b/src/db/models/folder.rs @@ -109,7 +109,6 @@ impl Folder { User::update_uuid_revision(&self.user_uuid, conn); FolderCipher::delete_all_by_folder(&self.uuid, &conn)?; - db_run! 
{ conn: { diesel::delete(folders::table.filter(folders::uuid.eq(&self.uuid))) .execute(conn) diff --git a/src/db/models/mod.rs b/src/db/models/mod.rs @@ -6,9 +6,9 @@ mod favorite; mod folder; mod org_policy; mod organization; +mod send; mod two_factor; mod user; -mod send; pub use self::attachment::Attachment; pub use self::cipher::Cipher; @@ -18,6 +18,6 @@ pub use self::favorite::Favorite; pub use self::folder::{Folder, FolderCipher}; pub use self::org_policy::{OrgPolicy, OrgPolicyType}; pub use self::organization::{Organization, UserOrgStatus, UserOrgType, UserOrganization}; +pub use self::send::{Send, SendType}; pub use self::two_factor::{TwoFactor, TwoFactorType}; pub use self::user::{Invitation, User, UserStampException}; -pub use self::send::{Send, SendType}; -\ No newline at end of file diff --git a/src/db/models/org_policy.rs b/src/db/models/org_policy.rs @@ -4,7 +4,7 @@ use crate::api::EmptyResult; use crate::db::DbConn; use crate::error::MapResult; -use super::{Organization, UserOrganization, UserOrgStatus, UserOrgType}; +use super::{Organization, UserOrgStatus, UserOrgType, UserOrganization}; db_object! { #[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)] @@ -20,9 +20,7 @@ db_object! { } } -#[derive(Copy, Clone)] -#[derive(num_derive::FromPrimitive)] -#[derive(PartialEq)] +#[derive(Copy, Clone, PartialEq, num_derive::FromPrimitive)] pub enum OrgPolicyType { TwoFactorAuthentication = 0, MasterPassword = 1, @@ -176,7 +174,8 @@ impl OrgPolicy { /// and the user is not an owner or admin of that org. This is only useful for checking /// applicability of policy types that have these particular semantics. pub fn is_applicable_to_user(user_uuid: &str, policy_type: OrgPolicyType, conn: &DbConn) -> bool { - for policy in OrgPolicy::find_by_user(user_uuid, conn) { // Returns confirmed users only. + // Returns confirmed users only. + for policy in OrgPolicy::find_by_user(user_uuid, conn) { if policy.enabled && policy.has_type(policy_type) { let org_uuid = &policy.org_uuid; if let Some(user) = UserOrganization::find_by_user_and_org(user_uuid, org_uuid, conn) { diff --git a/src/db/models/organization.rs b/src/db/models/organization.rs @@ -1,8 +1,8 @@ +use num_traits::FromPrimitive; use serde_json::Value; use std::cmp::Ordering; -use num_traits::FromPrimitive; -use super::{CollectionUser, User, OrgPolicy, OrgPolicyType}; +use super::{CollectionUser, OrgPolicy, OrgPolicyType, User}; db_object! { #[derive(Identifiable, Queryable, Insertable, AsChangeset)] @@ -35,8 +35,7 @@ pub enum UserOrgStatus { Confirmed = 2, } -#[derive(Copy, Clone, PartialEq, Eq)] -#[derive(num_derive::FromPrimitive)] +#[derive(Copy, Clone, PartialEq, Eq, num_derive::FromPrimitive)] pub enum UserOrgType { Owner = 0, Admin = 1, @@ -190,11 +189,9 @@ use crate::error::MapResult; /// Database methods impl Organization { pub fn save(&self, conn: &DbConn) -> EmptyResult { - UserOrganization::find_by_org(&self.uuid, conn) - .iter() - .for_each(|user_org| { - User::update_uuid_revision(&user_org.user_uuid, conn); - }); + UserOrganization::find_by_org(&self.uuid, conn).iter().for_each(|user_org| { + User::update_uuid_revision(&user_org.user_uuid, conn); + }); db_run! { conn: sqlite, mysql { @@ -236,7 +233,6 @@ impl Organization { UserOrganization::delete_all_by_organization(&self.uuid, &conn)?; OrgPolicy::delete_all_by_organization(&self.uuid, &conn)?; - db_run! 
{ conn: { diesel::delete(organizations::table.filter(organizations::uuid.eq(self.uuid))) .execute(conn) @@ -347,11 +343,13 @@ impl UserOrganization { let collections = CollectionUser::find_by_organization_and_user_uuid(&self.org_uuid, &self.user_uuid, conn); collections .iter() - .map(|c| json!({ - "Id": c.collection_uuid, - "ReadOnly": c.read_only, - "HidePasswords": c.hide_passwords, - })) + .map(|c| { + json!({ + "Id": c.collection_uuid, + "ReadOnly": c.read_only, + "HidePasswords": c.hide_passwords, + }) + }) .collect() }; @@ -446,8 +444,7 @@ impl UserOrganization { } pub fn has_full_access(&self) -> bool { - (self.access_all || self.atype >= UserOrgType::Admin) && - self.has_status(UserOrgStatus::Confirmed) + (self.access_all || self.atype >= UserOrgType::Admin) && self.has_status(UserOrgStatus::Confirmed) } pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option<Self> { diff --git a/src/db/models/user.rs b/src/db/models/user.rs @@ -63,8 +63,8 @@ enum UserStatus { #[derive(Serialize, Deserialize)] pub struct UserStampException { - pub route: String, - pub security_stamp: String + pub route: String, + pub security_stamp: String, } /// Local methods @@ -162,7 +162,7 @@ impl User { pub fn set_stamp_exception(&mut self, route_exception: &str) { let stamp_exception = UserStampException { route: route_exception.to_string(), - security_stamp: self.security_stamp.to_string() + security_stamp: self.security_stamp.to_string(), }; self.stamp_exception = Some(serde_json::to_string(&stamp_exception).unwrap_or_default()); } @@ -341,14 +341,16 @@ impl User { pub fn last_active(&self, conn: &DbConn) -> Option<NaiveDateTime> { match Device::find_latest_active_by_user(&self.uuid, conn) { Some(device) => Some(device.updated_at), - None => None + None => None, } } } impl Invitation { pub const fn new(email: String) -> Self { - Self { email } + Self { + email, + } } pub fn save(&self, conn: &DbConn) -> EmptyResult { diff --git a/src/error.rs b/src/error.rs @@ -33,10 +33,10 @@ macro_rules! 
make_error { }; } +use diesel::r2d2::PoolError as R2d2Err; use diesel::result::Error as DieselErr; use diesel::ConnectionError as DieselConErr; use diesel_migrations::RunMigrationsError as DieselMigErr; -use diesel::r2d2::PoolError as R2d2Err; use handlebars::RenderError as HbErr; use jsonwebtoken::errors::Error as JwtErr; use regex::Error as RegexErr; @@ -191,18 +191,14 @@ use rocket::response::{self, Responder, Response}; impl<'r> Responder<'r> for Error { fn respond_to(self, _: &Request) -> response::Result<'r> { match self.error { - ErrorKind::EmptyError(_) => {} // Don't print the error in this situation + ErrorKind::EmptyError(_) => {} // Don't print the error in this situation ErrorKind::SimpleError(_) => {} // Don't print the error in this situation _ => error!(target: "error", "{:#?}", self), }; let code = Status::from_code(self.error_code).unwrap_or(Status::BadRequest); - Response::build() - .status(code) - .header(ContentType::JSON) - .sized_body(Cursor::new(format!("{}", self))) - .ok() + Response::build().status(code).header(ContentType::JSON).sized_body(Cursor::new(format!("{}", self))).ok() } } diff --git a/src/mail.rs b/src/mail.rs @@ -1,4 +1,4 @@ -use std::{str::FromStr}; +use std::str::FromStr; use chrono::{DateTime, Local}; use percent_encoding::{percent_encode, NON_ALPHANUMERIC}; @@ -62,11 +62,13 @@ fn mailer() -> SmtpTransport { let mut selected_mechanisms = vec![]; for wanted_mechanism in mechanism.split(',') { for m in &allowed_mechanisms { - if m.to_string().to_lowercase() == wanted_mechanism.trim_matches(|c| c == '"' || c == '\'' || c == ' ').to_lowercase() { + if m.to_string().to_lowercase() + == wanted_mechanism.trim_matches(|c| c == '"' || c == '\'' || c == ' ').to_lowercase() + { selected_mechanisms.push(*m); } } - }; + } if !selected_mechanisms.is_empty() { smtp_client.authentication(selected_mechanisms) @@ -332,31 +334,23 @@ fn send_email(address: &str, subject: &str, body_html: String, body_text: String let smtp_from = &CONFIG.smtp_from(); let email = Message::builder() - .message_id(Some(format!("<{}@{}>", crate::util::get_uuid(), smtp_from.split('@').collect::<Vec<&str>>()[1] ))) + .message_id(Some(format!("<{}@{}>", crate::util::get_uuid(), smtp_from.split('@').collect::<Vec<&str>>()[1]))) .to(Mailbox::new(None, Address::from_str(&address)?)) - .from(Mailbox::new( - Some(CONFIG.smtp_from_name()), - Address::from_str(smtp_from)?, - )) + .from(Mailbox::new(Some(CONFIG.smtp_from_name()), Address::from_str(smtp_from)?)) .subject(subject) - .multipart( - MultiPart::alternative() - .singlepart(text) - .singlepart(html) - )?; + .multipart(MultiPart::alternative().singlepart(text).singlepart(html))?; match mailer().send(&email) { Ok(_) => Ok(()), // Match some common errors and make them more user friendly Err(e) => { - if e.is_client() { - err!(format!("SMTP Client error: {}", e)); + err!(format!("SMTP Client error: {}", e)); } else if e.is_transient() { err!(format!("SMTP 4xx error: {:?}", e)); } else if e.is_permanent() { err!(format!("SMTP 5xx error: {:?}", e)); - } else if e.is_timeout() { + } else if e.is_timeout() { err!(format!("SMTP timeout error: {:?}", e)); } else { Err(e.into()) diff --git a/src/main.rs b/src/main.rs @@ -16,7 +16,7 @@ extern crate diesel; #[macro_use] extern crate diesel_migrations; -use job_scheduler::{JobScheduler, Job}; +use job_scheduler::{Job, JobScheduler}; use std::{ fs::create_dir_all, panic, @@ -127,7 +127,9 @@ fn init_logging(level: log::LevelFilter) -> Result<(), fern::InitError> { // Enable smtp debug logging only 
specifically for smtp when need. // This can contain sensitive information we do not want in the default debug/trace logging. if CONFIG.smtp_debug() { - println!("[WARNING] SMTP Debugging is enabled (SMTP_DEBUG=true). Sensitive information could be disclosed via logs!"); + println!( + "[WARNING] SMTP Debugging is enabled (SMTP_DEBUG=true). Sensitive information could be disclosed via logs!" + ); println!("[WARNING] Only enable SMTP_DEBUG during troubleshooting!\n"); logger = logger.level_for("lettre::transport::smtp", log::LevelFilter::Debug) } else { @@ -298,7 +300,10 @@ fn check_web_vault() { let index_path = Path::new(&CONFIG.web_vault_folder()).join("index.html"); if !index_path.exists() { - error!("Web vault is not found at '{}'. To install it, please follow the steps in: ", CONFIG.web_vault_folder()); + error!( + "Web vault is not found at '{}'. To install it, please follow the steps in: ", + CONFIG.web_vault_folder() + ); error!("https://github.com/dani-garcia/bitwarden_rs/wiki/Building-binary#install-the-web-vault"); error!("You can also set the environment variable 'WEB_VAULT_ENABLED=false' to disable it"); exit(1); @@ -344,31 +349,34 @@ fn schedule_jobs(pool: db::DbPool) { info!("Job scheduler disabled."); return; } - thread::Builder::new().name("job-scheduler".to_string()).spawn(move || { - let mut sched = JobScheduler::new(); - - // Purge sends that are past their deletion date. - if !CONFIG.send_purge_schedule().is_empty() { - sched.add(Job::new(CONFIG.send_purge_schedule().parse().unwrap(), || { - api::purge_sends(pool.clone()); - })); - } + thread::Builder::new() + .name("job-scheduler".to_string()) + .spawn(move || { + let mut sched = JobScheduler::new(); + + // Purge sends that are past their deletion date. + if !CONFIG.send_purge_schedule().is_empty() { + sched.add(Job::new(CONFIG.send_purge_schedule().parse().unwrap(), || { + api::purge_sends(pool.clone()); + })); + } - // Purge trashed items that are old enough to be auto-deleted. - if !CONFIG.trash_purge_schedule().is_empty() { - sched.add(Job::new(CONFIG.trash_purge_schedule().parse().unwrap(), || { - api::purge_trashed_ciphers(pool.clone()); - })); - } + // Purge trashed items that are old enough to be auto-deleted. + if !CONFIG.trash_purge_schedule().is_empty() { + sched.add(Job::new(CONFIG.trash_purge_schedule().parse().unwrap(), || { + api::purge_trashed_ciphers(pool.clone()); + })); + } - // Periodically check for jobs to run. We probably won't need any - // jobs that run more often than once a minute, so a default poll - // interval of 30 seconds should be sufficient. Users who want to - // schedule jobs to run more frequently for some reason can reduce - // the poll interval accordingly. - loop { - sched.tick(); - thread::sleep(Duration::from_millis(CONFIG.job_poll_interval_ms())); - } - }).expect("Error spawning job scheduler thread"); + // Periodically check for jobs to run. We probably won't need any + // jobs that run more often than once a minute, so a default poll + // interval of 30 seconds should be sufficient. Users who want to + // schedule jobs to run more frequently for some reason can reduce + // the poll interval accordingly. 
+ loop { + sched.tick(); + thread::sleep(Duration::from_millis(CONFIG.job_poll_interval_ms())); + } + }) + .expect("Error spawning job scheduler thread"); } diff --git a/src/static/templates/admin/settings.hbs b/src/static/templates/admin/settings.hbs @@ -116,7 +116,11 @@ data-target="#g_database">Backup Database</button></div> <div id="g_database" class="card-body collapse" data-parent="#config-form"> <div class="small mb-3"> - NOTE: A local installation of sqlite3 is required for this section to work. + WARNING: This function only creates a backup copy of the SQLite database. + This does not include any configuration or file attachment data that may + also be needed to fully restore a bitwarden_rs instance. For details on + how to perform complete backups, refer to the wiki page on + <a href="https://github.com/dani-garcia/bitwarden_rs/wiki/Backing-up-your-vault">backups</a>. </div> <button type="button" class="btn btn-primary" onclick="backupDatabase();">Backup Database</button> </div> diff --git a/src/util.rs b/src/util.rs @@ -28,7 +28,10 @@ impl Fairing for AppHeaders { res.set_raw_header("X-Frame-Options", "SAMEORIGIN"); res.set_raw_header("X-Content-Type-Options", "nosniff"); res.set_raw_header("X-XSS-Protection", "1; mode=block"); - let csp = format!("frame-ancestors 'self' chrome-extension://nngceckbapebfimnlniiiahkandclblb moz-extension://* {};", CONFIG.allowed_iframe_ancestors()); + let csp = format!( + "frame-ancestors 'self' chrome-extension://nngceckbapebfimnlniiiahkandclblb moz-extension://* {};", + CONFIG.allowed_iframe_ancestors() + ); res.set_raw_header("Content-Security-Policy", csp); // Disable cache unless otherwise specified @@ -124,14 +127,8 @@ impl<'r, R: Responder<'r>> Responder<'r> for Cached<R> { // Log all the routes from the main paths list, and the attachments endpoint // Effectively ignores, any static file route, and the alive endpoint -const LOGGED_ROUTES: [&str; 6] = [ - "/api", - "/admin", - "/identity", - "/icons", - "/notifications/hub/negotiate", - "/attachments", -]; +const LOGGED_ROUTES: [&str; 6] = + ["/api", "/admin", "/identity", "/icons", "/notifications/hub/negotiate", "/attachments"]; // Boolean is extra debug, when true, we ignore the whitelist above and also print the mounts pub struct BetterLogging(pub bool); @@ -158,7 +155,11 @@ impl Fairing for BetterLogging { } let config = rocket.config(); - let scheme = if config.tls_enabled() { "https" } else { "http" }; + let scheme = if config.tls_enabled() { + "https" + } else { + "http" + }; let addr = format!("{}://{}:{}", &scheme, &config.address, &config.port); info!(target: "start", "Rocket has launched from {}", addr); } @@ -293,8 +294,7 @@ where use std::env; -pub fn get_env_str_value(key: &str) -> Option<String> -{ +pub fn get_env_str_value(key: &str) -> Option<String> { let key_file = format!("{}_FILE", key); let value_from_env = env::var(key); let value_file = env::var(&key_file); @@ -304,9 +304,9 @@ pub fn get_env_str_value(key: &str) -> Option<String> (Ok(v_env), Err(_)) => Some(v_env), (Err(_), Ok(v_file)) => match fs::read_to_string(v_file) { Ok(content) => Some(content.trim().to_string()), - Err(e) => panic!("Failed to load {}: {:?}", key, e) + Err(e) => panic!("Failed to load {}: {:?}", key, e), }, - _ => None + _ => None, } } @@ -478,7 +478,6 @@ pub fn retry<F, T, E>(func: F, max_tries: u32) -> Result<T, E> where F: Fn() -> Result<T, E>, { - use std::{thread::sleep, time::Duration}; let mut tries = 0; loop { @@ -497,12 +496,13 @@ where } } +use std::{thread::sleep, time::Duration}; 
+ pub fn retry_db<F, T, E>(func: F, max_tries: u32) -> Result<T, E> where F: Fn() -> Result<T, E>, E: std::error::Error, { - use std::{thread::sleep, time::Duration}; let mut tries = 0; loop { @@ -522,3 +522,18 @@ where } } } + +use reqwest::{ + blocking::{Client, ClientBuilder}, + header, +}; + +pub fn get_reqwest_client() -> Client { + get_reqwest_client_builder().build().expect("Failed to build client") +} + +pub fn get_reqwest_client_builder() -> ClientBuilder { + let mut headers = header::HeaderMap::new(); + headers.insert(header::USER_AGENT, header::HeaderValue::from_static("Bitwarden_RS")); + Client::builder().default_headers(headers).timeout(Duration::from_secs(10)) +}
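Note on the new helpers: get_reqwest_client() and get_reqwest_client_builder(), added at the end of src/util.rs above, centralize the User-Agent and timeout defaults that src/api/icons.rs previously configured inline. A minimal sketch of how a caller elsewhere in the codebase might use the helper; the fetch_body function below is hypothetical and not part of this commit:

    // Hypothetical caller (illustration only, not part of this diff):
    // fetch a page body using the shared reqwest defaults from src/util.rs.
    // The helper already sets the "Bitwarden_RS" User-Agent and a 10-second
    // timeout, so call sites only add request-specific options.
    use crate::util::get_reqwest_client;

    fn fetch_body(url: &str) -> Result<String, reqwest::Error> {
        get_reqwest_client().get(url).send()?.text()
    }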