commit 00855ee31dcca786809e99e3f7e99a23ec8d54e7
parent c18a273b4ad49cfba0bf932e6748c0e16a2ad06f
Author: BlackDex <black.dex@gmail.com>
Date: Sun, 1 Jan 2023 15:09:10 +0100
Fix failing large note imports
When importing into Vaultwarden (or Bitwarden), notes larger than 10_000
encrypted characters are invalid. For one, this isn't compatible with
Bitwarden, and some clients tend to break on very large notes.
We already added a check for this limit when adding a single cipher, but
that caused issues during import and could result in a partially imported
vault. Bitwarden runs some validations before actually starting the import
process and generates a special error message that helps the user identify
which items are invalid. This PR adds that validation check and returns the
same kind of error.
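For reference, an import containing an oversized note would be rejected with
an error body roughly like the one below (a sketch based on the
validate_notes() implementation in this change; the exact serialization is
produced by the err_json! macro):

    {
      "message": "The model state is invalid.",
      "validationErrors": {
        "Ciphers[0].Notes": [
          "The field Notes exceeds the maximum encrypted value length of 10000 characters."
        ]
      },
      "object": "error"
    }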
Fixes #3048
Diffstat:
4 files changed, 42 insertions(+), 3 deletions(-)
diff --git a/src/api/core/ciphers.rs b/src/api/core/ciphers.rs
@@ -205,7 +205,7 @@ pub struct CipherData {
*/
pub Type: i32,
pub Name: String,
- Notes: Option<String>,
+ pub Notes: Option<String>,
Fields: Option<Value>,
// Only one of these should exist, depending on type
@@ -542,6 +542,12 @@ async fn post_ciphers_import(
let data: ImportData = data.into_inner().data;
+ // Validate the import before continuing
+ // Bitwarden does not process the import if even a single item is invalid.
+ // Since we check the length of the encrypted notes, we need to do that here to pre-validate them.
+ // TODO: See if we can optimize the whole cipher adding/importing and prevent duplicate code and checks.
+ Cipher::validate_notes(&data.Ciphers)?;
+
// Read and create the folders
let mut folders: Vec<_> = Vec::new();
for folder in data.Folders.into_iter() {
diff --git a/src/api/core/mod.rs b/src/api/core/mod.rs
@@ -7,7 +7,7 @@ mod organizations;
mod sends;
pub mod two_factor;
-pub use ciphers::{purge_trashed_ciphers, CipherSyncData, CipherSyncType};
+pub use ciphers::{purge_trashed_ciphers, CipherData, CipherSyncData, CipherSyncType};
pub use emergency_access::{emergency_notification_reminder_job, emergency_request_timeout_job};
pub use events::{event_cleanup_job, log_event, log_user_event};
pub use sends::purge_sends;
diff --git a/src/api/core/organizations.rs b/src/api/core/organizations.rs
@@ -1378,6 +1378,12 @@ async fn post_org_import(
let data: ImportData = data.into_inner().data;
let org_id = query.organization_id;
+ // Validate the import before continuing
+ // Bitwarden does not process the import if even a single item is invalid.
+ // Since we check the length of the encrypted notes, we need to do that here to pre-validate them.
+ // TODO: See if we can optimize the whole cipher adding/importing and prevent duplicate code and checks.
+ Cipher::validate_notes(&data.Ciphers)?;
+
let mut collections = Vec::new();
for coll in data.Collections {
let collection = Collection::new(org_id.clone(), coll.Name);
diff --git a/src/db/models/cipher.rs b/src/db/models/cipher.rs
@@ -6,7 +6,7 @@ use super::{
Attachment, CollectionCipher, Favorite, FolderCipher, Group, User, UserOrgStatus, UserOrgType, UserOrganization,
};
-use crate::api::core::CipherSyncData;
+use crate::api::core::{CipherData, CipherSyncData};
use std::borrow::Cow;
@@ -73,6 +73,33 @@ impl Cipher {
reprompt: None,
}
}
+
+ pub fn validate_notes(cipher_data: &[CipherData]) -> EmptyResult {
+ let mut validation_errors = serde_json::Map::new();
+ for (index, cipher) in cipher_data.iter().enumerate() {
+ if let Some(note) = &cipher.Notes {
+ if note.len() > 10_000 {
+ validation_errors.insert(
+ format!("Ciphers[{index}].Notes"),
+ serde_json::to_value([
+ "The field Notes exceeds the maximum encrypted value length of 10000 characters.",
+ ])
+ .unwrap(),
+ );
+ }
+ }
+ }
+ if !validation_errors.is_empty() {
+ let err_json = json!({
+ "message": "The model state is invalid.",
+ "validationErrors" : validation_errors,
+ "object": "error"
+ });
+ err_json!(err_json, "Import validation errors")
+ } else {
+ Ok(())
+ }
+ }
}
use crate::db::DbConn;