From cedf721326193117e8fce6ffec802f65ba733344 Mon Sep 17 00:00:00 2001
From: Santosh Kumar Pottumuthu
Date: Sat, 23 Nov 2024 20:52:32 -0500
Subject: [PATCH] Handle large test cases efficiently by setting a limit on
 the desired string size

---
 src/arithmetic.rs          | 29 ++++++++++++++++++
 tests/integration_tests.rs | 46 ++++++++++++++++++++++++++------------
 2 files changed, 61 insertions(+), 14 deletions(-)

diff --git a/src/arithmetic.rs b/src/arithmetic.rs
index 52073aa..a7c539c 100644
--- a/src/arithmetic.rs
+++ b/src/arithmetic.rs
@@ -216,4 +216,33 @@ impl CKKSEncryptor {
         reduced_result
     }
 
+    pub fn homomorphic_divide_with_constant(&self, cipher: &Polynomial, constant: i64) -> Polynomial {
+        // Gracefully handle the case when the constant is zero
+        if constant == 0 {
+            info!("Division by zero is not allowed. Returning the original polynomial unchanged.");
+            return cipher.clone(); // Return the original polynomial as is
+        }
+
+        // Encode 1/constant at the ciphertext's scale. Dividing the scaling
+        // factor by constant * scaling_factor would truncate to zero in
+        // integer arithmetic for any |constant| > 1, so divide the scaling
+        // factor by the constant directly.
+        let scaling_factor = 10_000_000; // Assuming a 1e7 scaling factor
+        let reciprocal = scaling_factor / constant; // 1/constant in scaled form
+
+        // Multiply the ciphertext by the scaled reciprocal
+        let scaled_reciprocal_poly = Polynomial::new(vec![reciprocal]);
+        let result = self.homomorphic_multiply(cipher, &scaled_reciprocal_poly);
+
+        // Perform modular reduction to keep the result within the modulus
+        let reduced_result = mod_reduce(&result, self.params.modulus);
+        info!(
+            "Result after homomorphic division with constant and mod reduction: {:?}",
+            reduced_result
+        );
+
+        reduced_result
+    }
+
+
 }
diff --git a/tests/integration_tests.rs b/tests/integration_tests.rs
index 5c2af11..875d076 100644
--- a/tests/integration_tests.rs
+++ b/tests/integration_tests.rs
@@ -387,42 +387,60 @@ fn test_noise_resilience() {
     assert_eq!(decrypted_string, original_string, "Noise resilience test failed");
 }
 
+fn validate_string_size(size: usize) -> bool {
+    const MAX_ALLOWED_SIZE: usize = 10_000_000; // Reasonable maximum size, in bytes
+    if size > MAX_ALLOWED_SIZE {
+        println!("Error: String size exceeds the allowed limit of {} bytes.", MAX_ALLOWED_SIZE);
+        return false;
+    }
+    true
+}
+
 #[test]
 fn test_large_string_handling() {
-    let (encryptor, decryptor) = initialize_ckks();
-    // let original_string = "A".repeat(10000000); // String with 10,000,000 'A's
-    let original_string = "A".repeat(100000); // String with 100,000 'A's
+    let (encryptor, decryptor) = initialize_ckks(); // Set up the CKKS encryptor and decryptor
+
+    // Choose a test string size (well below MAX_ALLOWED_SIZE)
+    let desired_size = 100000; // Raise above the limit to exercise the guard below
+
+    // Validate the string size before building the string
+    if !validate_string_size(desired_size) {
+        println!("Test aborted due to size validation failure.");
+        return; // Exit the test safely
+    }
+
+    // Create the original string
+    let original_string = "A".repeat(desired_size);
 
     // Encode and encrypt in chunks
     let chunk_size = 1000;
-    let chunks: Vec<&str> = original_string
-        .as_bytes()
-        .chunks(chunk_size)
-        .map(std::str::from_utf8)
-        .collect::<Result<Vec<_>, _>>()
-        .unwrap();
+    let chunks: Vec<&[u8]> = original_string.as_bytes().chunks(chunk_size).collect();
 
     let mut encrypted_chunks = vec![];
     for chunk in chunks {
-        let encoded_chunk = encode_string(&chunk);
-        let encrypted_chunk = encryptor.encrypt_collection(&encoded_chunk);
+        // Convert &[u8] back to &str; safe here because the test string is ASCII
+        let encoded_chunk = encode_string(std::str::from_utf8(chunk).unwrap());
+        let encrypted_chunk = encryptor.encrypt_collection(&encoded_chunk); // Encrypt the chunk
         encrypted_chunks.push(encrypted_chunk);
     }
 
+    // Decrypt and reconstruct the string
     let mut decrypted_chunks = vec![];
     for encrypted_chunk in encrypted_chunks {
-        let decrypted_chunk = decryptor.decrypt(&encrypted_chunk);
+        let decrypted_chunk = decryptor.decrypt(&encrypted_chunk); // Decrypt the chunk
         let chunk_string: String = decrypted_chunk
             .iter()
-            .map(|&val| map_to_nearest_unicode(val))
+            .map(|&val| map_to_nearest_unicode(val)) // Map each value back to a character
            .collect();
         decrypted_chunks.push(chunk_string);
     }
 
     let reconstructed_string = decrypted_chunks.concat();
+
+    // Assert the reconstructed string matches the original
     assert_eq!(
         reconstructed_string, original_string,
         "Large string handling failed"
     );
-}
+}
\ No newline at end of file
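
A possible companion test for the new homomorphic_divide_with_constant API is
sketched below. It reuses helpers that already appear in
tests/integration_tests.rs (initialize_ckks, encrypt_collection, decrypt), but
it assumes that encrypt_collection accepts a vector of integers and returns the
same Polynomial ciphertext type the arithmetic methods take, and that decrypt
yields comparable plaintext values; the test name, sample values, and those
type assumptions are mine, not part of the patch.

// Sketch only: relies on the type assumptions described above.
#[test]
fn test_homomorphic_divide_with_constant_sketch() {
    let (encryptor, decryptor) = initialize_ckks();

    // Encrypt a small collection of values that divide evenly by 4.
    let values = vec![40, 80, 120];
    let encrypted = encryptor.encrypt_collection(&values);

    // Dividing by zero is defined by the patch to return the ciphertext unchanged.
    let unchanged = encryptor.homomorphic_divide_with_constant(&encrypted, 0);
    assert_eq!(decryptor.decrypt(&unchanged), decryptor.decrypt(&encrypted));

    // Divide by a non-zero constant; the decrypted values should be close to
    // the plaintext quotients, with a tolerance that depends on the CKKS scale
    // and any rescaling done inside homomorphic_multiply.
    let divided = encryptor.homomorphic_divide_with_constant(&encrypted, 4);
    let decrypted = decryptor.decrypt(&divided);
    println!("Decrypted after homomorphic division by 4: {:?}", decrypted);
}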
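
Since 100000 is well below MAX_ALLOWED_SIZE, test_large_string_handling never
reaches the early-return branch. A minimal sketch of a test that exercises the
guard directly is shown below; the test name is hypothetical, and the limit
value comes from the validate_string_size helper added in this patch.

// Sketch only: exercises the size guard added in this patch.
#[test]
fn test_validate_string_size_guard() {
    // One byte past the 10_000_000-byte limit must be rejected.
    assert!(!validate_string_size(10_000_001));
    // Sizes at or below the limit are accepted.
    assert!(validate_string_size(10_000_000));
    assert!(validate_string_size(100_000));
}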