feat(integer): improve carry propagation algorithm
tmontaigu committed Jul 26, 2024
1 parent 2004333 commit 2574083
Showing 17 changed files with 1,394 additions and 983 deletions.
7 changes: 6 additions & 1 deletion tfhe/src/integer/gpu/server_key/radix/add.rs
@@ -11,9 +11,14 @@ use crate::integer::gpu::{
unchecked_signed_overflowing_add_or_sub_radix_kb_assign_async,
unchecked_sum_ciphertexts_integer_radix_kb_assign_async, PBSType,
};
use crate::integer::server_key::radix_parallel::sub::SignedOperation;
use crate::shortint::ciphertext::NoiseLevel;

#[derive(Copy, Clone, PartialEq, Eq)]
pub(crate) enum SignedOperation {
Addition,
Subtraction,
}

impl CudaServerKey {
/// Computes homomorphically an addition between two ciphertexts encrypting integer values.
///
2 changes: 1 addition & 1 deletion tfhe/src/integer/gpu/server_key/radix/sub.rs
@@ -1,3 +1,4 @@
use super::add::SignedOperation;
use crate::core_crypto::gpu::lwe_ciphertext_list::CudaLweCiphertextList;
use crate::core_crypto::gpu::CudaStreams;
use crate::core_crypto::prelude::{CiphertextModulus, LweBskGroupingFactor, LweCiphertextCount};
@@ -11,7 +12,6 @@ use crate::integer::gpu::server_key::{CudaBootstrappingKey, CudaServerKey};
use crate::integer::gpu::{
unchecked_unsigned_overflowing_sub_integer_radix_kb_assign_async, PBSType,
};
use crate::integer::server_key::radix_parallel::sub::SignedOperation;
use crate::shortint::ciphertext::NoiseLevel;

impl CudaServerKey {
14 changes: 12 additions & 2 deletions tfhe/src/integer/server_key/radix/add.rs
@@ -1,5 +1,5 @@
use crate::integer::ciphertext::IntegerRadixCiphertext;
use crate::integer::server_key::radix_parallel::sub::SignedOperation;
use crate::integer::server_key::radix_parallel::OutputFlag;
use crate::integer::server_key::CheckError;
use crate::integer::{BooleanBlock, ServerKey, SignedRadixCiphertext};
use crate::shortint::ciphertext::{Degree, MaxDegree, NoiseLevel};
@@ -267,6 +267,16 @@ impl ServerKey {
lhs: &SignedRadixCiphertext,
rhs: &SignedRadixCiphertext,
) -> (SignedRadixCiphertext, BooleanBlock) {
self.unchecked_signed_overflowing_add_or_sub(lhs, rhs, SignedOperation::Addition)
let mut result = lhs.clone();
let overflowed = self
.advanced_add_assign_with_carry_sequential_parallelized(
&mut result.blocks,
&rhs.blocks,
None,
OutputFlag::from_signedness(true),
)
.expect("overflow flag was requested");

(result, overflowed)
}
}
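
Illustration, not part of this commit: the new code path requests an overflow flag via OutputFlag::from_signedness(true), and for signed radix integers overflow is detected by comparing the carry into the most significant bit with the carry out of it. A minimal sketch of that rule on clear (unencrypted) i8 values; the helper name signed_overflowing_add_clear is hypothetical and exists only for this example.

// Illustration only (not part of this diff): the signed-overflow rule the new
// code path computes homomorphically, shown on clear 8-bit values.
// Overflow occurred iff the carry into the sign bit differs from the carry
// out of the sign bit.
fn signed_overflowing_add_clear(lhs: i8, rhs: i8) -> (i8, bool) {
    let (result, overflowed) = lhs.overflowing_add(rhs);

    // Same rule expressed with carries: add the low 7 bits to get the carry
    // into the sign bit, then compare it with the carry out of the sign bit.
    let carry_in = ((lhs as u8 & 0x7f) + (rhs as u8 & 0x7f)) >> 7;
    let carry_out = ((lhs as u8 as u16 + rhs as u8 as u16) >> 8) as u8;
    assert_eq!(overflowed, carry_in != carry_out);

    (result, overflowed)
}

fn main() {
    assert_eq!(signed_overflowing_add_clear(100, 27), (127, false));
    assert_eq!(signed_overflowing_add_clear(100, 28), (-128, true));
}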
2 changes: 1 addition & 1 deletion tfhe/src/integer/server_key/radix/scalar_sub.rs
@@ -65,7 +65,7 @@ impl ServerKey {
// - `None` if scalar is zero
// - `Some` if scalar is non-zero
//
fn create_negated_block_decomposer<Scalar>(
pub(crate) fn create_negated_block_decomposer<Scalar>(
&self,
scalar: Scalar,
) -> Option<impl Iterator<Item = u8>>
128 changes: 13 additions & 115 deletions tfhe/src/integer/server_key/radix/sub.rs
@@ -1,9 +1,8 @@
use crate::integer::ciphertext::IntegerRadixCiphertext;
use crate::integer::server_key::radix_parallel::sub::SignedOperation;
use crate::integer::server_key::radix_parallel::OutputFlag;
use crate::integer::server_key::CheckError;
use crate::integer::{BooleanBlock, RadixCiphertext, ServerKey, SignedRadixCiphertext};
use crate::shortint::ciphertext::{Degree, MaxDegree, NoiseLevel};
use crate::shortint::Ciphertext;

impl ServerKey {
/// Computes homomorphically a subtraction between two ciphertexts encrypting integer values.
@@ -420,124 +419,23 @@ impl ServerKey {
(result, overflowed)
}

pub(crate) fn unchecked_signed_overflowing_add_or_sub(
&self,
lhs: &SignedRadixCiphertext,
rhs: &SignedRadixCiphertext,
signed_operation: SignedOperation,
) -> (SignedRadixCiphertext, BooleanBlock) {
let mut result = lhs.clone();

let num_blocks = result.blocks.len();
if num_blocks == 0 {
return (result, self.create_trivial_boolean_block(false));
}

fn block_add_assign_returning_carry(
sks: &ServerKey,
lhs: &mut Ciphertext,
rhs: &Ciphertext,
) -> Ciphertext {
sks.key.unchecked_add_assign(lhs, rhs);
let (carry, message) = rayon::join(
|| sks.key.carry_extract(lhs),
|| sks.key.message_extract(lhs),
);

*lhs = message;

carry
}

// 2_2, 3_3, 4_4
// If we have at least 2 bits and at least as much carries
if self.key.message_modulus.0 >= 4 && self.key.carry_modulus.0 >= self.key.message_modulus.0
{
if signed_operation == SignedOperation::Subtraction {
self.unchecked_sub_assign(&mut result, rhs);
} else {
self.unchecked_add_assign(&mut result, rhs);
}

let mut input_carry = self.key.create_trivial(0);

// For the first block do the first step of overflow computation in parallel
let (_, last_block_inner_propagation) = rayon::join(
|| {
input_carry =
block_add_assign_returning_carry(self, &mut result.blocks[0], &input_carry);
},
|| {
self.generate_last_block_inner_propagation(
&lhs.blocks[num_blocks - 1],
&rhs.blocks[num_blocks - 1],
signed_operation,
)
},
);

for block in result.blocks[1..num_blocks - 1].iter_mut() {
input_carry = block_add_assign_returning_carry(self, block, &input_carry);
}

// Treat the last block separately to handle last step of overflow behavior
let output_carry = block_add_assign_returning_carry(
self,
&mut result.blocks[num_blocks - 1],
&input_carry,
);
let overflowed = self.resolve_signed_overflow(
last_block_inner_propagation,
&BooleanBlock::new_unchecked(input_carry),
&BooleanBlock::new_unchecked(output_carry),
);

return (result, overflowed);
}

// 1_X parameters
//
// Same idea as other algorithms, however since we have 1 bit per block
// we do not have to resolve any inner propagation but it adds one more
// sequential PBS
if self.key.message_modulus.0 == 2 {
if signed_operation == SignedOperation::Subtraction {
self.unchecked_sub_assign(&mut result, rhs);
} else {
self.unchecked_add_assign(&mut result, rhs);
}

let mut input_carry = self.key.create_trivial(0);
for block in result.blocks[..num_blocks - 1].iter_mut() {
input_carry = block_add_assign_returning_carry(self, block, &input_carry);
}

let output_carry = block_add_assign_returning_carry(
self,
&mut result.blocks[num_blocks - 1],
&input_carry,
);

// Encode the rule
// "Overflow occurred if the carry into the last bit is different than the carry out
// of the last bit"
let overflowed = self.key.not_equal(&output_carry, &input_carry);
return (result, BooleanBlock::new_unchecked(overflowed));
}

panic!(
"Invalid combo of message modulus ({}) and carry modulus ({}) \n\
This function requires the message modulus >= 2 and carry modulus >= message_modulus \n\
I.e. PARAM_MESSAGE_X_CARRY_Y where X >= 1 and Y >= X.",
self.key.message_modulus.0, self.key.carry_modulus.0
);
}
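
Illustration, not part of this commit: the branch removed above propagated carries block by block, splitting each block sum into a message part that stays in place and a carry part fed to the next block (one carry/message extraction pair per block on the encrypted side). A minimal clear-data sketch of that sequential propagation, assuming 2-bit blocks (message_modulus = 4); the helper name propagate_carries_clear is hypothetical.

// Illustration only (not part of this diff): sequential carry propagation over
// clear radix blocks, least significant block first, message_modulus = 4.
fn propagate_carries_clear(lhs: &[u8], rhs: &[u8]) -> (Vec<u8>, u8) {
    let message_modulus = 4u8;
    let mut carry = 0u8;
    let mut result = Vec::with_capacity(lhs.len());
    for (&l, &r) in lhs.iter().zip(rhs) {
        // unchecked add, then "message extract" / "carry extract"
        let sum = l + r + carry;
        result.push(sum % message_modulus); // message part, kept in the block
        carry = sum / message_modulus;      // carry part, fed to the next block
    }
    (result, carry)
}

fn main() {
    // Two 2-bit blocks: [3, 1] = 7 and [2, 3] = 14; 7 + 14 = 21 = 1 + 1*4 + 1*16
    let (blocks, carry_out) = propagate_carries_clear(&[3, 1], &[2, 3]);
    assert_eq!(blocks, vec![1, 1]);
    assert_eq!(carry_out, 1);
}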
pub fn unchecked_signed_overflowing_sub(
&self,
lhs: &SignedRadixCiphertext,
rhs: &SignedRadixCiphertext,
) -> (SignedRadixCiphertext, BooleanBlock) {
self.unchecked_signed_overflowing_add_or_sub(lhs, rhs, SignedOperation::Subtraction)
let flipped_rhs = self.bitnot(rhs);
let carry = self.create_trivial_boolean_block(true);
let mut result = lhs.clone();
let overflowed = self
.advanced_add_assign_with_carry_sequential_parallelized(
&mut result.blocks,
&flipped_rhs.blocks,
Some(&carry),
OutputFlag::from_signedness(true),
)
.expect("overflow flat was requested");
(result, overflowed)
}
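
Illustration, not part of this commit: the new subtraction path rewrites lhs - rhs as lhs + !rhs + 1 (two's complement), which is why it feeds bitnot(rhs) and a trivial input carry of true into the addition-with-carry routine. A minimal sketch of that identity on clear i8 values; the helper name sub_via_bitnot is hypothetical.

// Illustration only (not part of this diff): subtracting rhs is the same as
// adding its bitwise complement with an input carry of 1.
fn sub_via_bitnot(lhs: i8, rhs: i8) -> i8 {
    let flipped = !rhs;
    lhs.wrapping_add(flipped).wrapping_add(1)
}

fn main() {
    assert_eq!(sub_via_bitnot(5, 3), 2);
    assert_eq!(sub_via_bitnot(-7, 9), -16);
    assert_eq!(sub_via_bitnot(i8::MIN, 1), i8::MAX); // wraps; overflow is reported separately
}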

pub fn signed_overflowing_sub(
(remaining 12 changed files not shown)