diff --git a/src/coreclr/jit/lower.cpp b/src/coreclr/jit/lower.cpp
index de2cd661d7f25..2e219ae329891 100644
--- a/src/coreclr/jit/lower.cpp
+++ b/src/coreclr/jit/lower.cpp
@@ -6247,6 +6247,22 @@ GenTree* Lowering::LowerAdd(GenTreeOp* node)
         return next;
     }
 
+    // Fold ADD(CNS1, CNS2). We mainly target a very specific pattern - byref ADD(frozen_handle, cns_offset)
+    // We could do this folding earlier, but that is not trivial as we'll have to introduce a way to restore
+    // the original object from a byref constant for optimizations.
+    if (comp->opts.OptimizationEnabled() && op1->IsCnsIntOrI() && op2->IsCnsIntOrI() && !node->gtOverflow() &&
+        (op1->IsIconHandle(GTF_ICON_OBJ_HDL) || op2->IsIconHandle(GTF_ICON_OBJ_HDL)) &&
+        !op1->AsIntCon()->ImmedValNeedsReloc(comp) && !op2->AsIntCon()->ImmedValNeedsReloc(comp))
+    {
+        assert(node->TypeIs(TYP_I_IMPL, TYP_BYREF));
+
+        // TODO-CQ: we should allow this for AOT too. For that we need to guarantee that the new constant
+        // will be lowered as the original handle with offset in a reloc.
+        BlockRange().Remove(op1);
+        BlockRange().Remove(op2);
+        node->BashToConst(op1->AsIntCon()->IconValue() + op2->AsIntCon()->IconValue(), node->TypeGet());
+    }
+
 #ifdef TARGET_XARCH
     if (BlockRange().TryGetUse(node, &use))
     {
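
For context, below is a minimal standalone C++ sketch (not JIT code) of the check-and-fold logic the hunk adds: when one operand of the ADD is a frozen object handle constant and neither operand needs a relocation, the two constants collapse into one. The names IntCon and TryFoldAddOfConstants are illustrative stand-ins, not runtime types, and the overflow check on the ADD node is omitted here.

#include <cstdint>
#include <cstdio>

// Stand-in for a GT_CNS_INT node: the constant value plus the two properties the
// lowering condition above tests.
struct IntCon
{
    int64_t value;
    bool    isObjHandle; // corresponds to IsIconHandle(GTF_ICON_OBJ_HDL)
    bool    needsReloc;  // corresponds to ImmedValNeedsReloc()
};

// Returns true and writes the folded value when ADD(op1, op2) may be collapsed into a
// single constant: one operand is a frozen object handle and neither operand needs a
// relocation (the JIT-only case the patch handles; AOT is left as a TODO in the hunk).
static bool TryFoldAddOfConstants(const IntCon& op1, const IntCon& op2, int64_t* folded)
{
    if ((op1.isObjHandle || op2.isObjHandle) && !op1.needsReloc && !op2.needsReloc)
    {
        *folded = op1.value + op2.value;
        return true;
    }
    return false;
}

int main()
{
    // e.g. the address of a frozen object plus a small offset into it
    IntCon  frozenHandle{0x00007ffd12340000, /* isObjHandle */ true, /* needsReloc */ false};
    IntCon  offset{0x10, false, false};
    int64_t folded = 0;

    if (TryFoldAddOfConstants(frozenHandle, offset, &folded))
    {
        std::printf("folded constant: 0x%llx\n", static_cast<unsigned long long>(folded));
    }
    return 0;
}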