diff --git a/src/HHbbVV/processors/TTScaleFactorsSkimmer.py b/src/HHbbVV/processors/TTScaleFactorsSkimmer.py
index 6900f3db..4e3abf78 100644
--- a/src/HHbbVV/processors/TTScaleFactorsSkimmer.py
+++ b/src/HHbbVV/processors/TTScaleFactorsSkimmer.py
@@ -421,6 +421,8 @@ def process(self, events: ak.Array):
         # Lund Plane SFs
         #########################
 
+        lp_hist = None
+
         if dataset in ["SingleTop", "TTToSemiLeptonic", "TTToSemiLeptonic_ext1"]:
             match_dict, gen_quarks, had_bs = ttbar_scale_factor_matching(
                 events, leading_fatjets[:, 0], selection_args
@@ -431,13 +433,14 @@ def process(self, events: ak.Array):
             skimmed_events = {**skimmed_events, **match_dict}
 
             if np.any(top_matched):
-                sf_dict = get_lund_SFs(
+                sf_dict, lp_hist = get_lund_SFs(
                     year,
                     events[top_matched],
                     fatjets[top_matched],
                     fatjet_idx[top_matched].to_numpy(),
                     num_prongs,
                     gen_quarks[top_matched],
+                    weights_dict["weight"][top_matched],
                     trunc_gauss=True,
                     lnN=True,
                     gen_bs=had_bs[top_matched],  # do b/l ratio uncertainty for tops as well
@@ -490,7 +493,7 @@ def process(self, events: ak.Array):
             )
             self.dump_table(pddf, fname)
 
-        return {year: {dataset: {"totals": totals_dict, "cutflow": cutflow}}}
+        return {year: {dataset: {"totals": totals_dict, "cutflow": cutflow, "lp_hist": lp_hist}}}
 
     def postprocess(self, accumulator):
         return accumulator
diff --git a/src/HHbbVV/processors/corrections.py b/src/HHbbVV/processors/corrections.py
index 9f196c52..c64bdca0 100644
--- a/src/HHbbVV/processors/corrections.py
+++ b/src/HHbbVV/processors/corrections.py
@@ -391,7 +391,7 @@ def _get_lepton_clipped(lep_pt, lep_eta, lepton_type, corr=None):
 
 # Used only for validation region right now
 def add_lepton_weights(weights: Weights, year: str, lepton: MuonArray, lepton_type: str = "muon"):
-    ul_year = get_UL_year(year)
+    # ul_year = get_UL_year(year)
 
     cset = correctionlib.CorrectionSet.from_file(get_pog_json(lepton_type, year))
 
@@ -405,9 +405,9 @@ def add_lepton_weights(weights: Weights, year: str, lepton: MuonArray, lepton_ty
     lepton_pt, lepton_eta = _get_lepton_clipped(lep_pt, lep_eta, lepton_type, corr)
 
     values = {}
-    values["nominal"] = cset[json_map_name].evaluate(ul_year, lepton_eta, lepton_pt, "sf")
-    values["up"] = cset[json_map_name].evaluate(ul_year, lepton_eta, lepton_pt, "systup")
-    values["down"] = cset[json_map_name].evaluate(ul_year, lepton_eta, lepton_pt, "systdown")
+    values["nominal"] = cset[json_map_name].evaluate(lepton_eta, lepton_pt, "nominal")
+    values["up"] = cset[json_map_name].evaluate(lepton_eta, lepton_pt, "systup")
+    values["down"] = cset[json_map_name].evaluate(lepton_eta, lepton_pt, "systdown")
 
     # add weights (for now only the nominal weight)
     weights.add(f"{lepton_type}_{corr}", values["nominal"], values["up"], values["down"])