
Commit

skimmer running
rkansal47 committed Jul 22, 2024
1 parent f761bbe commit a99a8b9
Showing 2 changed files with 9 additions and 6 deletions.
7 changes: 5 additions & 2 deletions src/HHbbVV/processors/TTScaleFactorsSkimmer.py
@@ -421,6 +421,8 @@ def process(self, events: ak.Array):
         # Lund Plane SFs
         #########################
 
+        lp_hist = None
+
         if dataset in ["SingleTop", "TTToSemiLeptonic", "TTToSemiLeptonic_ext1"]:
             match_dict, gen_quarks, had_bs = ttbar_scale_factor_matching(
                 events, leading_fatjets[:, 0], selection_args
@@ -431,13 +433,14 @@ def process(self, events: ak.Array):
             skimmed_events = {**skimmed_events, **match_dict}
 
             if np.any(top_matched):
-                sf_dict = get_lund_SFs(
+                sf_dict, lp_hist = get_lund_SFs(
                     year,
                     events[top_matched],
                     fatjets[top_matched],
                     fatjet_idx[top_matched].to_numpy(),
                     num_prongs,
                     gen_quarks[top_matched],
+                    weights_dict["weight"][top_matched],
                     trunc_gauss=True,
                     lnN=True,
                     gen_bs=had_bs[top_matched],  # do b/l ratio uncertainty for tops as well
@@ -490,7 +493,7 @@ def process(self, events: ak.Array):
             )
             self.dump_table(pddf, fname)
 
-        return {year: {dataset: {"totals": totals_dict, "cutflow": cutflow}}}
+        return {year: {dataset: {"totals": totals_dict, "cutflow": cutflow, "lp_hist": lp_hist}}}
 
     def postprocess(self, accumulator):
         return accumulator
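With this change the processor output carries a per-chunk "lp_hist" next to the totals and cutflow, so downstream code has to combine those histograms across chunks. A minimal sketch of that merging step (not part of this commit), assuming each "lp_hist" entry is either None or a histogram object that supports addition (e.g. a hist.Hist); merge_lp_hists and chunk_outputs are hypothetical names:

def merge_lp_hists(chunk_outputs, year, dataset):
    # Combine the per-chunk Lund-plane histograms returned by the processor.
    merged = None
    for out in chunk_outputs:
        lp_hist = out[year][dataset]["lp_hist"]
        if lp_hist is None:
            continue  # chunk had no top-matched jets, nothing to add
        merged = lp_hist if merged is None else merged + lp_hist
    return merged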
8 changes: 4 additions & 4 deletions src/HHbbVV/processors/corrections.py
@@ -391,7 +391,7 @@ def _get_lepton_clipped(lep_pt, lep_eta, lepton_type, corr=None):
 
 # Used only for validation region right now
 def add_lepton_weights(weights: Weights, year: str, lepton: MuonArray, lepton_type: str = "muon"):
-    ul_year = get_UL_year(year)
+    # ul_year = get_UL_year(year)
 
     cset = correctionlib.CorrectionSet.from_file(get_pog_json(lepton_type, year))
 
@@ -405,9 +405,9 @@ def add_lepton_weights(weights: Weights, year: str, lepton: MuonArray, lepton_ty
         lepton_pt, lepton_eta = _get_lepton_clipped(lep_pt, lep_eta, lepton_type, corr)
 
         values = {}
-        values["nominal"] = cset[json_map_name].evaluate(ul_year, lepton_eta, lepton_pt, "sf")
-        values["up"] = cset[json_map_name].evaluate(ul_year, lepton_eta, lepton_pt, "systup")
-        values["down"] = cset[json_map_name].evaluate(ul_year, lepton_eta, lepton_pt, "systdown")
+        values["nominal"] = cset[json_map_name].evaluate(lepton_eta, lepton_pt, "nominal")
+        values["up"] = cset[json_map_name].evaluate(lepton_eta, lepton_pt, "systup")
+        values["down"] = cset[json_map_name].evaluate(lepton_eta, lepton_pt, "systdown")
 
         # add weights (for now only the nominal weight)
         weights.add(f"{lepton_type}_{corr}", values["nominal"], values["up"], values["down"])
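As the diff shows, the updated evaluate calls drop the UL-year string argument and use "nominal"/"systup"/"systdown" as the systematic keys instead of "sf". A minimal standalone sketch of that call pattern (the file path and correction name below are placeholders, not taken from this repository):

import numpy as np
import correctionlib

# Placeholder path to a locally downloaded lepton-SF correctionlib JSON.
cset = correctionlib.CorrectionSet.from_file("muon_Z.json.gz")

abs_eta = np.array([0.5, 1.2])
pt = np.array([40.0, 55.0])

# Placeholder correction name; no year string, and "nominal"/"systup"/"systdown" value keys.
sf_nom = cset["NUM_TightID_DEN_TrackerMuons"].evaluate(abs_eta, pt, "nominal")
sf_up = cset["NUM_TightID_DEN_TrackerMuons"].evaluate(abs_eta, pt, "systup")
sf_down = cset["NUM_TightID_DEN_TrackerMuons"].evaluate(abs_eta, pt, "systdown")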