Commit
fix: add debug stuff
Ming-Yan committed Oct 31, 2024
1 parent a83ae10 commit 604135d
Showing 4 changed files with 61 additions and 10 deletions.
29 changes: 29 additions & 0 deletions .gitlab-ci.yml
@@ -0,0 +1,29 @@

# This file is a template, and might need editing before it works on your project.
# This is a sample GitLab CI/CD configuration file that should run without any modifications.
# It demonstrates a basic 3 stage CI/CD pipeline. Instead of real tests or scripts,
# it uses echo commands to simulate the pipeline execution.
#
# A pipeline is composed of independent jobs that run scripts, grouped into stages.
# Stages run in sequential order, but jobs within stages run in parallel.
#
# For more information, see: https://docs.gitlab.com/ee/ci/yaml/index.html#stages
#
# You can copy and paste this template into a new `.gitlab-ci.yml` file.
# You should not add this template to an existing `.gitlab-ci.yml` file by using the `include:` keyword.
#
# To contribute improvements to CI/CD templates, please follow the Development guide at:
# https://docs.gitlab.com/ee/development/cicd/templates.html
# This specific template is located at:
# https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Getting-Started.gitlab-ci.yml

stages: # List of stages for jobs, and their order of execution
- deploy

deploy-job: # This job runs in the deploy stage.
stage: deploy # It only runs when *both* jobs in the test stage complete successfully.
script:
- 'curl --fail --request POST --form token=$MY_TRIGGER_TOKEN --form ref=master "https://gitlab.cern.ch/cms-analysis/btv/software-and-algorithms/autobtv/trigger/pipeline"'
rules:
- if: $CI_COMMIT_TAG
environment: production
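
The new deploy job fires only on tag commits (rules: if $CI_COMMIT_TAG) and uses a trigger token to kick off a pipeline in the autobtv project. For checking the token outside CI, roughly the same request can be sketched in Python; this is a hypothetical equivalent of the curl call, assuming the requests library is available and MY_TRIGGER_TOKEN is exported in the shell:

import os
import requests

# Hypothetical stand-in for the curl call in deploy-job above.
# Assumes MY_TRIGGER_TOKEN is exported locally; in CI it is a masked variable.
# Note: curl --form posts multipart form data; plain form fields are used here
# for simplicity.
url = "https://gitlab.cern.ch/cms-analysis/btv/software-and-algorithms/autobtv/trigger/pipeline"
resp = requests.post(url, data={"token": os.environ["MY_TRIGGER_TOKEN"], "ref": "master"})
resp.raise_for_status()  # like curl --fail: raise on an HTTP error status
print(resp.status_code, resp.text)
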
2 changes: 1 addition & 1 deletion scripts/fetch.py
@@ -197,7 +197,7 @@ def getFilesFromDas(args):
.read()
.split("\n")
)

print("Number of files: ", len(flist))
import json

dataset = dataset[:-1] if "\n" in dataset else dataset
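
The hunk above touches the debug print of how many files the DAS query returned. One caveat when reading that number: splitting raw command output on "\n" keeps a trailing empty string when the output ends with a newline, so the printed count can be one higher than the real file count. A minimal sketch of the pattern (the dasgoclient query string is hypothetical; getFilesFromDas builds the real one from the dataset name):

import os

# Hypothetical DAS query for illustration only.
flist = (
    os.popen("dasgoclient -query='file dataset=/SomeDataset/SomeCampaign/NANOAODSIM'")
    .read()
    .split("\n")
)
print("Number of files: ", len(flist))  # may count a trailing "" entry
flist = [f for f in flist if f != ""]   # optional cleanup before using the list
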
36 changes: 28 additions & 8 deletions scripts/suball.py
@@ -103,8 +103,6 @@ def get_lumi_from_web(year):
# "QCD_mu_sf"
],
}
if args.debug:
args.local = True
if args.scheme in workflows.keys():
workflow_group["test"] = [args.scheme]
args.scheme = "test"
@@ -127,7 +125,7 @@ def get_lumi_from_web(year):

for wf in workflow_group[args.scheme]:
if args.debug:
print(f"Start running {wf} workflow!!!")
print(f"======{wf} in {args.scheme}=====")
overwrite = "--overwrite" if args.overwrite else ""
## creating dataset
if (
@@ -136,13 +134,24 @@
)
or args.overwrite
):
if args.debug:
print(
f"Creating MC dataset: python scripts/fetch.py -c {args.campaign} --from_workflow {wf} --DAS_campaign {args.DAS_campaign} --year {args.year} {overwrite} --skipvalidation"
)
os.system(
f"python scripts/fetch.py -c {args.campaign} --from_workflow {wf} --DAS_campaign {args.DAS_campaign} --year {args.year} {overwrite} --skipvalidation"
)
if args.debug:
os.system(f"ls metadata/{args.campaign}/*.json are generated")

## Run the workflows
for types in predefined_sample[wf].keys():

if (types != "data" or types != "MC") and args.scheme == "Validation":
continue
print(
f"hists_{wf}_{types}_{args.campaign}_{args.year}_{wf}/hists_{wf}_{types}_{args.campaign}_{args.year}_{wf}.coffea"
)
if (
not os.path.exists(
f"hists_{wf}_{types}_{args.campaign}_{args.year}_{wf}/hists_{wf}_{types}_{args.campaign}_{args.year}_{wf}.coffea"
@@ -181,31 +190,36 @@ def get_lumi_from_web(year):
and "limit" not in key
):
runner_config += " --limit 50"
elif args.debug:
runner_config += " --limit 1 --executor iterative"

else:
runner_config += f" --{key}={value}"
runner_config = runner_config_required + runner_config
print(runner_config)
if args.debug:
print(f"run the workflow: {runner_config}")
with open(
f"config_{args.year}_{args.campaign}_{args.scheme}_{args.version}.txt",
"w",
) as config_list:
config_list.write(runner_config)

os.system(runner_config)

if args.debug:
print(f"workflow is finished for {wf}!")
# Get luminosity
if (
os.path.exists(
f"hists_{wf}_data_{args.campaign}_{args.year}_{wf}/hists_{wf}_data_{args.campaign}_{args.year}_{wf}.coffea"
)
or args.overwrite
):
if args.debug:
print(
f"Get the luminosity from hists_{wf}_data_{args.campaign}_{args.year}_{wf}/hists_{wf}_data_{args.campaign}_{args.year}_{wf}.coffea"
)
lumi = os.popen(
f"python scripts/dump_processed.py -t all -c hists_{wf}_data_{args.campaign}_{args.year}_{wf}/hists_{wf}_data_{args.campaign}_{args.year}_{wf}.coffea --json metadata/{args.campaign}/data_{args.campaign}_{args.year}_{wf}.json -n {args.campaign}_{args.year}_{wf}"
).read()

print(lumi)
lumi = int(
round(
float(
@@ -220,14 +234,20 @@ def get_lumi_from_web(year):
)
if os.path.exists(
f"hists_{wf}_MC_{args.campaign}_{args.year}_{wf}/hists_{wf}_MC_{args.campaign}_{args.year}_{wf}.coffea"
) and os.path.exists(
f"hists_{wf}_data_{args.campaign}_{args.year}_{wf}/hists_{wf}_data_{args.campaign}_{args.year}_{wf}.coffea"
):
print(lumi)
if args.debug:
print(f"Plot the dataMC for {wf}")
os.system(
f'python scripts/plotdataMC.py -i "hists_{wf}_*_{args.campaign}_{args.year}_{wf}/hists_{wf}_*_{args.campaign}_{args.year}_{wf}.coffea" --lumi {lumi} -p {wf} -v all --ext {args.campaign}_{args.year}{args.version}'
)
## Inspired from Uttiya, create remote directory
# https://github.com/cms-btv-pog/BTVNanoCommissioning/blob/14e654feeb4b4d738ee43ab913efb343ea65fd1d/scripts/submit/createremotedir.sh
# create remote direcotry
if args.debug:
print(f"Upload plots&coffea to eos: {wf}")
if not args.local:
os.system(f"mkdir -p {args.campaign}{args.version}/{wf}")
os.system(f"cp scripts/index.php {args.campaign}{args.version}/.")
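
Taken together, the suball.py changes make --debug echo every command before it runs, restrict the runner to a single file with the iterative executor (--limit 1 --executor iterative), and no longer force args.local. A minimal sketch of the echo-then-run pattern the commit adds around each os.system call (the command string below is a placeholder, not a real runner invocation):

import os

def run_step(command: str, debug: bool = False) -> None:
    # Pattern used throughout suball.py after this commit: in debug mode the
    # assembled command is printed first, then handed to the shell as before.
    if debug:
        print(f"run the workflow: {command}")
    os.system(command)

# placeholder invocation for illustration only
run_step("python runner.py --limit 1 --executor iterative", debug=True)
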
4 changes: 3 additions & 1 deletion src/BTVNanoCommissioning/helpers/update_branch.py
@@ -86,7 +86,9 @@ def missing_branch(events):
events.Jet,
{"btagPNetCvNotB": jets.btagPNetCvNotB},
)
if not hasattr(events.Jet, "btagRobustParTAK4CvNotB"):
if not hasattr(events.Jet, "btagRobustParTAK4CvNotB") and hasattr(
events.Jet, "btagRobustParTAK4B"
):
jets = events.Jet
jets["btagRobustParTAK4CvNotB"] = (
jets.btagRobustParTAK4CvB
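
The last hunk tightens the fallback for the RobustParT charm discriminant: the derived btagRobustParTAK4CvNotB column is now built only when btagRobustParTAK4B is actually present, so samples whose NanoAOD lacks the RobustParT branches skip the derivation instead of raising. A small sketch of the same hasattr guard on an awkward record array (the btagDeepFlavB field is illustrative, not part of the diff):

import awkward as ak

def needs_robustpart_cvnotb(jets) -> bool:
    # Mirrors the guard above: only derive CvNotB when the source branch exists
    # and the derived one is not already there.
    return not hasattr(jets, "btagRobustParTAK4CvNotB") and hasattr(
        jets, "btagRobustParTAK4B"
    )

# tiny stand-in for events.Jet from a campaign without RobustParT branches
jets = ak.Array([{"btagDeepFlavB": 0.95}, {"btagDeepFlavB": 0.02}])
print(needs_robustpart_cvnotb(jets))  # False -> the CvNotB computation is skipped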
