16 changes: 8 additions & 8 deletions .github/workflows/main.yml
@@ -315,7 +315,7 @@ jobs:
- name: bsm rabbit setup
run: >-
scripts/ci/run_with_singularity.sh scripts/ci/setup_and_run_python.sh scripts/rabbit/setupRabbit.py
-i $HIST_FILE --lumiScale $LUMI_SCALE --excludeProcGroups QCD WtoNMu_10 WtoNMu_50 --breitwignerWMassWeights
-i $HIST_FILE --lumiScale $LUMI_SCALE --addBSM WtoNMu_5 --breitwignerWMassWeights
--postfix bsm -o $WREMNANTS_OUTDIR

- name: bsm rabbit fit
@@ -772,13 +772,13 @@ jobs:
run: >-
scripts/ci/run_with_singularity.sh scripts/ci/setup_and_run.sh rabbit_plot_hists.py --config utilities/styles/styles.py
$WREMNANTS_OUTDIR/ZMassWLike_eta_pt_charge/fitresults_uncorr.hdf5 --result uncorr --title CMS --titlePos 1 --subtitle Preliminary
-o $WEB_DIR/$PLOT_DIR --legCols 6 --yscale 1.2 --prefit --legCol 6 --legSize large -m Basemodel
-o $WEB_DIR/$PLOT_DIR --legCols 6 --yscale 1.2 --prefit --legCol 6 --legSize large -m BaseMapping

- name: wlike postfit plot
run: >-
scripts/ci/run_with_singularity.sh scripts/ci/setup_and_run.sh rabbit_plot_hists.py --config utilities/styles/styles.py
$WREMNANTS_OUTDIR/ZMassWLike_eta_pt_charge/fitresults_uncorr.hdf5 --result uncorr --title CMS --titlePos 1 --subtitle Preliminary
-o $WEB_DIR/$PLOT_DIR --legCols 6 --yscale 1.2 --rrange 1.03 0.97 --binSeparationLines -2.2 -2.1 1.2 1.3 -m Basemodel
-o $WEB_DIR/$PLOT_DIR --legCols 6 --yscale 1.2 --rrange 1.03 0.97 --binSeparationLines -2.2 -2.1 1.2 1.3 -m BaseMapping

- name: wlike rabbit impacts
run: >-
@@ -873,13 +873,13 @@ jobs:
run: >-
scripts/ci/run_with_singularity.sh scripts/ci/setup_and_run.sh rabbit_fit.py
'$WREMNANTS_OUTDIR/ZMassDilepton_etaAbsEta_mll/ZMassDilepton.hdf5'
-t -1 --computeHistErrors --saveHists --saveHistsPerProcess --computeVariations --doImpacts -m Basemodel -m Project ch0 mll
-t -1 --computeHistErrors --saveHists --saveHistsPerProcess --computeVariations --doImpacts -m BaseMapping -m Project ch0 mll
-o '$WREMNANTS_OUTDIR/ZMassDilepton_etaAbsEta_mll/'

- name: dilepton postfit mll-etaAbsEta plot
run: >-
scripts/ci/run_with_singularity.sh scripts/ci/setup_and_run.sh rabbit_plot_hists.py --config utilities/styles/styles.py
$WREMNANTS_OUTDIR/ZMassDilepton_etaAbsEta_mll/fitresults.hdf5 -o $WEB_DIR/$PLOT_DIR -m Basemodel
$WREMNANTS_OUTDIR/ZMassDilepton_etaAbsEta_mll/fitresults.hdf5 -o $WEB_DIR/$PLOT_DIR -m BaseMapping
--yscale 1.4 --binSeparationLines -0.6 0.0 --titlePos 1 --title CMS --subtitle Preliminary

- name: dilepton postfit mll plot
@@ -1145,12 +1145,12 @@ jobs:
- name: dilepton prefit plot
run: >-
scripts/ci/run_with_singularity.sh scripts/ci/setup_and_run.sh rabbit_plot_hists.py $WREMNANTS_OUTDIR/ZMassDilepton_ptll_yll/fitresults.hdf5
--config utilities/styles/styles.py -o $WEB_DIR/$PLOT_DIR --yscale 1.2 --prefit --title CMS --subtitle Preliminary -m Basemodel
--config utilities/styles/styles.py -o $WEB_DIR/$PLOT_DIR --yscale 1.2 --prefit --title CMS --subtitle Preliminary -m BaseMapping

- name: dilepton postfit plot
run: >-
scripts/ci/run_with_singularity.sh scripts/ci/setup_and_run.sh rabbit_plot_hists.py $WREMNANTS_OUTDIR/ZMassDilepton_ptll_yll/fitresults.hdf5
--config utilities/styles/styles.py -o $WEB_DIR/$PLOT_DIR --yscale 1.2 --title CMS --subtitle Preliminary -m Basemodel
--config utilities/styles/styles.py -o $WEB_DIR/$PLOT_DIR --yscale 1.2 --title CMS --subtitle Preliminary -m BaseMapping

- name: dilepton plotting ptll
run: >-
@@ -1190,7 +1190,7 @@ jobs:
run: >-
scripts/ci/run_with_singularity.sh scripts/ci/setup_and_run.sh rabbit_plot_hists.py --config utilities/styles/styles.py
$WREMNANTS_OUTDIR/ZMassDilepton_ptll/fitresults_from_ZMassWLike_eta_pt_charge.hdf5 --result asimov_uncorr --postfix uncorr_fromWLike
-o $WEB_DIR/$PLOT_DIR --legCols 1 --yscale 1.2 --rrange 1.03 0.97 --title CMS --subtitle Preliminary -m Basemodel
-o $WEB_DIR/$PLOT_DIR --legCols 1 --yscale 1.2 --rrange 1.03 0.97 --title CMS --subtitle Preliminary -m BaseMapping

gen:
# The type of runner that the job will run on
2 changes: 1 addition & 1 deletion rabbit
Submodule rabbit updated 1 file
+1 −1 bin/rabbit_fit.py
1 change: 1 addition & 0 deletions scripts/corrections/make_theory_corr.py
@@ -250,6 +250,7 @@ def main():
eventgen_procs = [
"WtoNMu_MN-5-V-0p001",
"WtoNMu_MN-10-V-0p001",
"WtoNMu_MN-30-V-0p001",
"WtoNMu_MN-50-V-0p001",
]

15 changes: 8 additions & 7 deletions scripts/histmakers/mw_with_mu_eta_pt.py
@@ -76,6 +76,11 @@
default=common.get_default_mtcut(analysis_label),
help="Value for the transverse mass cut in the event selection",
)
parser.add_argument(
"--mtGenCut",
action="store_true",
help="Apply mt cut at generator level",
)
parser.add_argument(
"--vetoGenPartPt",
type=float,
@@ -675,11 +680,7 @@ def build_graph(df, dataset):
logger.info(f"build graph for dataset: {dataset.name}")
results = []
isW = dataset.name in common.wprocs
isBSM = dataset.name in [
"WtoNMu_MN-5-V-0p001",
"WtoNMu_MN-10-V-0p001",
"WtoNMu_MN-50-V-0p001",
]
isBSM = dataset.name.startswith("WtoNMu")
isWmunu = isBSM or dataset.name in [
"WplusmunuPostVFP",
"WminusmunuPostVFP",
@@ -785,7 +786,7 @@ def build_graph(df, dataset):
)
cols = [*cols, "run4axis"]

if isBSM:
if isWmunu:
# to compute inclusive cross section
unfolding_tools.add_xnorm_histograms(
results,
@@ -809,7 +810,7 @@
"pt_min": template_minpt,
"pt_max": template_maxpt,
"abseta_max": template_maxeta,
"mtw_min": None,
"mtw_min": args.mtCut if args.mtGenCut else None,
}
if hasattr(dataset, "out_of_acceptance"):
df = unfolding_tools.select_fiducial_space(
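
A minimal standalone sketch of how the new --mtGenCut switch combines with the existing --mtCut value (argument names taken from the diff above; the parser wiring and default shown here are illustrative assumptions, not the histmaker's actual code):

import argparse

# Hypothetical, trimmed-down parser; the real one lives in scripts/histmakers/mw_with_mu_eta_pt.py
parser = argparse.ArgumentParser()
parser.add_argument("--mtCut", type=float, default=40.0,
                    help="Value for the transverse mass cut in the event selection (default here is illustrative)")
parser.add_argument("--mtGenCut", action="store_true",
                    help="Apply mt cut at generator level")
args = parser.parse_args(["--mtCut", "45", "--mtGenCut"])

# The gen-level fiducial definition only picks up the mT threshold when --mtGenCut is set,
# mirroring the '"mtw_min": args.mtCut if args.mtGenCut else None' line above.
mtw_min = args.mtCut if args.mtGenCut else None
print(mtw_min)  # 45.0
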
20 changes: 20 additions & 0 deletions scripts/histmakers/mz_dilepton.py
@@ -273,6 +273,7 @@
"nonTrigMuons_charge0": hist.axis.Regular(
2, -2.0, 2.0, underflow=False, overflow=False, name="nonTrigMuons_charge0"
),
"ptll_resolution": hist.axis.Regular(1000, -1, 1, name="ptll_resolution"),
}

auxiliary_gen_axes = [
@@ -1068,6 +1069,25 @@ def build_graph(df, dataset):
)

# test plots
if args.validationHists:
# resolution plot
df = df.Define("ptll_relResolution", "(ptll - postfsrPTV)/postfsrPTV")
df = df.Define("ptll_resolution", "(ptll - postfsrPTV)")
results.append(
df.HistoBoost(
f"nominal_relResolution",
[all_axes["ptll_resolution"], all_axes["ptll"], axis_absYll],
["ptll_resolution", "postfsrPTV", "absYll", "nominal_weight"],
)
)
results.append(
df.HistoBoost(
f"nominal_resolution",
[all_axes["ptll_resolution"], all_axes["ptll"], axis_absYll],
["ptll_resolution", "postfsrPTV", "absYll", "nominal_weight"],
)
)

if args.validationHists and args.useDileptonTriggerSelection:
df_plusTrig = df.Filter("trigMuons_passTrigger0")
df_minusTrig = df.Filter("nonTrigMuons_passTrigger0")
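
A short sketch of how the new resolution histograms could be inspected once the histmaker output is loaded (histogram and axis names are taken from the HistoBoost calls above; the hists mapping and the loading step are assumptions):

import numpy as np
import hist

# Assumed: hists maps histogram names to hist.Hist objects from the histmaker output
h = hists["nominal_resolution"]  # axes: ptll_resolution, ptll (filled with postfsrPTV), absYll

# 1D resolution distribution for 10 < postfsrPTV < 20 GeV, integrated over |y_ll|
h1d = h[{"ptll": slice(10j, 20j, hist.sum), "absYll": hist.sum}]

# crude binned estimate of the resolution mean and width in that slice
centers = h1d.axes[0].centers
weights = h1d.values()
mean = np.average(centers, weights=weights)
sigma = np.sqrt(np.average((centers - mean) ** 2, weights=weights))
print(f"ptll resolution: mean = {mean:.3f} GeV, width = {sigma:.3f} GeV")
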
95 changes: 53 additions & 42 deletions scripts/plotting/inclusive_xsec_summary.py
@@ -47,14 +47,12 @@

pdf_results = {}
comp_result = {}
pdf_lumis = {}
# pdf_lumis = {}
for pdf_file in args.pdfFiles:
pdf_name = pdf_file.split("/")[-2].split("_")[-1]

pdf_result, pdf_meta = rabbit.io_tools.get_fitresult(pdf_file, meta=True)

pdf_lumis[pdf_name] = pdf_meta["meta_info_input"]["channel_info"]["ch0"]["lumi"]

pdf_model = pdf_result["physics_models"]

if "CompositeModel" in pdf_model.keys():
@@ -79,9 +77,15 @@
(r"$\mathrm{W}^{+}$", "Project ch1_masked qGen", "ch1_masked", {"qGen": 1}),
(r"$\mathrm{W}$", "Project ch1_masked", "ch1_masked", None),
(r"$\mathrm{Z}$", "Project ch0_masked", "ch0_masked", None),
# (
# r"$\mathrm{W}^{-}/\mathrm{W}^{+}$",
# "Ratio ch1_masked ch1_masked qGen:0,ptGen:sum,absEtaGen:sum qGen:1,ptGen:sum,absEtaGen:sum",
# "ch1_masked",
# None,
# ),
(
r"$\mathrm{W}^{+}/\mathrm{W}^{-}$",
"Ratio ch1_masked ch1_masked qGen:0,ptGen:sum,absEtaGen:sum qGen:1,ptGen:sum,absEtaGen:sum",
"Ratio ch1_masked ch1_masked qGen:1,ptGen:sum,absEtaGen:sum qGen:0,ptGen:sum,absEtaGen:sum",
"ch1_masked",
None,
),
@@ -130,18 +134,13 @@
h1 = h1[{"yield": hist.sum}]
hi = hi[{"yield": hist.sum}]

if model.startswith("Ratio"):
scale = 1
else:
scale = 1 / (lumi * 1000)

prefit = hp.value * scale
prefit_error = hp.variance**0.5 * scale
prefit = hp.value
prefit_error = hp.variance**0.5

value = h1.value * scale
error = h1.variance**0.5 * scale
value = h1.value
error = h1.variance**0.5

impacts = hi.values() * scale
impacts = hi.values()

labels = np.array(hi.axes["impacts"])
mask = np.isin(labels, grouping)
@@ -190,22 +189,26 @@
df["prefit_error"] = prefit_error

for pdf_name, pdf_res in pdf_results.items():
hr = pdf_res[model.replace("_masked", "")]["channels"][
channel_models = pdf_res[model.replace("_masked", "")]["channels"][
channel.replace("_masked", "")
]["hist_prefit_inclusive"].get()
]
hr = channel_models["hist_prefit_inclusive"].get()
hr_impacts = channel_models[
"hist_prefit_inclusive_global_impacts_grouped"
].get()

if selection is not None:
hr = hr[selection]
hr_impacts = hr_impacts[selection]
if getattr(hr, "axes", False) and "yield" in hr.axes.name:
hr = hr[{"yield": hist.sum}]
hr_impacts = hr_impacts[{"yield": hist.sum}]

if model.startswith("Ratio"):
scale = 1
else:
scale = 1 / (pdf_lumis[pdf_name] * 1000)

df[pdf_name] = hr.value * scale
df[f"{pdf_name}_error"] = hr.variance**0.5 * scale
df[pdf_name] = hr.value
df[f"{pdf_name}_error"] = hr.variance**0.5
df[f"{pdf_name}_pdf"] = hr_impacts[
{"impacts": f"pdf{pdf_name.replace('aN3LO','an3lo')}"}
]

# Convert 'labels' column to categorical with the custom order
df["label"] = pd.Categorical(df["label"], categories=custom_order, ordered=True)
@@ -305,6 +308,17 @@
marker="o",
label=pdf_name if i == 0 else None,
)
# # only plot PDF uncertainties
# pdf_error_pdf = df_g[f"{pdf_name}_pdf"].values[0] / norm
# ax.errorbar(
# [pdf_value],
# [i + 1 - (j + 1) / (nPDFs + 1)],
# xerr=pdf_error_pdf,
# color=pdf_colors[pdf_name],
# capsize=5,
# capthick=2,
# marker="o",
# )

# round to two significant digits in total uncertainty
sig_digi = 2 - int(math.floor(math.log10(abs(total)))) - 1
@@ -387,7 +401,7 @@
ax.set_xlim([lo, hi])
ax.set_ylim([0, len(norms) + 2])

ax.set_xlabel("1./Measurement", fontsize=20)
ax.set_xlabel("Prediction / Measurement")

# Disable ticks on the top and right axes
ax.tick_params(top=False)
@@ -442,10 +456,10 @@ def plot_cov_ellipse(cov, pos, nstd=2, **kwargs):
("WZ", xsec_keys[2], xsec_keys[3], "pb"),
("R", xsec_keys[4], xsec_keys[5], None),
):
ckey1 = (
ckey0 = (
channel0[1].replace("_masked", "") + " " + channel0[2].replace("_masked", "")
)
ckey2 = (
ckey1 = (
channel1[1].replace("_masked", "") + " " + channel1[2].replace("_masked", "")
)

@@ -456,52 +470,49 @@ def plot_cov_ellipse(cov, pos, nstd=2, **kwargs):

for pdf_name, result in comp_result.items():
ibin = 0
if name == "R":
scale = 1
else:
scale = 1 / (1000 * pdf_lumis[pdf_name])
for k, r in result["channels"].items():
fittype = "postfit" if f"hist_postfit_inclusive" in r.keys() else "prefit"

hi = r[f"hist_{fittype}_inclusive"].get()
if getattr(hi, "axes", False) and "yield" in hi.axes.name:
hi = hi[{"yield": hist.sum}]

if k == ckey1:
if k == ckey0:
sel = channel0[-1]

if sel is not None:
x = hi[sel].value * scale
x = hi[sel].value
ix = ibin + [i for i in sel.values()][0]
else:
x = hi.value * scale
x = hi.value
ix = ibin

if k == ckey2:
if k == ckey1:
sel = channel1[-1]
if sel is not None:
y = hi[channel0[-1]].value * scale
y = hi[sel].value
iy = ibin + [i for i in sel.values()][0]
else:
y = hi.value * scale
y = hi.value
iy = ibin

ibin += hi.size if hasattr(hi, "size") else 1

cov = result[f"hist_{fittype}_inclusive_cov"].get().values()
cov = cov[np.ix_([ix, iy], [ix, iy])] * scale**2
cov = cov[np.ix_([ix, iy], [ix, iy])]

# for pos, cov in zip(points, covs):
if fittype == "postfit":
icol = "grey"
ell = plot_cov_ellipse(
cov,
np.array([x, y]),
nstd=2,
edgecolor="none",
facecolor=icol,
facecolor="grey",
label="Measurement",
)
ax.add_patch(ell)
ax.plot(x, y, color="black", marker="P")
else:
icol = pdf_colors[pdf_name]
ell = plot_cov_ellipse(
Expand All @@ -513,8 +524,8 @@ def plot_cov_ellipse(cov, pos, nstd=2, **kwargs):
linewidth=2,
label=pdf_name,
)
ax.add_patch(ell)
ax.plot(x, y, color=icol, marker="o", alpha=0) # measurement center
ax.add_patch(ell)
ax.plot(x, y, color=icol, marker="o", alpha=0)

xlim = ax.get_xlim()
ylim = ax.get_ylim()
Expand Down Expand Up @@ -550,7 +561,7 @@ def plot_cov_ellipse(cov, pos, nstd=2, **kwargs):
padding_loc="auto",
)

plot_tools.add_cms_decor(ax, args.cmsDecor, data=True, lumi=lumi, loc=args.logoPos)
plot_tools.add_cms_decor(ax, args.cmsDecor, data=True, loc=args.logoPos)

outname = f"summary_2D_{name}"
if args.postfix:
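
For reference, a minimal sketch of what a plot_cov_ellipse helper like the one used above typically does (the script defines its own version, visible in the hunk headers; this re-implementation is illustrative only):

import numpy as np
from matplotlib.patches import Ellipse

def plot_cov_ellipse(cov, pos, nstd=2, **kwargs):
    # Build an Ellipse patch covering nstd standard deviations of a 2x2 covariance matrix;
    # the caller adds it to the axes with ax.add_patch(ell), as in the diff above.
    vals, vecs = np.linalg.eigh(cov)                 # eigenvalues ascending, orthonormal eigenvectors
    order = vals.argsort()[::-1]                     # sort descending so index 0 is the major axis
    vals, vecs = vals[order], vecs[:, order]
    theta = np.degrees(np.arctan2(*vecs[::-1, 0]))   # orientation of the major axis in degrees
    width, height = 2 * nstd * np.sqrt(vals)         # full (not half) axis lengths
    return Ellipse(xy=pos, width=width, height=height, angle=theta, **kwargs)
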
1 change: 1 addition & 0 deletions scripts/plotting/makeDataMCStackPlot.py
@@ -82,6 +82,7 @@
"QCD",
"WtoNMu_5",
"WtoNMu_10",
"WtoNMu_30",
"WtoNMu_50",
],
)