Fitting new lattice-constrained NP model, PDFs from correction files through helicities #640
base: main
Changes from all commits: fa2cb77, 0819cf3, 1b30e47, 5f0a14a, fc8c89b, 7eb366f, 639f7fb
```diff
@@ -912,14 +912,18 @@ def add_pdf_uncertainty(self, operation=None, scale=-1.0):
     if scale != -1.0
     else theory_tools.pdf_inflation_factor(pdfInfo, self.args.noi)
 )
-pdf_hist = pdfName
-pdf_corr_hist = (
-    f"scetlib_dyturbo{pdf.upper().replace('AN3LO', 'an3lo')}VarsCorr"
-    if self.corr_hist_name == "scetlib_dyturboCorr"
-    else self.corr_hist_name.replace(
-        "Corr", "_CT18ZVarsCorr"
-    )  # TODO how do we get around this?
-)
+if self.from_hels:
+    pdf_hist = f"{pdfName}UncertByHelicity"
+    pdf_corr_hist = f"{pdfName}UncertByHelicity"
+else:
+    pdf_hist = pdfName
+    pdf_corr_hist = (
+        f"scetlib_dyturbo{pdf.upper().replace('AN3LO', 'an3lo')}VarsCorr"
+    )
 pdf_hist += "UncertByHelicity"
 pdf_corr_hist += "UncertByHelicity"
 symmetrize = "quadratic"
```
> **Collaborator:** Where is this variable used?
```diff
 if self.pdf_from_corr:
     theory_unc = self.datagroups.args_from_metadata("theoryCorr")
```
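As a side note on the new `else` branch above, the f-string simply rewrites the PDF key into the `scetlib_dyturbo…VarsCorr` histogram name. A quick, self-contained illustration; the example PDF keys are hypothetical and not necessarily the sets used in this analysis:

```python
# Illustration only: the naming transform used in the new else branch.
# The example PDF keys are hypothetical.
for pdf in ["ct18z", "msht20an3lo"]:
    name = f"scetlib_dyturbo{pdf.upper().replace('AN3LO', 'an3lo')}VarsCorr"
    print(f"{pdf} -> {name}")
# prints:
# ct18z -> scetlib_dyturboCT18ZVarsCorr
# msht20an3lo -> scetlib_dyturboMSHT20an3loVarsCorr
```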
|
|
```diff
@@ -998,7 +1002,13 @@ def add_pdf_alphas_variation(self, noi=False):
 asname = pdf_corr_hist.replace("Vars", "_pdfas")
 # alphaS from correction histograms only available for these sets,
 # so fall back to CT18Z for other sets
-if not ("MSHT20" in asname or "CT18Z" in asname or "MSHT20an3lo" in asname):
+if not (
```
> **Collaborator:** can we dynamically check if the correction histogram is there and otherwise use the default? E.g.
| "MSHT20" in asname | ||
| or "CT18Z" in asname | ||
| or "MSHT20an3lo" in asname | ||
| or "Lattice" in asname | ||
| or "nnlojet" in asname | ||
| ): # TODO should fix the correction file name for Lattice? | ||
| asname = "scetlib_dyturboCT18Z_pdfasCorr" | ||
| as_range = theory_tools.pdfMap["ct18z"]["alphasRange"] | ||
| if asname.replace("Corr", "") not in self.datagroups.args_from_metadata( | ||
|
|
```diff
@@ -1747,7 +1747,7 @@ def add_pdfUncertByHelicity_hist(
     ],
 )
 safeTensorName = f"{tensorName}_clamped"
-renorm = theory_tools.pdfMap[pdf].get("renorm", False)
+renorm = theory_tools.pdfMap.get(pdf, {}).get("renorm", False)
 if renorm:
     central_event_weight = "nominal_weight"
 else:
```
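The one-line change above replaces hard indexing with chained `dict.get` calls, so a PDF set missing from `theory_tools.pdfMap` (for example a new lattice-constrained set) falls back to `renorm = False` instead of raising a `KeyError`. A self-contained illustration with a made-up map:

```python
# Illustration with a toy pdfMap (keys and contents are made up).
pdfMap = {"ct18z": {"renorm": True}, "msht20": {}}

def get_renorm(pdf):
    # Chained .get: a missing PDF key or a missing "renorm" entry both yield False.
    return pdfMap.get(pdf, {}).get("renorm", False)

print(get_renorm("ct18z"))      # True
print(get_renorm("msht20"))     # False (key present, no "renorm" entry)
print(get_renorm("latticeNP"))  # False (key absent; old code would raise KeyError)
```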
|
|
```diff
@@ -2597,11 +2597,27 @@ def add_theory_hists(
     cols,
     **info,
 )
+if theory_helpers.get("pdf_from_corr") is not None:
```
|
> **Collaborator:** Maybe better:
```diff
+    pdf_from_corr_helpers = theory_helpers.get("pdf_from_corr")
+    for pdf in pdf_from_corr_helpers:
+        logger.debug(
+            f"Make PDF (from correction file) uncertainty by helicity histograms for {dataset_name} and PDF from correction {pdf}"
+        )
+        add_pdfUncertByHelicity_hist(
+            results,
+            df,
+            pdf_from_corr_helpers[pdf],
+            pdf,
+            pdf,
+            axes,
+            cols,
+            **info,
+        )
 if theory_helpers.get("alphaS") is not None:
-    logger.debug(
-        f"Make AlphaS uncertainty by helicity histograms for {dataset_name}"
-    )
+    for k, v in theory_helpers["alphaS"].items():
+        logger.debug(
+            f"Make alphaS uncertainty by helicity histogram for {dataset_name} and alphaS from correction {k}"
+        )
+        add_pdfAlphaSByHelicity_hist(results, df, v, axes, cols, name=k, **info)

 add_breit_wigner_mass_weights_hist(
```
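The reviewer's "Maybe better:" suggestion is truncated in the thread; one plausible reading (an assumption, not the reviewer's actual proposal) is to iterate the helpers dictionary directly instead of calling `theory_helpers.get("pdf_from_corr")` twice:

```python
# Sketch only: assumes theory_helpers["pdf_from_corr"] maps pdf name -> helper,
# as the indexing pdf_from_corr_helpers[pdf] in the diff suggests.
for pdf, helper in theory_helpers.get("pdf_from_corr", {}).items():
    logger.debug(
        f"Make PDF (from correction file) uncertainty by helicity histograms "
        f"for {dataset_name} and PDF from correction {pdf}"
    )
    add_pdfUncertByHelicity_hist(
        results, df, helper, pdf, pdf, axes, cols, **info
    )
```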
|
|
> **Comment:** Should this not be something with "MSHT20" here?