Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 13 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -126,7 +126,7 @@ Bin-by-bin statistical uncertainties on the templates are added by default and c
Perform mappings on the parameters and observables (the histogram bins in the (masked) channels).
Baseline mappings are defined in `rabbit/mappings/` and can be called in `rabbit_fit` with the `--mapping` or `-m` option e.g. `-m Select ch0 -m Project ch1 b`.
The first argument is the mapping name followed by arguments passed into the mapping.
Available physics models are
Available mappings are:
* `BaseMapping`: Compute histograms in all bins and all channels.
* `Select`: To select histograms of a channel, and perform a selection of processes and bins, supporting rebinning.
* `Project`: To project histograms to lower dimensions, respecting the covariance matrix across bins.
Expand All @@ -149,8 +149,18 @@ Custom mappings can be defined.
They can be specified with the full path to the custom mapping e.g. `-m custom_mapping.MyCustomMapping`.
The path must be accessible from your `$PYTHONPATH` variable and an `__init__.py` file must be in the directory.

### Physics models
TBD
### POI models
POI models can be used to introduce parameters of interest (POIs) and modify the number of predicted events in the fit.
Baseline models are defined in `rabbit/poi_models/` and can be called in `rabbit_fit` with the `--poiModel` option, e.g. `--poiModel Mu`.
Only one POI model can be used at a time.
Available POI models are:
* `Mu`: Scale the number of events for each signal process with an unconstrained parameter, and background processes with 1. This is the default model.
* `Ones`: Return ones, i.e. leave the number of predicted events the same.
* `Mixture`: Scale the `primary` processes by `x` and the `complementary` processes by `1-x`

Custom POI models can be defined.
They can be specified with the full path to the custom model e.g. `--poiModel custom_model.MyCustomModel`.
The path must be accessible from your `$PYTHONPATH` variable and an `__init__.py` file must be in the directory.

## Fit diagnostics

Expand Down
32 changes: 23 additions & 9 deletions bin/rabbit_fit.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
from rabbit import fitter, inputdata, io_tools, workspace
from rabbit.mappings import helpers as mh
from rabbit.mappings import mapping as mp
from rabbit.poi_models import helpers as ph
from rabbit.tfhelpers import edmval_cov

from wums import output_tools, logging # isort: skip
Expand Down Expand Up @@ -115,11 +116,14 @@ def make_parser():
)
parser.add_argument(
"--expectSignal",
default=1.0,
type=float,
help="rate multiplier for signal expectation (used for fit starting values and for toys)",
default=None,
nargs=2,
action="append",
help="""
Specify tuple with key and value to be passed to POI model (used for fit starting values and for toys).
E.g. '--expectSignal BSM 0.0 --expectSignal SM 1.0'
""",
)
parser.add_argument("--POIMode", default="mu", help="mode for POI's")
parser.add_argument(
"--allowNegativePOI",
default=False,
Expand Down Expand Up @@ -280,6 +284,12 @@ def make_parser():
nargs="*",
help="run fit on pseudo data with the given name",
)
parser.add_argument(
"--poiModel",
default=["Mu"],
nargs="+",
help="Specify POI model to be used to introduce non standard parameterization",
)
parser.add_argument(
"-m",
"--mapping",
Expand Down Expand Up @@ -529,7 +539,7 @@ def fit(args, fitter, ws, dofit=True):
nllvalreduced = fitter.reduced_nll().numpy()

ndfsat = (
tf.size(fitter.nobs) - fitter.npoi - fitter.indata.nsystnoconstraint
tf.size(fitter.nobs) - fitter.poi_model.npoi - fitter.indata.nsystnoconstraint
).numpy()

chi2 = 2.0 * nllvalreduced
Expand Down Expand Up @@ -653,15 +663,19 @@ def main():
blinded_fits = [f == 0 or (f > 0 and args.toysDataMode == "observed") for f in fits]

indata = inputdata.FitInputData(args.filename, args.pseudoData)
ifitter = fitter.Fitter(indata, args, do_blinding=any(blinded_fits))

margs = args.poiModel
poi_model = ph.load_model(margs[0], indata, *margs[1:], **vars(args))

ifitter = fitter.Fitter(indata, poi_model, args, do_blinding=any(blinded_fits))

# mappings for observables and parameters
if len(args.mapping) == 0 and args.saveHists:
# if no mapping is explicitly added and --saveHists is specified, fall back to BaseMapping
args.mapping = [["BaseMapping"]]
mappings = []
for margs in args.mapping:
mapping = mh.instance_from_class(margs[0], indata, *margs[1:])
mapping = mh.load_mapping(margs[0], indata, *margs[1:])
mappings.append(mapping)

if args.compositeMapping:
Expand All @@ -676,9 +690,9 @@ def main():
meta = {
"meta_info": output_tools.make_meta_info_dict(args=args),
"meta_info_input": ifitter.indata.metadata,
"signals": ifitter.indata.signals,
"pois": ifitter.poi_model.pois,
"procs": ifitter.indata.procs,
"nois": ifitter.parms[ifitter.npoi :][indata.noiidxs],
"nois": ifitter.parms[ifitter.poi_model.npoi :][indata.noiidxs],
}

with workspace.Workspace(
Expand Down
27 changes: 27 additions & 0 deletions rabbit/common.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import importlib
import pathlib
import re

Expand All @@ -18,3 +19,29 @@ def natural_sort_dict(dictionary):
sorted_keys = natural_sort(dictionary.keys())
sorted_dict = {key: dictionary[key] for key in sorted_keys}
return sorted_dict


def load_class_from_module(class_name, class_module_dict, base_dir):
    """Resolve *class_name* to a class object and return it.

    A dotted name (e.g. ``my_pkg.my_mod.MyClass``) is treated as a fully
    qualified path: everything before the last dot is imported as a module.
    A bare name is looked up in *class_module_dict* to find the module that
    hosts it under the package *base_dir*.

    Raises:
        ValueError: bare *class_name* is not a key of *class_module_dict*.
        AttributeError: the imported module has no attribute *class_name*.
    """
    if "." in class_name:
        # Fully qualified (relative or absolute) path supplied by the user.
        module_name, class_name = class_name.rsplit(".", 1)
    elif class_name in class_module_dict:
        # One of the baseline classes shipped with the package.
        module_name = f"{base_dir}.{class_module_dict[class_name]}"
    else:
        raise ValueError(
            f"Class {class_name} not found, available classes are {class_module_dict.keys()}"
        )

    # Import the hosting module; an ImportError propagates to the caller.
    module = importlib.import_module(module_name)

    this_class = getattr(module, class_name, None)
    if this_class is None:
        raise AttributeError(
            f"Class '{class_name}' not found in module '{module_name}'."
        )

    return this_class
Loading