Merge pull request #534 from emanca/normwtheo
Merge latest changes for W-like unblinding of helicity cross section fit
bendavid authored Aug 31, 2024
2 parents de930c0 + cb4e9ea commit b57d1e9
Showing 7 changed files with 105 additions and 126 deletions.
24 changes: 21 additions & 3 deletions scripts/histmakers/mz_dilepton.py
@@ -9,8 +9,8 @@
import ROOT
import narf
import wremnants
from wremnants import (theory_tools,syst_tools,theory_corrections, muon_validation, muon_calibration, muon_prefiring, muon_selections, unfolding_tools,
muon_efficiencies_binned, muon_efficiencies_smooth, pileup, vertex)
from wremnants import (helicity_utils,theory_tools,syst_tools,theory_corrections, muon_validation, muon_calibration, muon_prefiring, muon_selections, unfolding_tools,
muon_efficiencies_binned, muon_efficiencies_smooth, pileup, vertex, theoryAgnostic_tools)
from wremnants.histmaker_tools import scale_to_data, aggregate_groups
from wremnants.datasets.dataset_tools import getDatasets
import hist
@@ -22,10 +22,13 @@
parser.add_argument("--csVarsHist", action='store_true', help="Add CS variables to dilepton hist")
parser.add_argument("--axes", type=str, nargs="*", default=["mll", "ptll"], help="")
parser.add_argument("--finePtBinning", action='store_true', help="Use fine binning for ptll")
parser.add_argument("--useTheoryAgnosticBinning", action='store_true', help="Use theory agnostic binning (coarser) to produce the results")
parser.add_argument("--useDileptonTriggerSelection", action='store_true', help="Use dilepton trigger selection (default uses the Wlike one, with one triggering muon and odd/even event selection to define its charge, staying agnostic to the other)")
parser.add_argument("--noAuxiliaryHistograms", action="store_true", help="Remove auxiliary histograms to save memory (removed by default with --unfolding or --theoryAgnostic)")
parser.add_argument("--muonIsolation", type=int, nargs=2, default=[1,1], choices=[-1, 0, 1], help="Apply isolation cut to triggering and not-triggering muon (in this order): -1/1 for failing/passing isolation, 0 for skipping it. If using --useDileptonTriggerSelection, then the sorting is based on the muon charge as -/+")
parser.add_argument("--addRunAxis", action="store_true", help="Add axis with slices of luminosity based on run numbers (for data only)")
parser.add_argument("--flipEventNumberSplitting", action="store_true", help="Flip even with odd event numbers to consider the positive or negative muon as the W-like muon")
parser = common.set_parser_default(parser, "aggregateGroups", ["Diboson", "Top", "Wtaunu", "Wmunu"])
@@ -58,6 +61,11 @@
ewMassBins = theory_tools.make_ew_binning(mass = 91.1535, width = 2.4932, initialStep=0.010)

dilepton_ptV_binning = common.get_dilepton_ptV_binning(args.finePtBinning)
if args.useTheoryAgnosticBinning:
theoryAgnostic_axes, _ = differential.get_theoryAgnostic_axes(ptV_flow=True, absYV_flow=True,wlike=True)
axis_ptV_thag = theoryAgnostic_axes[0]
dilepton_ptV_binning = axis_ptV_thag.edges
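For reference, the override above only needs the bin edges of the first theory-agnostic axis. A minimal standalone sketch of the same pattern with the hist package, using made-up edges standing in for the axes returned by differential.get_theoryAgnostic_axes:

import hist

# Hypothetical coarse ptV axis standing in for theoryAgnostic_axes[0];
# the real edges come from differential.get_theoryAgnostic_axes(...).
axis_ptV_thag = hist.axis.Variable([0., 3., 6., 9.6, 12.4, 16., 21.4, 29.5, 60.], name="ptVgen")

# .edges is a numpy array of bin edges and can be passed wherever an
# explicit binning is expected, here replacing the default ptll binning.
dilepton_ptV_binning = axis_ptV_thag.edges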

# available axes for dilepton validation plots
all_axes = {
# "mll": hist.axis.Regular(60, 60., 120., name = "mll", overflow=not args.excludeFlow, underflow=not args.excludeFlow),
@@ -183,7 +191,7 @@ def build_graph(df, dataset):
else:
df = df.Define("weight", "std::copysign(1.0, genWeight)")

df = df.Define("isEvenEvent", "event % 2 == 0")
df = df.Define("isEvenEvent", f"event % 2 {'!=' if args.flipEventNumberSplitting else '=='} 0")

weightsum = df.SumAndCount("weight")

@@ -361,6 +369,16 @@ def build_graph(df, dataset):

results.append(df.HistoBoost("weight", [hist.axis.Regular(100, -2, 2)], ["nominal_weight"], storage=hist.storage.Double()))
results.append(df.HistoBoost("nominal", axes, [*cols, "nominal_weight"]))

if isZ:
# theory-agnostic helicity histogram: treat the helicity POIs as NOIs
theoryAgnostic_axes, theoryAgnostic_cols = differential.get_theoryAgnostic_axes(ptV_bins=[], absYV_bins=[], ptV_flow=True, absYV_flow=True, wlike=True)
axis_helicity = helicity_utils.axis_helicity_multidim

df = theoryAgnostic_tools.define_helicity_weights(df)
noiAsPoiHistName = Datagroups.histName("nominal", syst="yieldsTheoryAgnostic")
logger.debug(f"Creating special histogram '{noiAsPoiHistName}' for theory agnostic to treat POIs as NOIs")
results.append(df.HistoBoost(noiAsPoiHistName, [*axes, *theoryAgnostic_axes], [*cols, *theoryAgnostic_cols, "nominal_weight_helicity"], tensor_axes=[axis_helicity]))
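The HistoBoost call above fills one entry per helicity component through the nominal_weight_helicity tensor. A rough analogue with plain hist and numpy, assuming a toy event carrying a 9-component helicity weight vector (the unpolarized term plus eight angular coefficients); the axis layout here is an illustration, not the narf tensor mechanics:

import numpy as np
import hist

# Helicity index: -1 for the unpolarized term, 0..7 for the angular coefficients.
axis_helicity = hist.axis.Integer(-1, 8, name="helicity")
axis_ptll = hist.axis.Regular(20, 0., 100., name="ptll")

h = hist.Hist(axis_ptll, axis_helicity)

# One toy event, filled once per helicity with its own weight.
ptll = 25.3
helicity_weights = np.ones(9)  # stand-in for nominal_weight_helicity
h.fill(ptll=np.full(9, ptll), helicity=np.arange(-1, 8), weight=helicity_weights)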

# histograms for corrections/uncertainties for pixel hit multiplicity

52 changes: 25 additions & 27 deletions scripts/plotting/makePdfUncPlot.py
@@ -63,14 +63,11 @@

pdfInfo = theory_tools.pdfMap
pdfNames = [pdfInfo[pdf]["name"] for pdf in args.pdfs]
# histNames = pdfNames if not args.baseName else [f"{args.baseName}_{pdfName}" for pdfName in pdfNames]

# pdfHists = input_tools.read_all_and_scale(args.infile, args.datasets, histNames)

axis_label = "pdfVar"

uncType = [pdfInfo[pdf]["combine"] for pdf in args.pdfs]
uncScale = [pdfInfo[pdf]["scale"] if "scale" in pdfInfo[pdf] else 1. for pdf in args.pdfs]
# uncHists = [[h[{axis_label : 0}], *theory_tools.hessianPdfUnc(h, axis_label, unc, scale)] for h,unc,scale in zip(pdfHists, uncType, uncScale)]
names = [[pdfName + r" $\pm1\sigma$", "", ""] for pdfName in pdfNames]
cmap = cm.get_cmap("tab10")
colors = [[cmap(i)]*3 for i in range(len(args.pdfs))]
@@ -129,25 +126,24 @@
coeffs_alpha = []
axis_label_alpha = "alphasVar"
for ipdf,pdf in enumerate(args.pdfs):

has_as = "alphasRange" in pdfInfo[pdf]
if has_as:
asr = pdfInfo[pdf]["alphasRange"]
scale_alpha=(0.75 if asr == "002" else 1.5)
alphaNames.append(f"{args.baseName}_helicity_{args.baseName}_{pdfNames[ipdf]}alphaS{asr}")
alphaNames.append(f"{args.baseName}_helicity_{args.baseName}_helicity_{pdfNames[ipdf]}alphaS{asr}")
#nominal_gen_pdfMSHT20alphaS002
moments_alpha = input_tools.read_all_and_scale(args.infile, [dataset], alphaNames)

hel_alpha = moments_alpha[0].project('ptVgen','absYVgen','helicity','alphasVar')
# ratioUL = hh.divideHists(coeffs[{'helicity':-1.j,'muRfact':1.j,'muFfact':1.j}],scale_alpha*hel_alpha[{'helicity':-1.j}])
# coeffs_alpha.append(hh.multiplyHists(hel_alpha,ratioUL))
coeffs_alpha.append(hel_alpha)


uncHists[ipdf].extend([coeffs_alpha[ipdf][...,1]*scale_alpha,coeffs_alpha[ipdf][...,2]*scale_alpha])
uncHists[ipdf].extend([uncHists[ipdf][0]+(coeffs_alpha[ipdf][...,1]-uncHists[ipdf][0])*scale_alpha,uncHists[ipdf][0]+(coeffs_alpha[ipdf][...,2]-uncHists[ipdf][0])*scale_alpha])
names[ipdf].extend([pdfNames[ipdf] + r"alpha $\pm1\sigma$", ""])
# print(colors[ipdf])
colors[ipdf].extend([cmap(ipdf)]*2)
# print(colors[ipdf])

colors[ipdf].extend([cmap(ipdf+1)]*2)
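The key change in the loop above is how scale_alpha is applied: rather than scaling the varied histogram wholesale, the new line scales only its deviation from the nominal, presumably to normalize the different alphaS ranges (±0.002 scaled by 0.75, ±0.001 scaled by 1.5, both to ±0.0015). A toy numpy sketch of the difference:

import numpy as np

nominal = np.array([100., 200.])
varied = np.array([102., 196.])  # e.g. the alphaS-up yields
scale_alpha = 1.5

old = varied * scale_alpha                         # rescales the yield itself
new = nominal + (varied - nominal) * scale_alpha   # rescales only the shift

assert np.allclose(new, [103., 194.])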


# add QCD scales
uncHists.append([coeffs[{'muRfact':1.j,'muFfact':1.j}],*[coeffs[{"muRfact" : 2.j, "muFfact" : 2.j}],coeffs[{"muRfact" : 0.5j, "muFfact" : 0.5j}],coeffs[{"muRfact" : 2.j, "muFfact" : 1.j}], coeffs[{"muRfact" : 0.5j, "muFfact" : 1.j}],coeffs[{"muRfact" : 1.j, "muFfact" : 2.j}],coeffs[{"muRfact" : 1.j, "muFfact" : 0.5j}]]])
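The muRfact/muFfact selections above rely on hist's complex-number indexing, where an imaginary literal picks the bin containing that coordinate value (shorthand for hist.loc); a small sketch:

import hist

h = hist.Hist(hist.axis.Variable([0.5, 1., 2., 4.], name="muRfact"))
h.fill(muRfact=[0.7, 1.5, 1.5, 3.0])

# An integer selects a bin by position, a complex value by coordinate:
assert h[1] == h[1.5j] == 2.0  # both refer to the [1., 2.) bin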
@@ -187,7 +183,7 @@
if not "hel" in obs:
hists1D = [action(x,obs2unroll,binwnorm=True) for x in hists]
else:
hists1D = [action(x[{'helicity': ihel*1.j}],obs2unroll,binwnorm=True,add_flow_bins=False) for x in hists]
hists1D = [action(x[{'helicity': ihel*1.j}],obs2unroll,binwnorm=True,add_flow_bins=True) for x in hists]

all_hists.extend(hists1D)
all_hists_list.append(hists1D)
@@ -215,33 +211,35 @@
min_envelope = np.min(np.array(hvalues), axis=0)
ax1.fill_between(all_hists[0].axes[0].centers, min_envelope, max_envelope, color=all_colors_list[igroup][0], alpha=0.2, label='Envelope', step="mid")
ax2.fill_between(all_hists[0].axes[0].centers, min_envelope/np.abs(all_hists[0].values(flow=True)), max_envelope/np.abs(all_hists[0].values(flow=True)), color=all_colors_list[igroup][0], alpha=0.2, label='Envelope', step="mid")
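The envelope drawn above is a bin-by-bin min/max across all variation histograms; the core of it, as a short numpy sketch with toy arrays in place of the histogram values:

import numpy as np

# Toy stand-ins for the .values() arrays of each variation histogram.
hvalues = [np.array([1.0, 2.0, 3.0]),
           np.array([0.9, 2.2, 2.8]),
           np.array([1.1, 1.9, 3.1])]

max_envelope = np.max(np.array(hvalues), axis=0)  # upper band edge per bin
min_envelope = np.min(np.array(hvalues), axis=0)  # lower band edge per bin

assert np.allclose(max_envelope, [1.1, 2.2, 3.1])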

ax2.fill_between(all_hists[0].axes[0].centers,symm, 2-symm,color="grey",alpha=0.3, label="theory agnostic variation",hatch="//", step="mid")
ax2.set_xticklabels([])
ax2.set_xticks([])
min_val = np.min(np.concatenate((symm,2-symm)))
max_val = np.max(np.concatenate((symm,2-symm)))

if not ihel ==-1:
ax2.set_ylim(min_val,max_val)
else:
ax2.set_ylim(0,2)
plot_tools.save_pdf_and_png(outdir, outfile)
plot_tools.write_index_and_log(outdir, outfile)
# plot_tools.save_pdf_and_png(outdir, outfile)
# plot_tools.write_index_and_log(outdir, outfile)

# vars = np.ones((len(axis_ptV.centers)+1,len(axis_yV.centers)+1))
# min_arr = np.minimum(symm,2-symm)
# max_arr = np.maximum(symm,2-symm)
# max_arr = max_arr-1
vars = np.ones((len(axis_ptV.centers)+1,len(axis_yV.centers)+1))
min_arr = np.minimum(symm,2-symm)
max_arr = np.maximum(symm,2-symm)
max_arr = max_arr-1

# vars = max_arr.reshape((len(axis_ptV.centers)+1,len(axis_yV.centers)+1))
# vars[-1:,:] = 0.5*np.ones_like(vars[-1:,:])
# vars[:,-1:] = 0.5*np.ones_like(vars[:,-1:])
vars = max_arr.reshape((len(axis_ptV.centers)+1,len(axis_yV.centers)+1))
vars[-1:,:] = 0.5*np.ones_like(vars[-1:,:])
vars[:,-1:] = 0.5*np.ones_like(vars[:,-1:])

# variations[...,int(ihel)+1]=vars
variations[...,int(ihel)+1]=vars
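In the newly enabled block above, the "+1" in each reshape dimension makes room for the overflow bins of the ptV and |yV| axes, and the last row/column are then set to a flat 0.5 (50%) variation, presumably for out-of-acceptance events. A toy sketch of that padding:

import numpy as np

n_pt, n_y = 4, 3  # toy bin counts for ptV and |yV|
max_arr = np.full((n_pt + 1) * (n_y + 1), 0.05)  # stand-in for max_arr - 1

vars = max_arr.reshape((n_pt + 1, n_y + 1))
vars[-1:, :] = 0.5  # ptV overflow row: flat 50% variation
vars[:, -1:] = 0.5  # |yV| overflow column: flat 50% variation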

# hvariations = hist.Hist(axis_ptV,axis_yV,axis_helicity_multidim, name=f"theorybands_{dataset}",data=variations)
# band_hists[dataset] = hvariations
hvariations = hist.Hist(axis_ptV,axis_yV,axis_helicity_multidim, name=f"theorybands_{dataset}",data=variations)
band_hists[dataset] = hvariations

# outfile = "theoryband_variations_decorr_OOA_alphaS_wUL_new_ct18z.hdf5"
# with h5py.File(outfile, 'w') as f:
# narf.ioutils.pickle_dump_h5py("theorybands", band_hists, f)
outfile = "theoryband_variations_corr.hdf5"
with h5py.File(outfile, 'w') as f:
narf.ioutils.pickle_dump_h5py("theorybands", band_hists, f)

1 change: 1 addition & 0 deletions utilities/boostHistHelpers.py
@@ -3,6 +3,7 @@
from functools import reduce
import collections
from utilities import common, logging
from wremnants import plot_tools
import copy
import itertools

37 changes: 25 additions & 12 deletions utilities/styles/styles.py
@@ -148,18 +148,31 @@
"prefire",
"muonCalibration",
"Fake",
# "normWplus_Helicity-1",
# "normWplus_Helicity0",
# "normWplus_Helicity1",
# "normWplus_Helicity2",
# "normWplus_Helicity3",
# "normWplus_Helicity4",
# "normWminus_Helicity-1",
# "normWminus_Helicity0",
# "normWminus_Helicity1",
# "normWminus_Helicity2",
# "normWminus_Helicity3",
# "normWminus_Helicity4"
"normWplus_Helicity-1",
"normWplus_Helicity0",
"normWplus_Helicity1",
"normWplus_Helicity2",
"normWplus_Helicity3",
"normWplus_Helicity4",
"normWminus_Helicity-1",
"normWminus_Helicity0",
"normWminus_Helicity1",
"normWminus_Helicity2",
"normWminus_Helicity3",
"normWminus_Helicity4",
"normW_Helicity-1",
"normW_Helicity0",
"normW_Helicity1",
"normW_Helicity2",
"normW_Helicity3",
"normW_Helicity4",
"normZ",
"normZ_Helicity-1",
"normZ_Helicity0",
"normZ_Helicity1",
"normZ_Helicity2",
"normZ_Helicity3",
"normZ_Helicity4",
],
"min": common_groups + [
"massShiftW", "massShiftZ",
2 changes: 1 addition & 1 deletion wremnants-data
Submodule wremnants-data updated from 36e8fc to 6d6b6c