diff --git a/HIG16006/bin/MorphingMSSMRun2.cpp b/HIG16006/bin/MorphingMSSMRun2.cpp index bdf255c8d45..d40f4ac21cf 100644 --- a/HIG16006/bin/MorphingMSSMRun2.cpp +++ b/HIG16006/bin/MorphingMSSMRun2.cpp @@ -7,6 +7,7 @@ #include #include "boost/algorithm/string/predicate.hpp" #include "boost/program_options.hpp" +#include "boost/lexical_cast.hpp" #include "CombineHarvester/CombineTools/interface/CombineHarvester.h" #include "CombineHarvester/CombineTools/interface/Observation.h" #include "CombineHarvester/CombineTools/interface/Process.h" @@ -216,24 +217,36 @@ int main(int argc, char** argv) { } demo.Close(); cb.AddWorkspace(ws); - //cb.cp().signals().ExtractPdfs(cb, "htt", "$BIN_$PROCESS_morph"); cb.cp().process(ch::JoinStr({signal_types["ggH"], signal_types["bbH"]})).ExtractPdfs(cb, "htt", "$BIN_$PROCESS_morph"); cb.PrintAll(); string folder = "output/"+output_folder+"/cmb"; boost::filesystem::create_directories(folder); - + + + //Write out datacards. Naming convention important for rest of workflow. We + //make one directory per chn-cat, one per chn and cmb. In this code we only + //store the complete combined datacard for each directory, but we also + //introduce the label _mssm to allow to distinguish from individual cards if + //people want to store those too for some reason. cout << "Writing datacards ..."; - TFile output((folder + "/htt_mssm_input.root").c_str(), "RECREATE"); + TFile output((folder + "/htt_cmb_mssm_input.root").c_str(), "RECREATE"); + cb.cp().mass({"*"}).WriteDatacard(folder + "/htt_cmb_mssm.txt", output); + output.Close(); + + //Combined based on bin_id - i.e. make combined b-tag and no b-tag for (string chn : chns) { - auto bins = cb.cp().channel({chn}).bin_set(); - for (auto b : bins) { - cb.cp().channel({chn}).bin({b}).mass({"*"}).WriteDatacard(folder + "/" + b + ".txt", output); + auto bin_ids = cb.cp().bin_id_set(); + for (auto b : bin_ids) { + string folder_cat = "output/"+output_folder+"/htt_cmb_"+boost::lexical_cast(b)+"_13TeV"; + boost::filesystem::create_directories(folder_cat); + TFile output((folder_cat + "/htt_cmb_"+boost::lexical_cast(b)+"_13TeV_mssm_input.root").c_str(), "RECREATE"); + cb.cp().mass({"*"}).bin_id({b}).WriteDatacard(folder_cat + "/htt_cmb_"+boost::lexical_cast(b)+"_13TeV_mssm.txt", output); + output.Close(); } } - cb.cp().mass({"*"}).WriteDatacard(folder + "/htt_mssm.txt", output); - output.Close(); + //Individual channel-cats for (string chn : chns) { string folderchn = "output/"+output_folder+"/"+chn; boost::filesystem::create_directories(folderchn); diff --git a/HIG16006/input/mssm_protocol.txt b/HIG16006/input/mssm_protocol.txt new file mode 100644 index 00000000000..08f71fa54a0 --- /dev/null +++ b/HIG16006/input/mssm_protocol.txt @@ -0,0 +1,83 @@ +#This file collects up to date instructions for running the 13 TeV MSSM statistical results. For more detailed information on Combine Harvester please see the documentation at +http://cms-analysis.github.io/CombineHarvester/ + +1a) Setting up datacards: for model independent limits + +MorphingMSSMRun2 --output_folder="mssm_250116_svfit_modelindep" -m MH + +This will setup a subdirectory for each channel-cat, as well as combined channel and full combination directories. Each directory contains a single combined card as generated by WriteCards. +Note that this uses the new RooMorphingPdf and includes bin-by-bin uncertainties which are merged with merging parameter 0.4. Optional extras are controlled by --auto_rebin for automatic rebinning, --manual_rebin to choose a rebinning manually. 
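+
+For orientation, the directory layout produced by this step looks roughly as follows (names follow the conventions in MorphingMSSMRun2.cpp; the category number 8 is just one example, the exact bin ids depend on the analysis configuration):
+
+output/mssm_250116_svfit_modelindep/cmb/htt_cmb_mssm.txt                      (full combination)
+output/mssm_250116_svfit_modelindep/htt_cmb_8_13TeV/htt_cmb_8_13TeV_mssm.txt  (all channels, one category)
+output/mssm_250116_svfit_modelindep/mt/...                                    (one channel, all categories)
+output/mssm_250116_svfit_modelindep/htt_mt_8_13TeV/...                        (one channel-category)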
+
+1b) Setting up datacards: for model dependent limits
+
+MorphingMSSMRun2 --output_folder="mssm_250116_svfit_mhmodp"
+
+If not using the argument -m MH, the code will set up datacards for all 3 Higgs bosons, as required for model dependent limits.
+
+2a) Setting up workspaces for model independent limits
+
+Workspaces can be set up using the combine tool text2workspace, but this is now implemented in combineTool.py for easy conversion of multiple datacards:
+
+combineTool.py -M T2W -o "htt_ggPhi_mssm.root" -P CombineHarvester.CombinePdfs.ModelIndependent:floatingMSSMXSHiggs --PO 'modes=ggH' --PO 'ggHRange=0:20' -i output/mssm_250116_svfit_modelindep/*
+combineTool.py -M T2W -o "htt_bbPhi_mssm.root" -P CombineHarvester.CombinePdfs.ModelIndependent:floatingMSSMXSHiggs --PO 'modes=bbH' --PO 'bbHRange=0:20' -i output/mssm_250116_svfit_modelindep/*
+
+This applies the text2workspace step recursively, running it for every subdirectory and hence every channel-cat scenario. Workspaces are created for each of the two signal cases; in each case the other signal is profiled. There are many different options to combineTool, but this particular set will create a "combined card" named combined.txt.cmb in each subdirectory, and a workspace named as requested, either htt_ggPhi_mssm.root or htt_bbPhi_mssm.root.
+
+2b) Setting up workspaces for model dependent limits
+
+combineTool.py -M T2W -o "htt_mhmodp_mssm.root" -P CombineHarvester.CombinePdfs.MSSM:MSSM --PO filePrefix=$CMSSW_BASE/src/auxiliaries/models/ --PO modelFiles=13TeV,mhmodp_mu200_13TeV.root,1 -i output/mssm_250116_svfit_mhmodp/*
+
+This performs the same recursive application of text2workspace, this time applying the mhmodp physics model.
+
+3a) Running model independent limits
+
+This can be done directly with combine, but again combineTool makes life a lot easier by allowing successive calls (with different choices of job submission and parallelisation of the calculations):
+
+combineTool.py -m "90,100,110,120,130,140,160,180,200,250,300,350,400,450,500,600,700,800,900,1000" -M Asymptotic --boundlist ../CombinePdfs/scripts/mssm_ggh_boundaries.json --freezeNuisances MH --setPhysicsModelParameters r_ggH=0,r_bbH=0 -t -1 -d output/mssm_250116_svfit_modelindep/*/htt_ggH_mssm.root --there -n "ggH"
+
+combineTool.py -m "90,100,110,120,130,140,160,180,200,250,300,350,400,450,500,600,700,800,900,1000" -M Asymptotic --boundlist ../CombinePdfs/scripts/mssm_bbh_boundaries.json --freezeNuisances MH --setPhysicsModelParameters r_ggH=0,r_bbH=0 -t -1 -d output/mssm_250116_svfit_modelindep/*/htt_bbH_mssm.root --there -n "bbH"
+
+This goes to each subdirectory of output/mssm_250116_svfit_modelindep/ (--there) and performs the combine calculation for the listed masses on the corresponding workspace (either the ggH or the bbH workspace). The combine output files are stored in the directories alongside the datacards. Note that the option --parallel=X allows you to run the calculations interactively with X in parallel, while e.g. --job-mode 'lxbatch' --task-name 'mssm_ggH' --sub-opts '-q 1nh' --merge=4 could be used instead to run on lxbatch, merging 4 mass points into one job and submitting to the 1 hour queue.
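+
+For example, to run the ggH scan above interactively with up to eight mass points calculated at once (the value 8 is purely illustrative), the --parallel option can be appended:
+
+combineTool.py -m "90,100,110,120,130,140,160,180,200,250,300,350,400,450,500,600,700,800,900,1000" -M Asymptotic --boundlist ../CombinePdfs/scripts/mssm_ggh_boundaries.json --freezeNuisances MH --setPhysicsModelParameters r_ggH=0,r_bbH=0 -t -1 -d output/mssm_250116_svfit_modelindep/*/htt_ggH_mssm.root --there -n "ggH" --parallel 8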
+
+Once all calculations are complete, the results are collected into json files using:
+
+combineTool.py -M CollectLimits output/mssm_250116_svfit_modelindep/*/higgsCombine.ggH*.root --use-dirs -o "mssm_250116_svfit_ggH.json"
+
+combineTool.py -M CollectLimits output/mssm_250116_svfit_modelindep/*/higgsCombine.bbH*.root --use-dirs -o "mssm_250116_svfit_bbH.json"
+
+This places the json files in the current directory, one per subdirectory, with the subdirectory label appended to the given string (e.g. mssm_250116_svfit_ggH_mt.json), so every requested channel-cat and combination is then available for plotting.
+
+3b) Running model dependent limits
+
+combineTool.py -M AsymptoticGrid ../CombinePdfs/scripts/mssm_asymptotic_grid.json -d output/mssm_250116_svfit_mhmodp/mt/htt_mhmodp_mssm.root --job-mode 'interactive' --task-name 'mssm_mhmodp'
+
+The asymptotic grid mode reads in an input json defining the set of mA-tanb points to scan and perform the limit calculation for. This time the calculation is done once per workspace. The script has a useful feature: if you call it multiple times with the same workspace and asymptotic grid, it checks which points have already completed successfully and only runs those remaining, which makes it easy to top up the grid for a finer scan, for example. Once all points are complete, the final call will create an asymptotic_grid.root file containing the results.
+
+4a) Plotting model independent limits
+
+The usual Brazil band plots can be made using this script, e.g. for the mutau channel:
+
+python ../CombineTools/scripts/plotBSMxsBRLimit.py --file=mssm_250116_svfit_ggH_mt.json --log=1 --process="gg#phi" --custom_y_range=True --y_axis_max=1000 --y_axis_min=0.001 --expected_only
+
+Comparison plots can be made using the following script:
+
+python scripts/MSSMlimitCompare.py --file=mssm_250116_svfit_ggH_mt.json,mssm_250116_svfit_ggH_et.json --labels="mutau,etau" --expected_only --outname="mssm_250116_mt_vs_et_ggH" --process="gg#phi"
+
+The options --absolute and --relative can be used to make ratio plots as well.
+
+4b) Plotting model dependent limits
+
+python ../CombineTools/scripts/plotLimitGrid.py asymptotic_grid.root --scenario-label="m_{h}^{mod+} scenario" --output="mssm_250116_mhmodp_mt" --title-right="2.2 fb^{-1} (13 TeV)" --expected_only
+
+The plotting takes the asymptotic_grid.root file as input and performs an interpolation to produce smooth exclusion contours in the 2D mA-tanb plane. Note that this script contains many optional settings for this interpolation.
+
+5) Pre- or post-fit plots
+
+Prefit plots can be made with either the model dependent (3 Higgs) signal or a single resonance signal overlaid for a benchmark point. Example usage:
+
+python scripts/postFitPlot.py --postfitshapes --dir=output/mssm_250116_svfit_modelindep/htt_mt_8_13TeV/ --workspace=htt_ggPhi_mssm --ratio --extra_pad=0.3 --custom_x_range --x_axis_max=999.9 --x_axis_min=0.0 --r_ggH=0.1 --r_bbH=0.1 --mPhi=700
+
+This internally calls PostFitShapesFromWorkspace, which generates the signal and background templates for the benchmark point mPhi = 700 GeV with cross sections of 0.1 pb for the two signal processes. The script then makes a stacked plot. The option --model_dep can be used, with choices for --mA and --tanb, to instead plot the 3 Higgs signal from a model dependent workspace.
+
+By default (essentially hardcoded until we pass preapproval) the data will be blinded above 200 GeV.
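+
+The manual blinding window can also be changed from the default 200-4000 GeV range via the postFitPlot.py options, for example (the chosen values here are purely illustrative):
+
+python scripts/postFitPlot.py --postfitshapes --dir=output/mssm_250116_svfit_modelindep/htt_mt_8_13TeV/ --workspace=htt_ggPhi_mssm --manual_blind --x_blind_min=300 --x_blind_max=1500 --r_ggH=0.1 --r_bbH=0.1 --mPhi=700
+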
Automated blinding, based on s/root b bigger than 0.5 in a given bin, is also implemented and can be enable. The option --auto_blind_check_only will simply report which bins the calculation would choose to blind, based on internally running PostFitShapesFromWorkspace for a few benchmark points, whilst keeping the manual blinding of 200 and above. The option --auto_blind will then actually apply the blinding based on the s/root b calculation. To be used with caution - please call the check_only mode first to check it appears sensible! To simply make an s/root b plot for a given benchmark signal, the option --soverb_plot can be used, which replaces the ratio with the s/root b plot for signal and doesnt plot ANY data. diff --git a/CombineTools/scripts/limitCompare.py b/HIG16006/scripts/MSSMlimitCompare.py similarity index 98% rename from CombineTools/scripts/limitCompare.py rename to HIG16006/scripts/MSSMlimitCompare.py index c63b606ad7e..aac64a6f34c 100644 --- a/CombineTools/scripts/limitCompare.py +++ b/HIG16006/scripts/MSSMlimitCompare.py @@ -65,9 +65,9 @@ data = {} with open(files[i]) as jsonfile: data = json.load(jsonfile) - exp_graph_list[i] = plot.LimitTGraphFromJSON(data, 'expected') + exp_graph_list[i] = plot.LimitTGraphFromJSON(data, 'exp0') if args.relative or args.absolute: - obs_graph_list[i] = plot.LimitTGraphFromJSON(data, 'observed') + obs_graph_list[i] = plot.LimitTGraphFromJSON(data, 'obs') max_vals = [] for i in range(len(exp_graph_list)): diff --git a/HIG16006/scripts/postFitPlot.py b/HIG16006/scripts/postFitPlot.py index ad77948c0c5..2afe230ac82 100644 --- a/HIG16006/scripts/postFitPlot.py +++ b/HIG16006/scripts/postFitPlot.py @@ -41,27 +41,62 @@ def createAxisHists(n,src,xmin=0,xmax=499): parser = argparse.ArgumentParser() -parser.add_argument('--dir', '-d', help='Directory') -parser.add_argument('--file', '-f',help='Input file') -parser.add_argument('--mA',help='Signal m_A') -parser.add_argument('--tanb',help='Signal tanb') +parser.add_argument('--dir', '-d', help='Directory for plot (channel-category containing the datacard and workspace)') +parser.add_argument('--file', '-f',help='Input file if shape file has already been created') +parser.add_argument('--mA',default='700',help='Signal m_A to plot for model dep') +parser.add_argument('--tanb',default='30',help='Signal tanb to plot for model dep') +parser.add_argument('--mPhi',default='700',help='Signal m_Phi to plot for model indep') +parser.add_argument('--r_ggH',default='0.1',help='Signal ggH XS*BR for model indep') +parser.add_argument('--r_bbH',default='0.1',help='Signal bbH XS*BR for model indep') parser.add_argument('--postfitshapes',default=False,action='store_true',help='Run PostFitShapesFromWorkspace') parser.add_argument('--workspace',default='mhmodp',help='Part of workspace filename right before .root') +parser.add_argument('--model_dep',action='store_true',default=False,help='Make plots for full model dependent signal h,H,A') parser.add_argument('--mode',default='prefit',help='Prefit or postfit') -parser.add_argument('--blind', default=False,action='store_true',help='Blind data') -parser.add_argument('--ratio', default=False,action='store_true',help='Draw ratio') -parser.add_argument('--x_blind_min',default=100,help='Minimum x for blinding') -parser.add_argument('--x_blind_max',default=1000,help='Maximum x for blinding') +parser.add_argument('--manual_blind', action='store_true',default=False,help='Blind data with hand chosen range') 
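+# Note on the blinding options defined in this block: --manual_blind removes the data in the
+# fixed window [x_blind_min, x_blind_max]; --auto_blind instead removes any bin in which
+# S/sqrt(B) exceeds 0.5 for a set of benchmark signal points; --auto_blind_check_only only
+# reports which bins the automatic procedure would blind while the manual blinding stays applied.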
+parser.add_argument('--auto_blind',action='store_true',default=False,help='Blind data automatically based on s/root b') +parser.add_argument('--auto_blind_check_only',action='store_true',default=False,help='Only print blinding recommendation but still blind data using manual blinding') +parser.add_argument('--soverb_plot', action='store_true',default=False,help='Make plot with s/root b instead of ratio plot to test what it would blind') +parser.add_argument('--x_blind_min',default=200,help='Minimum x for manual blinding') +parser.add_argument('--x_blind_max',default=4000,help='Maximum x for manual blinding') +parser.add_argument('--ratio', default=True,action='store_true',help='Draw ratio plot') +parser.add_argument('--custom_x_range', help='Fix x axis range', action='store_true', default=False) +parser.add_argument('--x_axis_min', help='Fix x axis minimum', default=0.0) +parser.add_argument('--x_axis_max', help='Fix x axis maximum', default=1000.0) +parser.add_argument('--custom_y_range', help='Fix y axis range', action='store_true', default=False) +parser.add_argument('--y_axis_min', help='Fix y axis minimum', default=0.001) +parser.add_argument('--y_axis_max', help='Fix y axis maximum', default=100.0) +parser.add_argument('--log_y', action='store_true',help='Use log for y axis') +parser.add_argument('--log_x', action='store_true',help='Use log for x axis') +parser.add_argument('--extra_pad', help='Fraction of extra whitespace at top of plot',default=0.0) +parser.add_argument('--outname',default='',help='Optional string for start of output filename') args = parser.parse_args() mA = args.mA +mPhi = args.mPhi tb = args.tanb +r_ggH = args.r_ggH +r_bbH = args.r_bbH workspace = args.workspace mode = args.mode +manual_blind = args.manual_blind +auto_blind = args.auto_blind +soverb_plot = args.soverb_plot +auto_blind_check_only = args.auto_blind_check_only x_blind_min = args.x_blind_min x_blind_max = args.x_blind_max +extra_pad = float(args.extra_pad) +custom_x_range = args.custom_x_range +custom_y_range = args.custom_y_range +x_axis_min = float(args.x_axis_min) +x_axis_max = float(args.x_axis_max) +y_axis_min = float(args.y_axis_min) +y_axis_max = float(args.y_axis_max) +model_dep = args.model_dep +log_y=args.log_y +log_x=args.log_x +outname=args.outname + '_' if args.dir and args.file and not args.postfitshapes: print 'Provide either directory or filename, not both' @@ -76,21 +111,40 @@ def createAxisHists(n,src,xmin=0,xmax=499): print 'Provide directory when running with --postfitshapes option' sys.exit(1) +if manual_blind and auto_blind : + print 'Pick one option for blinding strategy out of --manual_blind and --auto_blind' +#For now, force that one type of blinding is always included! 
When unblinding the below line will need to be removed +if not manual_blind and not auto_blind: manual_blind=True -if args.postfitshapes: +#If call to PostFitWithShapes is requested, this is performed here +if args.postfitshapes or soverb_plot: for root,dirnames,filenames in os.walk(args.dir): - for filename in fnmatch.filter(filenames, '*_mssm.txt'): + for filename in fnmatch.filter(filenames, '*.txt.cmb'): datacard_file = os.path.join(root,filename) for filename in fnmatch.filter(filenames, '*%(workspace)s.root'%vars()): workspace_file = os.path.join(root,filename) - shape_file=workspace_file.replace('.root','_shapes_%(mA)s_%(tb)s.root'%vars()) - - os.system('PostFitShapesFromWorkspace -d %(datacard_file)s -w %(workspace_file)s -o %(shape_file)s --print --freeze mA=%(mA)s,tanb=%(tb)s'%vars()) + if model_dep : + shape_file=workspace_file.replace('.root','_shapes_mA%(mA)s_tb%(tb)s.root'%vars()) + shape_file_name=filename.replace ('.root','_shapes_mA%(mA)s_tb%(tb)s.root'%vars()) + else : + shape_file=workspace_file.replace('.root','_shapes_mPhi%(mPhi)s_r_ggH%(r_ggH)s_r_bbH%(r_bbH)s.root'%vars()) + shape_file_name=filename.replace ('.root','_shapes_mPhi%(mPhi)s_r_ggH%(r_ggH)s_r_bbH%(r_bbH)s.root'%vars()) + + if model_dep is True : + print "using mA and tanb" + freeze = 'mA='+mA+',tanb='+tb + else: + freeze = 'MH='+mPhi+',r_ggH='+r_ggH+',r_bbH='+r_bbH + print 'PostFitShapesFromWorkspace -d %(datacard_file)s -w %(workspace_file)s -o %(shape_file)s --print --freeze %(freeze)s'%vars() + os.system('PostFitShapesFromWorkspace -d %(datacard_file)s -w %(workspace_file)s -o %(shape_file)s --print --freeze %(freeze)s'%vars()) +#Otherwise a shape file with a given naming convention is required if not args.postfitshapes: if args.dir: for root,dirnames,filenames in os.walk(args.dir): + if model_dep: filestring = '*_shapes_%(mA)s_%(tb)s.root'%vars() + else: filestring = '*_shapes_mPhi%(mPhi)s_r_ggH%(r_ggH)s_r_bbH%(r_bbH)s.root'%vars() for filename in fnmatch.filter(filenames, '*_shapes_%(mA)s_%(tb)s.root'%vars()): shape_file = os.path.join(root,filename) elif args.file: @@ -98,22 +152,47 @@ def createAxisHists(n,src,xmin=0,xmax=499): histo_file = ROOT.TFile(shape_file) - +#Store plotting information for different backgrounds background_schemes = {'mt':[backgroundComp("QCD", ["QCD"], ROOT.TColor.GetColor(250,202,255)),backgroundComp("t#bar{t}",["TT"],ROOT.TColor.GetColor(155,152,204)),backgroundComp("Electroweak",["VV","W"],ROOT.TColor.GetColor(222,90,106)),backgroundComp("Z#rightarrow#mu#mu",["ZL","ZJ"],ROOT.TColor.GetColor(100,192,232)),backgroundComp("Z#rightarrow#tau#tau",["ZTT"],ROOT.TColor.GetColor(248,206,104))], 'et':[backgroundComp("QCD", ["QCD"], ROOT.TColor.GetColor(250,202,255)),backgroundComp("t#bar{t}",["TT"],ROOT.TColor.GetColor(155,152,204)),backgroundComp("Electroweak",["VV","W"],ROOT.TColor.GetColor(222,90,106)),backgroundComp("Z#rightarrowee",["ZL","ZJ"],ROOT.TColor.GetColor(100,192,232)),backgroundComp("Z#rightarrow#tau#tau",["ZTT"],ROOT.TColor.GetColor(248,206,104))], 'tt':[backgroundComp("QCD", ["QCD"], ROOT.TColor.GetColor(250,202,255)),backgroundComp("t#bar{t}",["TT"],ROOT.TColor.GetColor(155,152,204)),backgroundComp("Electroweak",["VV","W"],ROOT.TColor.GetColor(222,90,106)),backgroundComp("Z#rightarrow#tau#tau",["ZTT","ZL","ZJ"],ROOT.TColor.GetColor(248,206,104))], 'em':[backgroundComp("Misidentified e/#mu", ["QCD"], 
ROOT.TColor.GetColor(250,202,255)),backgroundComp("t#bar{t}",["TT"],ROOT.TColor.GetColor(155,152,204)),backgroundComp("Electroweak",["VV","W"],ROOT.TColor.GetColor(222,90,106)),backgroundComp("Z#rightarrowll",["ZL","ZJ"],ROOT.TColor.GetColor(100,192,232)),backgroundComp("Z#rightarrow#tau#tau",["ZTT"],ROOT.TColor.GetColor(248,206,104))]} +#Extract relevent histograms from shape file [sighist,binname] = getHistogram(histo_file,'TotalSig') +if not model_dep: sighist_ggH = getHistogram(histo_file,'ggH')[0] +if not model_dep: sighist_bbH = getHistogram(histo_file,'bbH')[0] bkghist = getHistogram(histo_file,'TotalBkg')[0] -sighist.Scale(1.0,"width") -bkghist.Scale(1.0,"width") total_datahist = getHistogram(histo_file,"data_obs")[0] blind_datahist = total_datahist.Clone() +total_datahist.SetMarkerStyle(20) blind_datahist.SetMarkerStyle(20) -if(args.blind): +#If using in test automated blinding mode, build the s/root b histogram for the ratio +sighist_forratio = sighist.Clone() +sighist_forratio.SetName("sighist_forratio") +sighist_ggH_forratio = sighist_ggH.Clone() +sighist_ggH_forratio.SetName("sighist_ggH_forratio") +sighist_bbH_forratio = sighist_bbH.Clone() +sighist_bbH_forratio.SetName("sighist_bbH_forratio") +if soverb_plot: + for j in range(1,bkghist.GetNbinsX()+1): + if model_dep: + soverb = sighist.GetBinContent(j)/math.sqrt(bkghist.GetBinContent(j)) + sighist_forratio.SetBinContent(j,soverb) + sighist_forratio.SetBinError(j,0) + else: + soverb_ggH = sighist_ggH.GetBinContent(j)/math.sqrt(bkghist.GetBinContent(j)) + soverb_bbH = sighist_bbH.GetBinContent(j)/math.sqrt(bkghist.GetBinContent(j)) + sighist_ggH_forratio.SetBinContent(j,soverb_ggH) + sighist_ggH_forratio.SetBinError(j,0) + sighist_bbH_forratio.SetBinContent(j,soverb_bbH) + sighist_bbH_forratio.SetBinError(j,0) + + +#Blinding by hand using requested range, set to 200-4000 by default +if manual_blind or auto_blind_check_only: for i in range(0,total_datahist.GetNbinsX()): low_edge = total_datahist.GetBinLowEdge(i+1) high_edge = low_edge+total_datahist.GetBinWidth(i+1) @@ -121,11 +200,58 @@ def createAxisHists(n,src,xmin=0,xmax=499): blind_datahist.SetBinContent(i+1,0) blind_datahist.SetBinError(i+1,0) -blind_datahist.Scale(1.0,"width") +#Automated blinding based on s/root b on bin by bin basis +if auto_blind or auto_blind_check_only: + #Points for testing added by hand and chosen cross-sections are the exclusion from HIG-14-029 scaled by parton lumi. 
Values above 1 TeV are + #crudely extrapolated using the 1 TeV limit and a higher parton lumi factor + points=[300,500,700,900,1100,1500,2000,3200] + ggH_sigmas=[0.27,0.12,0.067,0.044,0.044,0.08,0.1,0.1] + bbH_sigmas=[0.23,0.12,0.088,0.059,0.059,0.08,0.1,0.1] + for root,dirnames,filenames in os.walk(args.dir): + for filename in fnmatch.filter(filenames, '*.txt.cmb'): + datacard_file = os.path.join(root,filename) + for filename in fnmatch.filter(filenames, '*%(workspace)s.root'%vars()): + workspace_file = os.path.join(root,filename) + unblind_binlow_set=[] + unblind_binhigh_set=[] + for i,p in enumerate(points) : + shape_file=workspace_file.replace('.root','_shapes_mPhi'+str(p)+'.root') + freeze = 'MH='+str(p)+',r_ggH='+str(ggH_sigmas[i])+',r_bbH='+str(bbH_sigmas[i]) + print 'PostFitShapesFromWorkspace -d %(datacard_file)s -w %(workspace_file)s -o %(shape_file)s --freeze %(freeze)s'%vars() + os.system('PostFitShapesFromWorkspace -d %(datacard_file)s -w %(workspace_file)s -o %(shape_file)s --freeze %(freeze)s'%vars()) + + testhisto_file = ROOT.TFile(shape_file) + testsighist_ggH = getHistogram(testhisto_file,'ggH')[0] + testsighist_bbH = getHistogram(testhisto_file,'bbH')[0] + for j in range(1,bkghist.GetNbinsX()): + soverb_ggH = testsighist_ggH.GetBinContent(j)/math.sqrt(bkghist.GetBinContent(j)) + soverb_bbH = testsighist_bbH.GetBinContent(j)/math.sqrt(bkghist.GetBinContent(j)) + if soverb_ggH > 0.5 or soverb_bbH > 0.5: + if not auto_blind_check_only: blind_datahist.SetBinContent(j,0) + if not auto_blind_check_only: blind_datahist.SetBinError(j,0) + unblind_binlow_set.append(blind_datahist.GetBinLowEdge(j)) + unblind_binhigh_set.append(blind_datahist.GetBinLowEdge(j+1)) + if auto_blind_check_only: + binlow_list = sorted(set(unblind_binlow_set)) + binhigh_list = sorted(set(unblind_binhigh_set)) + print "Auto blinding algorithm would blind the following bins: " + for h in range(0,len(binlow_list)): + print binlow_list[h], "-", binhigh_list[h] + print "For this pass, applying manual blinding. 
Disable option --auto_blind_check_only to apply this automatic blinding" + +#Normalise by bin width except in soverb_plot mode +if not soverb_plot: + blind_datahist.Scale(1.0,"width") + total_datahist.Scale(1.0,"width") + sighist.Scale(1.0,"width") + if not model_dep: sighist_ggH.Scale(1.0,"width") + if not model_dep: sighist_bbH.Scale(1.0,"width") + bkghist.Scale(1.0,"width") channel=binname[4:6] +#Create stacked plot for the backgrounds bkg_histos = [] for i,t in enumerate(background_schemes[channel]): plots = t['plot_list'] @@ -139,7 +265,7 @@ def createAxisHists(n,src,xmin=0,xmax=499): h.SetFillColor(t['colour']) h.SetLineColor(ROOT.kBlack) h.SetMarkerSize(0) - h.Scale(1.0,"width") + if not soverb_plot: h.Scale(1.0,"width") bkg_histos.append(h) stack = ROOT.THStack("hs","") @@ -147,67 +273,140 @@ def createAxisHists(n,src,xmin=0,xmax=499): stack.Add(hists) - - -c2 = ROOT.TCanvas("c2","c2",0,0,800,800) +#Setup style related things +plot.ModTDRStyle(r=0.06, l=0.12) +c2 = ROOT.TCanvas() c2.cd() if args.ratio: pads=plot.TwoPadSplit(0.29,0.005,0.005) else: pads=plot.OnePad() pads[0].cd() -pads[0].SetLogy(1) +if(log_y): pads[0].SetLogy(1) +if(log_x): pads[0].SetLogx(1) if args.ratio: - axish = createAxisHists(2,bkghist,0,999) + if(log_x): pads[1].SetLogx(1) + axish = createAxisHists(2,bkghist,bkghist.GetXaxis().GetXmin(),bkghist.GetXaxis().GetXmax()) axish[1].GetXaxis().SetTitle("m_{#tau#tau} (GeV)") axish[1].GetYaxis().SetNdivisions(4) - axish[1].GetYaxis().SetTitle("Obs/Exp") + if not soverb_plot: axish[1].GetYaxis().SetTitle("Obs/Exp") + else: axish[1].GetYaxis().SetTitle("S/#sqrt(B)") + axish[0].GetXaxis().SetTitleSize(0) + axish[0].GetXaxis().SetLabelSize(0) + axish[0].GetYaxis().SetTitleOffset(1.4) + axish[1].GetYaxis().SetTitleOffset(1.4) + if custom_x_range: + axish[0].GetXaxis().SetRangeUser(x_axis_min,x_axis_max) + axish[1].GetXaxis().SetRangeUser(x_axis_min,x_axis_max) + if custom_y_range: + axish[0].GetYaxis().SetRangeUser(y_axis_min,y_axis_max) + axish[1].GetYaxis().SetRangeUser(y_axis_min,y_axis_max) else: - axish = createAxisHists(1,bkghist,0,999) -axish[0].GetYaxis().SetTitle("dN/dM_{#tau#tau} (1/GeV)") + axish = createAxisHists(1,bkghist,bkghist.GetXaxis().GetXmin(),bkghist.GetXaxis().GetXmax()) + axish[0].GetYaxis().SetTitleOffset(1.4) + if custom_x_range: + axish[0].GetXaxis().SetRangeUser(x_axis_min,x_axis_max) + if custom_y_range: + axish[0].GetYaxis().SetRangeUser(y_axis_min,y_axis_max) +if not soverb_plot: axish[0].GetYaxis().SetTitle("dN/dM_{#tau#tau} (1/GeV)") +else: axish[0].GetYaxis().SetTitle("Events") axish[0].GetXaxis().SetTitle("m_{#tau#tau} (GeV)") -axish[0].SetMaximum(1.2*bkghist.GetMaximum()) +axish[0].SetMaximum(extra_pad*bkghist.GetMaximum()) axish[0].SetMinimum(0.0009) axish[0].Draw() + +#Draw uncertainty band bkghist.SetFillColor(plot.CreateTransparentColor(12,0.4)) bkghist.SetMarkerSize(0) stack.Draw("histsame") bkghist.Draw("e2same") -sighist.SetLineColor(ROOT.kRed) -sighist.Draw("histsame") -blind_datahist.DrawCopy("psame") +#Add signal, either model dependent or independent +if model_dep is True: + sighist.SetLineColor(ROOT.kRed) + sighist.Draw("histsame") +else: + sighist_ggH.SetLineColor(ROOT.kBlue) + sighist_bbH.SetLineColor(ROOT.kBlue + 3) + sighist_ggH.Draw("histsame") + sighist_bbH.Draw("histsame") +if not soverb_plot: blind_datahist.DrawCopy("psame") axish[0].Draw("axissame") -legend = plot.PositionedLegend(0.20,0.20,3,0.03) +#Setup legend +legend = plot.PositionedLegend(0.30,0.30,3,0.03) legend.SetTextFont(42) 
-legend.AddEntry(sighist,"H,h,A#rightarrow#tau#tau"%vars(),"l") +legend.SetTextSize(0.025) +if model_dep is True: + legend.AddEntry(sighist,"H,h,A#rightarrow#tau#tau"%vars(),"l") +else: + legend.AddEntry(sighist_ggH,"gg#phi#rightarrow#tau#tau"%vars(),"l") + legend.AddEntry(sighist_bbH,"bb#phi#rightarrow#tau#tau"%vars(),"l") legend.SetFillColor(0) for legi,hists in enumerate(bkg_histos): legend.AddEntry(hists,background_schemes[channel][legi]['leg_text'],"f") legend.AddEntry(bkghist,"Background uncertainty","f") -legend.AddEntry(blind_datahist,"Observation","P") +if not soverb_plot: legend.AddEntry(blind_datahist,"Observation","P") legend.Draw("same") latex = ROOT.TLatex() latex.SetNDC() latex.SetTextAngle(0) latex.SetTextColor(ROOT.kBlack) -latex.SetTextSize(0.023) -latex.DrawLatex(0.63,0.63,"m_{h}^{mod+}, m_{A}=%(mA)s GeV, tan#beta=%(tb)s"%vars()) - -if args.ratio: +latex.SetTextSize(0.026) +if model_dep is True: + latex.DrawLatex(0.61,0.53,"m_{h}^{mod+}, m_{A}=%(mA)s GeV, tan#beta=%(tb)s"%vars()) +else: + latex.DrawLatex(0.61,0.53,"#sigma(gg#phi)=%(r_ggH)s pb,#sigma(bb#phi)=%(r_bbH)s pb"%vars()) + +#CMS and lumi labels +plot.FixTopRange(pads[0], plot.GetPadYMax(pads[0]), extra_pad if extra_pad>0 else 0.15) +plot.DrawCMSLogo(pads[0], 'CMS', 'Preliminary', 11, 0.045, 0.05, 1.0, '', 1.0) +plot.DrawTitle(pads[0], "2.2 fb^{-1} (13 TeV)", 3); + +#Add ratio plot if required +if args.ratio and not soverb_plot: + total_datahist.Divide(bkghist) blind_datahist.Divide(bkghist) - blind_datahist.SetFillColor(plot.CreateTransparentColor(12,0.4)) + total_datahist.SetFillColor(plot.CreateTransparentColor(12,0.4)) pads[1].cd() pads[1].SetGrid(0,1) axish[1].Draw("axis") axish[1].SetMinimum(0.7) axish[1].SetMaximum(1.3) - blind_datahist.Draw("e2psame") + total_datahist.SetMarkerSize(0) + total_datahist.Draw("e2same") + blind_datahist.DrawCopy("psame") + +if soverb_plot: + pads[1].cd() + pads[1].SetGrid(0,1) + axish[1].Draw("axis") + axish[1].SetMinimum(0) + axish[1].SetMaximum(2) + if model_dep: + sighist_forratio.SetLineColor(2) + sighist_forratio.Draw("same") + else: + sighist_ggH_forratio.SetLineColor(ROOT.kRed) + sighist_ggH_forratio.Draw("same") + sighist_bbH_forratio.SetLineColor(ROOT.kRed+3) + sighist_bbH_forratio.Draw("same") -outname = shape_file.replace(".root","_%(mode)s.pdf"%vars()) -c2.SaveAs("%(outname)s"%vars()) +pads[0].cd() +pads[0].GetFrame().Draw() +pads[0].RedrawAxis() + + +#Save as png and pdf with some semi sensible filename +shape_file_name = shape_file_name.replace(".root","_%(mode)s"%vars()) +shape_file_name = shape_file_name.replace("_shapes","") +outname += shape_file_name +if soverb_plot : outname+="_soverb" +if(log_y): outname+="_logy" +if(log_x): outname+="_logx" +c2.SaveAs("%(outname)s.png"%vars()) +c2.SaveAs("%(outname)s.pdf"%vars())
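As a compact reference for the automated blinding logic added in postFitPlot.py above, the following standalone sketch reproduces the bin-by-bin S/sqrt(B) > 0.5 decision. It is only an illustration: the function and histogram names and the dummy inputs in the __main__ block are invented here; in the real workflow the templates come from the PostFitShapesFromWorkspace output file.

import math
import ROOT

def auto_blind(data_hist, bkg_hist, sig_hists, threshold=0.5):
    # Zero the data in every bin where S/sqrt(B) exceeds the threshold for any of
    # the supplied signal templates, and return the edges of the blinded bins.
    blinded = []
    for j in range(1, bkg_hist.GetNbinsX() + 1):
        b = bkg_hist.GetBinContent(j)
        if b <= 0:
            continue  # skip empty background bins to avoid dividing by zero
        if any(s.GetBinContent(j) / math.sqrt(b) > threshold for s in sig_hists):
            data_hist.SetBinContent(j, 0)
            data_hist.SetBinError(j, 0)
            blinded.append((data_hist.GetBinLowEdge(j), data_hist.GetBinLowEdge(j + 1)))
    return blinded

if __name__ == '__main__':
    # Dummy inputs purely to demonstrate the call.
    data = ROOT.TH1F('data_obs', '', 10, 0, 1000)
    bkg = ROOT.TH1F('TotalBkg', '', 10, 0, 1000)
    ggH = ROOT.TH1F('ggH', '', 10, 0, 1000)
    bbH = ROOT.TH1F('bbH', '', 10, 0, 1000)
    for j in range(1, 11):
        data.SetBinContent(j, 100)
        bkg.SetBinContent(j, 100)
        ggH.SetBinContent(j, 2 * j)
        bbH.SetBinContent(j, j)
    print(auto_blind(data, bkg, [ggH, bbH]))

In the script itself this check runs once per benchmark mass point (with the corresponding ggH and bbH cross sections), and --auto_blind_check_only prints the affected bins instead of zeroing them.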