HHWWgg_Run_Jobs.sh
#!/bin/bash
############################################################################################################################################################################################
#
# Abe Tishelman-Charny
# 12 December 2019
#
# The purpose of this script is to run fggRunJobs with the HHWWgg tagger on data, signal, or background samples.
#
# Example Usage:
#
## -- workspaces
# . HHWWgg_Run_Jobs.sh --labelName HHWWgg_v2-6_Workspaces_X600 --nEvents all --json Taggers/test/HHWWgg_v2-6/HHWWgg_v2-6_X600.json --condorQueue microcentury -g -s -w
#
## -- all final state variable branches with cut flow
# . HHWWgg_Run_Jobs.sh --labelName HHWWgg_v2-6_Trees --nEvents all --json Taggers/test/HHWWgg_v2-6/HHWWgg_v2-6.json --condorQueue microcentury -g -c -v -t
# . HHWWgg_Run_Jobs.sh --labelName HHWWgg_fggBackgrounds_v2_1_oneDY --nEvents all --json Taggers/test/Era2017_RR-31Mar2018_v2_1_oneDY.json --condorQueue microcentury -g -c -v -t
# . HHWWgg_Run_Jobs.sh --labelName HHWWgg_v2-3_CutFlow_SM_CutFlow --nEvents all --json Taggers/test/HHWWgg_v2-3/HHWWgg_v2-3_SM.json --condorQueue longlunch -g -c -t
# . HHWWgg_Run_Jobs.sh --labelName HHWWgg_fggBkgs_1_DataMC --nEvents all --json Taggers/test/Era2017_RR-31Mar2018_v2_1.json --condorQueue longlunch -g -c -v -t
# . HHWWgg_Run_Jobs.sh --labelName HHWWgg_2017Data_Again --nEvents all --json Taggers/test/HHWWgg_2017_Data_All/HHWWgg_Data_All_2017.json --condorQueue microcentury -g -c -v -t
# . HHWWgg_Run_Jobs.sh --labelName HHWWgg_fggBackgrounds_v2_3 --nEvents all --json Taggers/test/Era2017_RR-31Mar2018_v2_3.json --condorQueue longlunch -g -c -v -t
###########################################################################################################################################################################################
## Perform these steps before running:
# cmsenv
# voms
# cp /tmp/MYPROXY ~/
# export X509_USER_PROXY=~/MYPROXY
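## ('voms' above is shorthand for obtaining a grid proxy, e.g. voms-proxy-init --voms cms --valid 168:00;
##  'MYPROXY' stands for the x509 proxy file that command writes to /tmp, typically x509up_u<uid>.)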
## User specific variables. Customize to your own working area(s)
fggDirec="/afs/cern.ch/work/c/chuw/NewCheck/CMSSW_10_5_0/src/flashgg/" # flashgg directory
ntupleDirec="/afs/cern.ch/work/c/chuw/NewCheck/CMSSW_10_5_0/src/flashgg/Samples/" # condor output directory
## Other script parameters
label="" # name for condor output directory in ntupleDirec
numEvents="" # integer, or 'all' to run on all events
runWorkspaceStd="false" # use Systematics/test/workspaceStd.py as config
doCutFlow="false" # perform HHWWgg cutflow within workspaceStd.py workflow
saveHHWWggFinalStateVars="false" # save extra variables
runttH="false" # run on ttH background sample only
calcSystematics="false" # run workspaceStd.py systematics
dumpTrees="false" # dump trees in fggrunjobs output
dumpWorkspaces="false" # dump workspaces in fggrunjobs output
dryRun="false" # do not submit jobs
jsonpath="" # local json file passed to fggRunJobs for arguments such as dataset and campaign (required)
condorQueue="tomorrow" # CERN HTCondor job flavour, e.g. espresso, microcentury, longlunch, workday, tomorrow
## Get user-specified arguments
options=$(getopt -o gcvstwr --long nEvents: --long labelName: --long json: --long condorQueue: -- "$@") # end name with colon ':' to specify argument string
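## Short flags (see the case statement below): -g run workspaceStd.py, -c do the cutflow, -v save final state variables,
## -s calculate systematics, -t dump trees, -w dump workspaces, -r dry run (do not submit)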
[ $? -eq 0 ] || {
    echo "Incorrect option provided"
    exit 1
}
eval set -- "$options"
while true; do
    case "$1" in
    #-s)
    #    runSignal="true"
    #    ;;
    # -d) runData="true" ;;
    -g) runWorkspaceStd="true" ;;
    -c) doCutFlow="true" ;;
    -v) saveHHWWggFinalStateVars="true" ;;
    -s) calcSystematics="true" ;;
    -t) dumpTrees="true" ;;
    -w) dumpWorkspaces="true" ;;
    -r) dryRun="true" ;;
    --nEvents) shift; numEvents=$1 ;;
    --labelName) shift; label=$1 ;;
    --json) shift; jsonpath=$1 ;;
    --condorQueue) shift; condorQueue=$1 ;;
    --)
        shift
        break
        ;;
    esac
    shift
done
## Output read arguments to user
echo "label = $label"
echo "numEvents = $numEvents"
echo "runWorkspaceStd = $runWorkspaceStd"
echo "jsonpath = $jsonpath"
echo "condorQueue = $condorQueue"
echo "calcSystematics = $calcSystematics"
echo "dumpTrees = $dumpTrees"
echo "dumpWorkspaces = $dumpWorkspaces"
## Make sure numEvents and label arguments are specified. These are compulsory
if [ -z "$numEvents" ]
then
    echo ""
    echo "Please enter a number of events with the --nEvents flag"
    echo "exiting"
    return
fi
if [ "$numEvents" = "all" ]
then
    echo "MaxEvents = -1"
    numEvents=-1 # cmsRun interprets maxEvents=-1 as "process all events"
fi
if [ -z "$label" ]
then
    echo "Please enter a directory name with the --labelName flag"
    echo "exiting"
    return
fi
## Make sure a json file is specified
# if [ $runData == 'false' ] && [ $runSignal == 'false' ] && [ $jsonpath == '' ]
if [ -z "$jsonpath" ]
then
    echo "Please specify a json path with --json <json_path>"
    echo "exiting"
    return
fi
## Set variables to user inputs
output_direc=$label
# Make output directories if they don't exist
mkdir -p $output_direc;
mkdir -p $ntupleDirec$output_direc;
root_file_output=$ntupleDirec
root_file_output+=$output_direc
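# root_file_output = $ntupleDirec$output_direc: the directory the condor jobs stage their output root files to (via --stage-to below)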
## Run HHWWgg Tagger with Systematics/test/workspaceStd.py
if [ $runWorkspaceStd == 'true' ]
then
    echo "Submitting jobs with Systematics/test/workspaceStd.py as cmssw config"
    command='fggRunJobs.py --load '
    command+=$jsonpath
    command+=' -D -P -n 500 -d '
    command+=$output_direc
    command+=" --stage-to="$root_file_output
    command+=' -x cmsRun Systematics/test/workspaceStd.py maxEvents=' # workspaceStd.py
    command+=$numEvents
    command+=' -q '
    command+=$condorQueue # condor job flavour chosen with --condorQueue
    command+=' --no-use-tarball --no-copy-proxy metaConditions='
    command+=$fggDirec
    command+='MetaData/data/MetaConditions/Era2017_RR-31Mar2018_v1.json '
    command+=' doHHWWggFLTag=True HHWWggFLTagsOnly=True ' # enable the HHWWgg tag in workspaceStd.py and run only that tag sequence
    if [ $calcSystematics == 'true' ]
    then
        command+=' doSystematics=True '
    else
        command+=' doSystematics=False '
    fi
    if [ $dumpTrees == 'true' ]
    then
        command+=' dumpTrees=True '
    else
        command+=' dumpTrees=False '
    fi
    if [ $dumpWorkspaces == 'true' ]
    then
        command+=' dumpWorkspace=True '
    else
        command+=' dumpWorkspace=False '
    fi
    echo "dryRun: $dryRun"
    if [ $dryRun == 'true' ]
    then
        command+=' dryRun=1 '
    fi
    if [ $doCutFlow == 'true' ]
    then
        command+=' doHHWWggFLTagCutFlow=1 '
    fi
    if [ $saveHHWWggFinalStateVars == 'true' ]
    then
        command+=' saveHHWWggFinalStateVars=1'
    fi
fi
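## For illustration, the first usage example at the top of this file (-g -s -w, --nEvents all, --condorQueue microcentury)
## assembles roughly the following command (exact spacing and paths depend on the flags and working areas set above):
# fggRunJobs.py --load Taggers/test/HHWWgg_v2-6/HHWWgg_v2-6_X600.json -D -P -n 500 -d HHWWgg_v2-6_Workspaces_X600 \
#   --stage-to=/afs/cern.ch/work/c/chuw/NewCheck/CMSSW_10_5_0/src/flashgg/Samples/HHWWgg_v2-6_Workspaces_X600 \
#   -x cmsRun Systematics/test/workspaceStd.py maxEvents=-1 -q microcentury --no-use-tarball --no-copy-proxy \
#   metaConditions=/afs/cern.ch/work/c/chuw/NewCheck/CMSSW_10_5_0/src/flashgg/MetaData/data/MetaConditions/Era2017_RR-31Mar2018_v1.json \
#   doHHWWggFLTag=True HHWWggFLTagsOnly=True doSystematics=True dumpTrees=False dumpWorkspace=True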
echo "Evaluating command: $command"
eval "$command"
echo "Finished job for file: $jsonpath"