Commit 6a10d1c1 authored by Patrick L.S. Connor

first commit, ready to submit FastSim but debugging FullSim

Fast/env 0 → 120000
../env
Fast/job 0 → 120000
../job
Fast/parallel 0 → 120000
../parallel
Fast/run 0 → 100755
#!/bin/zsh
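# FastSim chain: GEN through RECO/VALIDATION/DQM in a single cmsDriver step, then MiniAOD (PAT) in step 3.
# Usage: ./run <id> -- the id is used both as working directory and as random seed.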
nevents=1000
id=$1
mkdir -p $id
cd $id || exit 1
cfi=TTbar_13TeV_TuneCUETP8M1_cfi
cmsDriver.py $cfi --conditions auto:run2_mc --fast -n $nevents --era Run2_2016 --eventcontent FEVTDEBUGHLT,DQM --relval 100000,1000 -s GEN,SIM,RECOBEFMIX,DIGI:pdigi_valid,L1,DIGI2RAW,L1Reco,RECO,EI,VALIDATION:@standardValidation,DQM:@standardDQM --datatier GEN-SIM-DIGI-RECO,DQMIO --beamspot Realistic50ns13TeVCollision --fileout file:step1.root --no_exec
py=${cfi}_GEN_SIM_RECOBEFMIX_DIGI_L1_DIGI2RAW_L1Reco_RECO_EI_VALIDATION_DQM.py
echo "process.RandomNumberGeneratorService.generator.initialSeed = cms.untracked.uint32($id)" >> $py
cmsRun $py
#cmsDriver.py step2 --conditions auto:run2_mc --scenario pp --fast --era Run2_2016 -s HARVESTING:validationHarvesting --filetype DQM --mc -n $nevents --filein file:step1_inDQM.root --fileout file:step2.root #> step2_TTbar_13+TTbarFS_13+HARVESTUP15FS+MINIAODMCUP15FS.log 2>&1
cmsDriver.py step3 --conditions auto:run2_mc --fast -n $nevents --era Run2_2016 --eventcontent AODSIM,MINIAODSIM --runUnscheduled --filein file:step1.root -s PAT --datatier AODSIM,MINIAODSIM --mc --fileout file:step3.root #> step3_TTbar_13+TTbarFS_13+HARVESTUP15FS+MINIAODMCUP15FS.log 2>&1
Fast/submit 0 → 120000
../submit
Full/env 0 → 120000
../env
Full/job 0 → 120000
../job
Full/parallel 0 → 120000
../parallel
Full/run 0 → 100755
#!/bin/zsh
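# FullSim chain: step 1 GEN-SIM, step 2 DIGI/L1/HLT, step 3 RECO/PAT/VALIDATION/DQM.
# Usage: ./run <nevents> <id> -- note that the shared job and parallel scripts pass a single argument.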
nevents=$1
id=$2
mkdir -p $id
cd $id || exit 1
cfi=TTbar_13TeV_TuneCUETP8M1_cfi
cmsDriver.py $cfi --conditions auto:run2_mc -n $nevents --era Run2_2016 --eventcontent FEVTDEBUG --relval 9000,50 -s GEN,SIM --datatier GEN-SIM --beamspot Realistic50ns13TeVCollision --fileout file:step1.root --no_exec #> step1_TTbar_13+TTbar_13+DIGIUP15+RECOUP15+HARVESTUP15+ALCATTUP15.log 2>&1
py=${cfi}_GEN_SIM.py
ls $py
echo "process.RandomNumberGeneratorService.generator.initialSeed = cms.untracked.uint32($id)" >> $py
cmsRun $py
cmsDriver.py step2 --conditions auto:run2_mc -s DIGI:pdigi_valid,L1,DIGI2RAW,HLT:@relval2016 --datatier GEN-SIM-DIGI-RAW-HLTDEBUG -n $nevents --era Run2_2016 --eventcontent FEVTDEBUGHLT --filein file:step1.root --fileout file:step2.root #> step2_TTbar_13+TTbar_13+DIGIUP15+RECOUP15+HARVESTUP15+ALCATTUP15.log 2>&1
cmsDriver.py step3 --runUnscheduled --conditions auto:run2_mc -s RAW2DIGI,L1Reco,RECO,RECOSIM,EI,PAT,VALIDATION:@standardValidation+@miniAODValidation,DQM:@standardDQM+@ExtraHLT+@miniAODDQM --datatier GEN-SIM-RECO,AODSIM,MINIAODSIM,DQMIO -n $nevents --era Run2_2016 --eventcontent RECOSIM,AODSIM,MINIAODSIM,DQM --filein file:step2.root --fileout file:step3.root #> step3_TTbar_13+TTbar_13+DIGIUP15+RECOUP15+HARVESTUP15+ALCATTUP15.log 2>&1
Full/submit 0 → 120000
../submit
# FullSim & FastSim dataset production
This repository provides a setup to produce CMS MINIAODSIM datasets with either `FullSim` or `FastSim`.
## First installation
At DESY, you should load the CMSSW modules:
```
module use -a /afs/desy.de/group/cms/modulefiles/
module load cmssw
```
Then install `CMSSW_10_6_22`:
```
cmsrel CMSSW_10_6_22
```
Finally, clone this repository into `$CMSSW_BASE/src`.
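For reference, a complete first-time setup could look like this (the repository URL is left as a placeholder):
```
module use -a /afs/desy.de/group/cms/modulefiles/
module load cmssw
cmsrel CMSSW_10_6_22
cd CMSSW_10_6_22/src
cmsenv
git clone <URL-of-this-repository>
```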
## Execution
Load the environment (necessary each time you open a new shell):
```
source init
```
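To make sure the environment is loaded correctly, you can for instance check that the CMSSW executables are available (any CMSSW command would do):
```
which cmsRun
echo $CMSSW_BASE
```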
Since only the options passed to `cmsDriver.py` differ, the common scripts are kept in the root directory and are accessible via soft links in the two subdirectories `Fast` and `Full`.
Choose either of the two directories depending on what you want to generate; the instructions are then identical.
### A first simple test
To run a small test, use the (local) `run` script.
```
./run 42
```
The argument corresponds to the job id *and* to the seed of the random number generator.
If it runs without any problem (this can take up to a few minutes), you can proceed to the next step.
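The job runs in a directory named after the id; for the FastSim example you should find there the intermediate and final outputs, whose names follow the usual `cmsDriver.py` convention (the exact list may vary):
```
ls 42/
# expected among others: step1.root, step1_inDQM.root, step3.root, step3_inMINIAODSIM.root
```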
### Running parallel locally
You can use all the cores of the local machine with `parallel`: it runs several jobs at once without submitting anything to the batch system.
Don't run more jobs than the number of cores (which you can get with `nproc`).
This is not suitable for large-scale production, but it is convenient for testing without submitting jobs to Condor.
Just do:
```
./parallel
```
No option is necessary.
You can change the number of events by editing `nevents` in the `run` script, and the number of jobs via `NJOBS` in `parallel`.
This approach also ensures that a different seed is used for each job, since `parallel` passes a distinct id to `run`.
Always check the load on the local machine with `htop`, and do not use this option if too many people are already working on it.
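For example, to match the number of local jobs to the number of available cores, the hard-coded `NJOBS` in `parallel` could be replaced along these lines (a sketch, not part of the scripts):
```
export NJOBS=$(nproc)
```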
### Large scale submission on the cluster
The procedure is similar, just with another script:
```
./submit
```
This should be the preferred approach for large-scale production.
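Once the jobs have finished, each numbered directory contains the outputs of one job; the MiniAOD files should follow the usual `cmsDriver.py` naming convention, so they can be collected for instance with:
```
ls */step3_inMINIAODSIM.root
```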
#### Troubleshooting
Check the status of your jobs:
```
condor_q
```
If your job is on hold and you want to know more:
```
condor_q -global -better-analyze JOBID
```
If you want to kill all your jobs:
```
condor_rm -all
```
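The standard output, standard error, and Condor log of each job are written in the corresponding job directory, as defined in the `job` description file; for example for job 42:
```
cat 42/out 42/err
less 42/log
```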
env 0 → 100755
#!/bin/zsh
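# Wrapper used as the HTCondor executable: restore the library path stored by the
# submit script, then evaluate the command passed as arguments (e.g. "./run <ProcId>").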
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH_STORED
export PATH=$PATH
echo "$*"
eval "$*"
echo Done
init 0 → 100755
#!/bin/zsh
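# Set up the CMSSW environment at DESY; source this script in every new shell.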
module use -a /afs/desy.de/group/cms/modulefiles/
module load cmssw
cmsenv
job 0 → 100644
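# HTCondor submit description: queue NJOBS jobs (NJOBS is exported by the submit script);
# each job calls the env wrapper to run "./run <ProcId>" and writes out/err/log in its job directory.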
getenv = True
executable = env
arguments = "./run $(ProcId)"
transfer_executable = False
universe = vanilla
output = $(ProcId)/out
error = $(ProcId)/err
log = $(ProcId)/log
queue $ENV(NJOBS)
parallel 0 → 100755
#!/bin/zsh
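# Run NJOBS jobs locally in the background, each in its own numbered directory.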
export NJOBS=2
for i in {1..$NJOBS}
do
rm -rf $i
mkdir $i
./run $i &
done
wait
submit 0 → 100755
#!/bin/zsh
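# Prepare the per-job directories and submit NJOBS jobs to HTCondor.
# The CMSSW runtime is loaded here so that LD_LIBRARY_PATH can be forwarded to the jobs via the env wrapper.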
eval `/usr/bin/modulecmd zsh use -a /afs/desy.de/group/cms/modulefiles/`
eval `/usr/bin/modulecmd zsh load cmssw`
eval `scramv1 runtime -sh`
export LD_LIBRARY_PATH_STORED=$LD_LIBRARY_PATH
export NJOBS=1000
for i in {0..$(($NJOBS-1))} # HTCondor ProcId runs from 0 to NJOBS-1
do
rm -rf $i
mkdir $i
done
condor_submit job