# Submission script for SLURM. Each job will
# be executed via the job script (jobScript_SL.sh).
# The job script supports up to 7 parameters. Edit
# the user specific part of the script according to
# your needs.
#
# Input to the script is a file list with 1 file per line.
# For each file a job is started. With the parameter
# nFilesPerJob a comma separated file list will be
# generated and handed to the job script. This feature
# is useful when running many small jobs. Each
# job has its own log file. All directories needed for the
# log files will be created if they do not exist.
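#
# Example (illustrative file names only): with nFilesPerJob=2 a filelist
#      /some/path/file1.hld
#      /some/path/file2.hld
#      /some/path/file3.hld
# yields two jobs, which receive "file1.hld,file2.hld" and "file3.hld"
# as their comma separated input lists.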
#
# IMPORTANT: the batch farm jobs will only see the /lustre
# file system. All needed scripts, programs and parameters
# have to be located on /lustre or the job will crash.
# This script syncs your working dir to the submission
# dir on /lustre. Make sure your scripts use the submission dir!
# Software should be taken from /cvmfs/hades.gsi.de/install/
#
# Job log files are named after the input files. If nFilesPerJob > 1
# the log file names contain the part number instead.
######################################################################
currentDir=$(basename "$(pwd)")   # name of the current working dir
currentDir=../$currentDir         # relative path, so rsync below copies the whole dir
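
# assumed definitions: account name and day of the beam time (adjust to your setup)
user=$(whoami)   # user name used to build the /lustre paths below
day=108          # hypothetical day number of the apr12 beam time; set as needed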
submmissionbase=/lustre/nyx/hades/user/${user}/sub/apr12
submissiondir=${submmissionbase}/dstreal
nFilesPerJob=1                                                  # number of files to be analyzed by 1 job (default==1)
jobscript=${submissiondir}/jobScript_SL.sh                      # exec script (full path, call without dot, set it executable!)
outputdir=/lustre/nyx/hades/user/${user}/dst/apr12/gen8/$day    # outputdir for files AND log files
pathoutputlog=${outputdir}/out                                  # protocol from batch farm for each file
filename=testrun                                                # filename of log file if nFilesPerJob > 1 (part number will be appended)
par1=/cvmfs/hades.gsi.de/install/5.34.34/hydra2-4.m/defall.sh   # optional par1 : environment script
par2=${submissiondir}/analysisDST                               # optional par2 : executable
par3=""                                                         # optional par3 : input file list (filled per job below)
par4=${outputdir}                                               # optional par4 : output file (part number will be appended (_num.root))
par5=1000000                                                    # optional par5 : number of events
par6="no"                                                       # optional par6
par7="no"                                                       # optional par7
resources="--mem=2000 --time=0-10:00:00"                        # runtime < 10h, mem < 2GB

jobarrayFile="gen8_day_${day}_jobarray.dat"

filelist=day_${day}_nyx.list   # file list in local dir! not in submissiondir!
######################################################################

nFiles=$(cat $filelist | wc -l)

#---------------------------------------------------------------------
# create the needed directories if they do not exist
if [ ! -d $submmissionbase ]
then
    echo "===> CREATE SUBMISSIONBASEDIR : $submmissionbase"
    mkdir -p $submmissionbase
else
    echo "===> USE SUBMISSIONBASEDIR : $submmissionbase"
fi
#---------------------------------------------------------------------
if [ ! -d $pathoutputlog ]
then
    echo "===> CREATE LOGDIR : $pathoutputlog"
    mkdir -p $pathoutputlog
else
    echo "===> USE LOGDIR : $pathoutputlog"
fi
if [ ! -d $outputdir ]
then
    echo "===> CREATE OUTPUTDIR : $outputdir"
    mkdir -p $outputdir
else
    echo "===> USE OUTPUTDIR : $outputdir"
fi
if [ ! -d $outputdir/crash ]
then
    echo "===> CREATE CRASHDIR : $outputdir/crash"
    mkdir -p $outputdir/crash
fi
if [ ! -d $outputdir/root ]
then
    echo "===> CREATE ROOTDIR : $outputdir/root"
    mkdir -p $outputdir/root
fi
if [ ! -d $outputdir/filter ]
then
    echo "===> CREATE FILTERDIR : $outputdir/filter"
    mkdir -p $outputdir/filter
fi
if [ ! -d $outputdir/qa ]
then
    echo "===> CREATE QADIR : $outputdir/qa"
    mkdir -p $outputdir/qa
fi
#---------------------------------------------------------------------

ctF=0          # counter for file number
ctJ=0          # counter for job number
partNumber=0   # counter for part number
if [ -f $jobarrayFile ]
then
    rm -f $jobarrayFile   # assumed: remove a stale job array file from a previous run
fi

echo "===> CREATING JOB ARRAY FILE"
#---------------------------------------------------------------------
# read the file list into a job array
for file in $(cat $filelist)
do
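    jobarray[$ctF]=$file   # assumed loop body: store each input file in the array
    ((ctF+=1))
done
ctF=0   # reset the file counter; it is reused below to walk through the array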
#---------------------------------------------------------------------

#---------------------------------------------------------------------
# loop over the job array and submit parts with
# nFilesPerJob files per job to the job script
while ((ctF<$nFiles))
do
    #---------------------------------------------------------------------
    # build comma separated file list
    if [ $nFilesPerJob -gt 1 ]
    then
        infileList=${jobarray[${ctF}]}
        ((ctF+=1))   # advance the file counter
        for (( ctList=1; ctList<$nFilesPerJob; ctList++ ))
        do
            if [ $ctF -lt ${nFiles} ]
            then
                infileList="${infileList},${jobarray[${ctF}]}"
                ((ctF+=1))
            fi
        done
    else
        infileList=${jobarray[${ctF}]}
        ((ctF+=1))
    fi
    #---------------------------------------------------------------------
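    ((partNumber+=1))   # assumed: advance the part number used in output and log file names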
    logfile="${pathoutputlog}/${filename}_${partNumber}.log"

    if [ $nFilesPerJob -eq 1 ]
    then
        file=$(basename ${infileList})
        logfile="${pathoutputlog}/${file}.log"
    fi
    ######################################################################
    # SEND NEW JOB (USER SPECIFIC)
    # defall.sh prog filelist outdir nev
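    par3=${infileList}   # assumed: hand this job's comma separated input file list to the job script as par3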
    echo "${par1} ${par2} ${par3} ${par4} ${par5} ${par6} ${par7}" >> $jobarrayFile
    ######################################################################
done
#---------------------------------------------------------------------

#---------------------------------------------------------------------
# sync the locally modified stuff
# to the submission dir
echo "===> SYNC CURRENTDIR TO SUBMISSIONDIR : rsync -vHaz $currentDir ${submmissionbase}"
rsync -vHaz $currentDir ${submmissionbase}/
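syncStat=$?   # assumed: capture the rsync exit status for the check below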
if [ $syncStat -ne 0 ]
then
    echo "===> ERROR : SYNCHRONIZATION ENCOUNTERED PROBLEMS"
    exit 1   # assumed: abort the submission if the sync failed
fi
echo "-------------------------------------------------"
nFiles=$(cat $jobarrayFile | wc -l)
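
# submit the job array in blocks (SLURM typically limits the array size per submission)
ctsend=0     # assumed counter for already submitted blocks
block=1000   # hypothetical block size; adjust to the array size limit of the farm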
while ((${ctsend} * ${block} < ${nFiles}))
do
    ((start=${ctsend}*${block}))
    ((rest=${nFiles}-${start}))
    if [ $rest -le $block ]
    then
        ((stop=$rest))    # assumed: the last block submits only the remaining jobs
    else
        ((stop=$block))   # assumed: a full block otherwise
    fi
    ((arrayoffset=${ctsend} * ${block}))
    command="--array=1-${stop} ${resources} -D ${submissiondir} --output=${pathoutputlog}/slurm-%A_%a.out ${jobscript} ${submissiondir}/${jobarrayFile} ${pathoutputlog} ${arrayoffset}"
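    # assumed submission step: pass the assembled options and arguments to sbatch,
    # then move on to the next block of jobs
    sbatch $command
    ((ctsend+=1))
done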

echo "${nFiles} jobs for day ${day} submitted"