#####################################################
# Author : Simona Flavoni for NEMO
# Contact : sflod@locean-ipsl.upmc.fr
#
# ----------------------------------------------------------------------
# NEMO/SETTE , NEMO Consortium (2010)
# Software governed by the CeCILL licence (NEMOGCM/NEMO_CeCILL.txt)
# ----------------------------------------------------------------------
#
# Some scripts called by sette.sh
# prepare_job.sh : creates the job script for running job
######################################################
#set -vx
# POSIX mode: stricter, more portable bash behaviour for this script
set -o posix
#set -u
#set -e
#+
#
# ================
# prepare_job.sh
# ================
#
# -------------------------------------------------
# script that creates the job script for NEMO tests
# -------------------------------------------------
#
# SYNOPSIS
# ========
#
# ::
#
#  $ ./prepare_job.sh INPUT_FILE_CONFIG_NAME NUMBER_PROC TEST_NAME MPI_FLAG JOB_FILE
#
#
# DESCRIPTION
# ===========
#
# Part of the SETTE package to run tests for NEMO
#
# prepare the script $JOB_FILE to run the tests
#
# EXAMPLES
# ========
#
# ::
#
#  $ ./prepare_job.sh INPUT_FILE_CONFIG_NAME NUMBER_PROC TEST_NAME MPI_FLAG $JOB_FILE
#
# prepare the $JOB_FILE for execution
#
#
# TODO
# ====
#
# option debug
#
#
# EVOLUTIONS
# ==========
#
# $Id: prepare_job.sh 3050 2011-11-07 14:11:34Z acc $
#
#
#
# * creation
#
#-
#

# Usage/help text echoed when the argument check below fails.
# BUGFIX: both strings were previously assigned to "usage", so the first
# (Usage) line was silently overwritten by the example line.
usage=" Usage : ./prepare_job.sh INPUT_FILE_CONFIG_NAME NUMBER_PROC TEST_NAME MPI_FLAG JOB_FILE NUM_XIO_SERVERS NEMO_VALIDATION_DIR"
# \$JOB_FILE is kept literal here: JOB_FILE is only assigned further down,
# so expanding it at this point would print an empty placeholder.
example=" example : ./prepare_job.sh input_ORCA2_ICE_PISCES.cfg 8 SHORT no/yes \$JOB_FILE 0 2"

# The script unconditionally consumes $1..$7 (NEMO_VALID is $7), and the
# example above passes 7 arguments.
# NOTE(review): was 6; bumped to 7 to match the actual argument list —
# confirm no caller in sette.sh relies on omitting NEMO_VALIDATION_DIR.
minargcount=7
if [ ${#} -lt ${minargcount} ]
then
   echo "not enough arguments for prepare_job.sh script"
   echo "control number of argument of prepare_job.sh in sette.sh"
   echo "${usage}"
   echo "${example}"
   exit 1
fi
unset minargcount

# Make sure the shared SETTE log file exists before anything appends to it.
if [ ! -f ${SETTE_DIR}/output.sette ] ; then
   touch ${SETTE_DIR}/output.sette
fi
#
# set and export TEST_NAME. It will be used within the post_test_tidyup function
#
INPUTARFILE=$1   # file listing "tarfile forcing-directory" pairs, one per line
NB_PROC=$2       # number of ocean MPI processes
TEST_NAME=$3     # test identifier (e.g. SHORT, LONG)
MPI_FLAG=$4      # "yes"/"no": whether the job runs through MPI
JOB_FILE=$5      # batch submission script to create or extend
NXIO_PROC=$6     # number of detached XIOS server processes
NEMO_VALID=$7    # NEMO validation directory

# export EXE_DIR. This directory is used to execute model
#
#
# Record the context of this run in the shared SETTE log.
echo "date: $(date)" >> ${SETTE_DIR}/output.sette
echo "" >> ${SETTE_DIR}/output.sette
echo "running config: ${NEW_CONF}" >> ${SETTE_DIR}/output.sette
echo "" >> ${SETTE_DIR}/output.sette
echo "list of cpp_keys: " >> ${SETTE_DIR}/output.sette
# dump the cpp-key file with cat: "more" is an interactive pager and has no
# business inside a command substitution (behaviour is identical here)
echo "$(cat ${CONFIG_DIR}/${NEW_CONF}/cpp_${NEW_CONF}.fcm)" >> ${SETTE_DIR}/output.sette
echo "" >> ${SETTE_DIR}/output.sette
echo "compiling with: ${CMP_NAM}" >> ${SETTE_DIR}/output.sette
echo "" >> ${SETTE_DIR}/output.sette
echo "executing script : \"fcm_job $@\" " >> ${SETTE_DIR}/output.sette
echo " " >> ${SETTE_DIR}/output.sette

################################################################
# SET INPUT
# get the input tarfile if needed
# ${INPUTARFILE} lists "tarfile forcing-subdirectory" pairs, one per line;
# a file with no words at all means there is nothing to fetch.
if [ "$(cat ${SETTE_DIR}/$INPUTARFILE | wc -w)" -ne 0 ] ; then
   echo "looking for input files in ${SETTE_DIR}/$INPUTARFILE " >> ${SETTE_DIR}/output.sette
   # number of tarfiles: NBTAR
   NBTAR=`cat ${SETTE_DIR}/$INPUTARFILE |wc -l`
   echo "NB of tarfiles ${NBTAR} " >> ${SETTE_DIR}/output.sette
   # loop on tarfiles
   # read file name and directory
   while read tar_file dir_conf_forc
   do
      echo looking for tarfile ${tar_file} and directory ${FORCING_DIR}/${dir_conf_forc}
      echo looking for tarfile ${tar_file} and directory ${FORCING_DIR}/${dir_conf_forc} >> ${SETTE_DIR}/output.sette
      if [ -d ${FORCING_DIR}/${dir_conf_forc} ] ; then
         # input dir ar there, only check the links
         echo "input dir ar there, only check the links" >> ${SETTE_DIR}/output.sette
         # extract tarfile
      else
         # directory not yet unpacked: the tarball itself must be present
         if [ ! -f ${FORCING_DIR}/${tar_file} ] ; then
            echo "tarfile ${FORCING_DIR}/${tar_file} cannot be found we stop " ; exit 2 ; fi

         echo " extract from tarfile ${FORCING_DIR}/${tar_file} in ${FORCING_DIR}/${dir_conf_forc}" >> ${SETTE_DIR}/output.sette

         # gz suffix selects gzip-aware tar flags (tfz/xvfz) below
         istgz=$( echo ${FORCING_DIR}/${tar_file} | grep -c "gz$" )
         # peek at the first archive entry: does it already carry the target
         # directory name? That decides where the extraction must happen.
         if [ $istgz -eq 1 ]
         then
            withdir=$( tar tfz ${FORCING_DIR}/${tar_file} | head -n 1 | grep -c "${dir_conf_forc}/$" )
         else
            withdir=$( tar tf ${FORCING_DIR}/${tar_file} | head -n 1 | grep -c "${dir_conf_forc}/$" )
         fi
         if [ $withdir -eq 0 ]
         then
            # flat archive: create the target directory and extract inside it
            mkdir ${FORCING_DIR}/${dir_conf_forc}
            cd ${FORCING_DIR}/${dir_conf_forc}
         else
            # archive already contains the directory: extract at the top level
            cd ${FORCING_DIR}
         fi
         if [ $istgz -eq 1 ]
         then
            tar xvfz ${FORCING_DIR}/${tar_file}
         else
            tar xvf ${FORCING_DIR}/${tar_file}
            # decompress any individually gzipped members of a plain tar
            [ $( ls -1 *gz 2>/dev/null | wc -l ) -gt 0 ] && gunzip -f *gz
         fi
      fi
      # Tarfile and input dir ar there, only check the links
      cd ${FORCING_DIR}/${dir_conf_forc}
      for fida in *
      do
         # symlink each forcing file into the run directory, skipping any
         # name already present there
         [ -f ${EXE_DIR}/${fida} ] || ln -s ${FORCING_DIR}/${dir_conf_forc}/${fida} ${EXE_DIR}/${fida}
      done
   done < ${SETTE_DIR}/$INPUTARFILE

else
   echo "no input file to be searched "
fi
################################################################

################################################################
# RUN OPA
cd ${EXE_DIR}
# Guard clause: refuse to continue when the model executable is absent
# or unreadable (messages and exit status identical to the original).
[ -r ${EXE_DIR}/nemo ] || {
   echo "executable nemo does not exist"
   echo "executable nemo does not exist, exit" >> ${SETTE_DIR}/output.sette
   exit 1
}


# example for NOCS ClusterVision system using SLURM batch submission (requires ${SETTE_DIR}/sette_batch_template file)
#
# if [ ${MPI_FLAG} == "no" ] ; then
#
# Work out how many batch nodes this run must request; ${COMPILER}
# selects the target machine. Sets NB_NODES and, on some hosts, also
# QUEUE / SELECT / GROUP_IDRIS / NB_PROC_NODE as side effects.
# Wrapped in a function so each machine branch can be exercised in
# isolation; the call at the bottom preserves the original behaviour.
compute_nb_nodes() {
   case ${COMPILER} in
      X64_MOBILIS)
         # ocean + xios processes are packed 16 to a node
         NB_REM=$( echo $NB_PROC $NXIO_PROC | awk '{print ( $1 + $2 ) % 16}')
         if [ ${NB_REM} == 0 ] ; then
            # total is an exact multiple of 16
            NB_NODES=$( echo $NB_PROC $NXIO_PROC | awk '{print ($1 + $2 ) / 16}')
         else
            # round up the number of nodes required
            NB_NODES=$( echo $NB_PROC $NXIO_PROC | awk '{printf("%d",($1 + $2 ) / 16 + 1 )}')
         fi
         ;;
      XC_ARCHER_INTEL)
         # ocean cores are packed 24 to a node
         NB_REM=$( echo $NB_PROC | awk '{print ( $1 % 24 ) }')
         if [ ${NB_REM} == 0 ] ; then
            NB_NODES=$( echo $NB_PROC $NXIO_PROC | awk '{print ($1) / 24}')
         else
            # round up the number of nodes required
            NB_NODES=$( echo $NB_PROC | awk '{printf("%d",($1) / 24 + 1 )}')
         fi
         # xios cores are sparsely packed at 4 to a node
         # but can not share nodes with the ocean cores
         # BUGFIX: only one field is piped in, so the remainder must use $1;
         # the original read $2 (always empty -> 0), making the round-up
         # branch below unreachable
         NB_REM=$( echo $NXIO_PROC | awk '{print ( $1 % 4 ) }')
         if [ ${NB_REM} == 0 ] ; then
            NB_NODES=$( echo $NB_NODES $NXIO_PROC | awk '{print ($1 + ( $2 / 4 ))}')
         else
            # round up; printf("%d",...) truncates to an integer node count
            NB_NODES=$( echo $NB_NODES $NXIO_PROC | awk '{printf("%d",$1 + ( $2 / 4 ) + 1)}')
         fi
         ;;
      XC40_METO*) #Setup for Met Office XC40 with any compiler
         # ocean cores are packed 32 to a node
         # If we need more than one node then have to use parallel queue and XIOS must have a node to itself
         NB_REM=$( echo $NB_PROC | awk '{print ( $1 % 32 ) }')
         if [ ${NB_REM} == 0 ] ; then
            NB_NODES=$( echo $NB_PROC $NXIO_PROC | awk '{print ($1) / 32}')
         else
            # round up the number of nodes required
            NB_NODES=$( echo $NB_PROC $NXIO_PROC | awk '{printf("%d",($1) / 32 + 1 )}')
         fi
         # xios cores are sparsely packed at 4 to a node
         if [ $NXIO_PROC == 0 ] ; then
            NB_XNODES=0
         else
            NB_REM=$( echo $NXIO_PROC | awk '{print ( $1 % 4 ) }')
            if [ ${NB_REM} == 0 ] ; then
               # NOTE(review): adds one node even for an exact multiple of 4;
               # looks over-generous but kept as-is — confirm on the XC40
               NB_XNODES=$( echo $NXIO_PROC | awk '{print (( $1 / 4 ) + 1)}')
            else
               # round up the number of nodes required
               NB_XNODES=$( echo $NXIO_PROC | awk '{printf("%d",($1) / 4 + 1) }')
            fi
         fi
         if [ ${NB_XNODES} -ge 1 ] ; then
            NB_NODES=$((NB_NODES+NB_XNODES))
         fi
         echo NB_XNODES=${NB_XNODES}
         echo Total NB_NODES=${NB_NODES}
         QUEUE=normal
         SELECT="select=$NB_NODES"
         module unload cray-snplauncher #Make sure snplauncher module is not loaded
         ;;
      X64_JEANZAY*) #Setup for Jean-Zay
         # IDRIS accounting group is characters 2-4 of the user name
         export GROUP_IDRIS=$(echo ${USER} | cut -c 2-4)
         ;;
      openmpi_KARA_MERCATOR*)
         NB_PROC_NODE=32
         # BUGFIX: the original embedded $NB_PROC_NODE inside single-quoted
         # awk programs, where the shell never expands it (awk then reads
         # $0), so NB_NODES always came out as 1. Plain shell arithmetic
         # gives the intended floor(NB_PROC / NB_PROC_NODE).
         NB_NODES=$(( NB_PROC / NB_PROC_NODE ))
         if [ ${NB_PROC} -le 128 ] ; then
            QUEUE=multi
         fi
         ;;
      ifort_beaufix_sette*)
         NB_PROC_NODE=40
         # BUGFIX: same single-quote/awk expansion problem as above
         NB_NODES=$(( NB_PROC / NB_PROC_NODE ))
         ;;
      *)
         # default: request one node per process
         NB_NODES=${NB_PROC}
         ;;
   esac
}
compute_nb_nodes
#
# Pass settings into job file by using sed to edit predefined strings
#
# Build run_sette_test.job (in the current directory, i.e. ${EXE_DIR})
# from ${SETTE_DIR}/job_batch_template by substituting placeholder tokens.
# Wrapped in a function so the substitution can be tested in isolation;
# the call below keeps the original top-level behaviour.
prepare_job_script() {
   # default unset counts to 0 so the arithmetic cannot abort the script
   TOTAL_NPROCS=$(( ${NB_PROC:-0} + ${NXIO_PROC:-0} ))
   # sed reads the template directly (no useless cat). Substitution order
   # matters: TOTAL_NPROCS must be replaced before the shorter NPROCS token.
   sed -e"s/\(=\| \)NODES/\1${NB_NODES}/" \
       -e"s/TOTAL_NPROCS/${TOTAL_NPROCS}/" \
       -e"s/NPROCS/${NB_PROC}/" \
       -e"s/NXIOPROCS/${NXIO_PROC}/" \
       -e"s:DEF_SETTE_DIR:${SETTE_DIR}:" -e"s:DEF_INPUT_DIR:${INPUT_DIR}:" \
       -e"s:DEF_EXE_DIR:${EXE_DIR}:" \
       -e"s:DEF_CONFIG_DIR:${CONFIG_DIR}:" \
       -e"s:DEF_TOOLS_DIR:${TOOLS_DIR}:" \
       -e"s:MPI_FLAG:${MPI_FLAG}:" \
       -e"s:DEF_NEMO_VALIDATION:${NEMO_VALID}:" -e"s:DEF_NEW_CONF:${NEW_CONF}:" \
       -e"s:DEF_CMP_NAM:${CMP_NAM}:" -e"s:DEF_TEST_NAME:${TEST_NAME}:" \
       ${SETTE_DIR}/job_batch_template > run_sette_test.job
}
prepare_job_script

# Second substitution pass: tokens that only exist in some machines'
# templates (processes-per-node, queue, PBS select, IDRIS group).
# Edits run_sette_test.job in place via a temporary file, as before;
# function-wrapped for testability, called exactly once.
customize_job_script() {
   case ${COMPILER} in
      openmpi_KARA_MERCATOR_XIOS )
         # fill in processes-per-node and the batch queue chosen earlier
         sed -e"s/NPROC_NODE/${NB_PROC_NODE}/" \
             -e"s:QUEUE:${QUEUE}:" run_sette_test.job > run_sette_test1.job
         mv run_sette_test1.job run_sette_test.job
         ;;
      XC40_METO*)
         # insert the PBS select statement computed earlier
         sed -e"s/SELECT/${SELECT}/" run_sette_test.job > run_sette_test1.job
         mv run_sette_test1.job run_sette_test.job
         ;;
      X64_JEANZAY*)
         # insert the IDRIS accounting group
         sed -e"s/GROUP_IDRIS/${GROUP_IDRIS}/" run_sette_test.job > run_sette_test1.job
         mv run_sette_test1.job run_sette_test.job
         ;;
   esac
}
customize_job_script
#
# create the unique submission job script
#
# If ${JOB_FILE} does not exist yet, run_sette_test.job simply becomes it.
# Otherwise keep the existing ${JOB_FILE} up to (excluding) its
# "# END_BODY" marker and append everything after the "# BODY" marker of
# the freshly generated run_sette_test.job. Function-wrapped for
# testability; the single call preserves the original behaviour.
finalize_job_file() {
   if [ ! -f $JOB_FILE ] ; then
      mv run_sette_test.job $JOB_FILE
   else
      # line number just before the "# END_BODY" marker of the existing file
      e=$(grep -n "# END_BODY" ${JOB_FILE} | cut -d : -f 1)
      e=$(($e - 1))
      head -$e $JOB_FILE > ${JOB_FILE}_new
      mv ${JOB_FILE}_new ${JOB_FILE}
      # lines of the new fragment after its "# BODY" marker
      # (wc -l < file avoids the old "wc file | sed" filename-stripping hack)
      l=$(wc -l < run_sette_test.job)
      b=$(grep -n "# BODY" run_sette_test.job | cut -d : -f 1)
      t=$(($l - $b))
      tail -$t run_sette_test.job >> $JOB_FILE
   fi

   chmod a+x $JOB_FILE ; echo "$JOB_FILE is ready"
}
finalize_job_file

#fi