#!/bin/bash
######################
##  CURIE TGCC/CEA  ##
######################
#MSUB -r XIOS
#MSUB -o client_output.out  # standard output
#MSUB -e client_error.err   # error output
#MSUB -eo
#MSUB -c 1
#MSUB -n 16                 # Number of MPI tasks (SPMD case) or cores (MPMD case)
#MSUB -X
#MSUB -x
#MSUB -T 1800               # Wall clock limit (seconds)
#MSUB -q skylake            # thin nodes
#MSUB -A devcmip6
#MSUB -Q test
#MSUB -m work

cd $BRIDGE_MSUB_PWD

module unload netcdf-c netcdf-fortran hdf5 flavor perl hdf5 boost blitz mpi gnu
module load gnu
module load mpi/intelmpi/2017.0.6.256
module load flavor/buildcompiler/intel/17
module load flavor/hdf5/parallel
module load netcdf-fortran/4.4.4
module load hdf5/1.8.20
module load boost
module load blitz
module load feature/bridge/heterogenous_mpmd
module load arm-forge
module load nco
module load cdo

export KMP_STACKSIZE=3g
export KMP_LIBRARY=turnaround
export MKL_SERIAL=YES
export OMP_NUM_THREADS=${BRIDGE_MSUB_NCORE}
set -x

# Profiling/debugging alternatives, kept for reference:
#export GMON_OUT_PREFIX='gmon.out'
#export TAU_MAKEFILE=$TAU_MAKEFILEDIR/Makefile.tau-icpc-papi-mpi-pdt-openmp-opari-scorep
#ccc_mprun -E '--enable_perf' amplxe-cl -collect hotspots -r ${PWD}/1omp_vtune/vtune_results ../../bin/test_send.exe
#ccc_mprun ../build_prod/bin/test_omp.exe 4
#ccc_mprun tau_exec -io ../../bin/test_send.exe
#ddt -start -n 8 ../build_intelmpi/bin/test_omp.exe

#============================= Run EP with IntelMPI =============================
export machine_name=irene
export xios_dir=/ccc/cont003/home/gencmip6/wangyush/XIOS/dev_trunk_omp
export build_dir=build_ep_intelmpi_prod

# Start from an empty setup.sh, then append one setup command per test case.
rm -f setup.sh
touch setup.sh

for i in test_*/
do
    cp setup.py ${i%%/}
    cp run_sub_test.sh ${i%%/}
    echo "bash -c \"cd ${i%%/} && python setup.py\"" >> setup.sh
    echo "echo \"setup.py called from ${i%%/}\"" >> setup.sh
done

# user_config.py is expected to report a numeric status on stderr (0 on success).
export output=$(python user_config.py 2>&1 >/dev/null)
if [ "$output" -ne 0 ]
then
    echo "user_config.py failed"
    exit 1
else
    echo "user_config.py OK"
fi

cmake .
ctest -V
#ctest --output-on-failure
make report
#echo "Generic testcase report" | mailx -s "report" -a report.html yushan.wang@lsce.ipsl.fr

# Clean up the per-test files generated for this run.
rm -f test_*/setup.py
rm -f test_*/run_sub_test.sh
rm -f test_*/run_test_*.py
rm -f test_*/CMakeLists.txt
rm -f test_*/context_grid_dynamico.xml
rm -f test_*/dynamico_grid.nc
rm -f test_*/default_param.pyc
rm -f test_*/user_param.pyc
rm -f test_*/user_param.py.*

# Keep the EP reports under a configuration-specific name.
rm -f report_ep_intelmpi.*
cp report.txt report_ep_intelmpi.txt
cp report.html report_ep_intelmpi.html

#============================= Run MPI with IntelMPI =============================
export machine_name=irene
export xios_dir=/ccc/cont003/home/gencmip6/wangyush/XIOS/dev_trunk_omp
export build_dir=build_mpi_intelmpi_prod

# Same setup as above, regenerated for the classical MPI build.
rm -f setup.sh
touch setup.sh

for i in test_*/
do
    cp setup.py ${i%%/}
    cp run_sub_test.sh ${i%%/}
    echo "bash -c \"cd ${i%%/} && python setup.py\"" >> setup.sh
    echo "echo \"setup.py called from ${i%%/}\"" >> setup.sh
done

export output=$(python user_config.py 2>&1 >/dev/null)
if [ "$output" -ne 0 ]
then
    echo "user_config.py failed"
    exit 1
else
    echo "user_config.py OK"
fi

cmake .
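# Aside (assumption, not part of the original workflow): ctest -V below runs
# the whole generated suite verbosely. When debugging a single case, ctest's
# standard -R filter can restrict the run by name, e.g.
#   ctest -V -R test_remap
# where "test_remap" is an illustrative test name, not one defined here.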
ctest -V
#ctest --output-on-failure
make report
#echo "Generic testcase report" | mailx -s "report" -a report.html yushan.wang@lsce.ipsl.fr

rm -f test_*/setup.py
rm -f test_*/run_sub_test.sh
rm -f test_*/run_test_*.py
rm -f test_*/CMakeLists.txt
rm -f test_*/context_grid_dynamico.xml
rm -f test_*/dynamico_grid.nc
rm -f test_*/default_param.pyc
rm -f test_*/user_param.pyc
rm -f test_*/user_param.py.*

rm -f report_mpi_intelmpi.*
cp report.txt report_mpi_intelmpi.txt
cp report.html report_mpi_intelmpi.html
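
#============================= Refactoring note =============================
# The EP and MPI blocks above are identical except for build_dir and the
# report suffix. A minimal sketch of how they could be factored into one
# function is left commented out below; "run_suite" and its "suffix"
# argument are illustrative names, not part of the original script:
#
# run_suite () {
#     local suffix=$1                        # e.g. ep_intelmpi or mpi_intelmpi
#     export build_dir=build_${suffix}_prod
#     rm -f setup.sh
#     touch setup.sh
#     for i in test_*/
#     do
#         cp setup.py ${i%%/}
#         cp run_sub_test.sh ${i%%/}
#         echo "bash -c \"cd ${i%%/} && python setup.py\"" >> setup.sh
#         echo "echo \"setup.py called from ${i%%/}\"" >> setup.sh
#     done
#     cmake . && ctest -V && make report
#     rm -f report_${suffix}.*
#     cp report.txt report_${suffix}.txt
#     cp report.html report_${suffix}.html
# }
#
# run_suite ep_intelmpi
# run_suite mpi_intelmpi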