-
Notifications
You must be signed in to change notification settings - Fork 6
/
Copy pathww3_systrackexe.sh
executable file
·132 lines (120 loc) · 5.32 KB
/
ww3_systrackexe.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
#!/bin/bash
set -xa
# -----------------------------------------------------------
# UNIX Shell Script File
# Tested Operating System(s): RHEL 5,6
# Tested Run Level(s): 3, 5
# Shell Used: BASH shell
# Original Author(s): [email protected]
# File Creation Date: 01/25/2011
# Date Last Modified: 07/09/2013
#
# Version control: 1.13
#
# Support Team:
#
# Contributors: Roberto Padilla-Hernandez
# -----------------------------------------------------------
# ------------- Program Description and Details -------------
# -----------------------------------------------------------
#
# Script used to run the clustering-based wave-system tracking
# (ww3_systrk) Python utilities in parallel and combine their output.
#
# -----------------------------------------------------------
# Setup our NWPS environment.
# Requires USHnwps to be exported by the calling job; sources the NWPS
# config when the environment has not been initialized yet.
if [ "${USHnwps}" == "" ]
then
    echo "ERROR - Your USHnwps variable is not set"
    export err=1; err_chk
fi
# Check to see if our NWPS env is set
if [ "${NWPSenvset}" == "" ]
then
    if [ -e ${USHnwps}/nwps_config.sh ];then
        source ${USHnwps}/nwps_config.sh
    else
        # BUGFIX: this message had no 'echo', so the shell tried to run the
        # error string itself as a command and the message was never printed.
        echo "ERROR - Cannot find ${USHnwps}/nwps_config.sh"
        export err=1; err_chk
    fi
fi
# Every later step reads/writes in ${RUNdir}; fail fast if we cannot get there.
cd ${RUNdir} || { echo "ERROR - Cannot cd to ${RUNdir}"; export err=1; err_chk; }
# Pick up OS/architecture settings if any of them are missing.
if [ "${ARCH}" == "" ] || [ "${ARCHBITS}" == "" ] || [ "${NUMCPUS}" == "" ] || [ "${MPIEXEC}" == "" ]
then
    source ${USHnwps}/set_os_env.sh
fi
# Start a fresh tracking-info log for this run.
cat /dev/null > ${LOGdir}/systrk_info.log
echo "Starting clustering-based Python script ww3_systrk_cluster.py"
echo " In ww3_systrackexe.sh, calling mpiexec"
# Step 1: Search for optimum number of clusters in parallel using silhouette coefficient
# Emit one command line per candidate cluster count (2..5) for cfp to fan out.
cat /dev/null > ${RUNdir}/ww3_systrk_elements.sh
for nclust in 2 3 4 5
do
    echo "${PYTHON} ${NWPSdir}/ush/python/ww3_systrk_cluster_silhouette.py ${SITEID,,} ${nclust}"
done >> ${RUNdir}/ww3_systrk_elements.sh
mpiexec -np 4 --cpu-bind verbose,core cfp ${RUNdir}/ww3_systrk_elements.sh
export err=$?
if [ "${err}" != "0" ];then
    # Mirror the error report to stdout and the tracking log, then abort.
    {
        echo " ============ E R R O R ==============="
        echo "Exit Code: ${err}"
        echo " Something went wrong running ww3_systrk_cluster_silhouette.py"
        echo " HERE IS WHAT WE HAVE IN THE FILE "
        echo " "
        echo " ${DATAdir}/logs/run_wavetrack_exe_error.log"
    } | tee -a ${LOGdir}/systrk_info.log
    cat ${DATAdir}/logs/run_wavetrack_exe_error.log >> ${LOGdir}/systrk_info.log
    msg="FATAL ERROR: Wave system tracking script ww3_systrk_cluster_silhouette.py failed."
    postmsg "$jlogfile" "$msg"
    err_chk
fi
# Step 2: Calculate wave systems using optimum number of clusters (in parallel)
# Emit one command line per partition index (0..5) for cfp to fan out.
cat /dev/null > ${RUNdir}/ww3_systrk_jobs.sh
for idx in 0 1 2 3 4 5
do
    echo "${PYTHON} ${NWPSdir}/ush/python/ww3_systrk_cluster_parallel.py ${SITEID,,} ${idx}"
done >> ${RUNdir}/ww3_systrk_jobs.sh
mpiexec -np 6 --cpu-bind verbose,core cfp ${RUNdir}/ww3_systrk_jobs.sh
export err=$?
if [ "${err}" != "0" ];then
    # Mirror the error report to stdout and the tracking log, then abort.
    {
        echo " ============ E R R O R ==============="
        echo "Exit Code: ${err}"
        echo " Something went wrong running ww3_systrk_cluster.py"
        echo " HERE IS WHAT WE HAVE IN THE FILE "
        echo " "
        echo " ${DATAdir}/logs/run_wavetrack_exe_error.log"
    } | tee -a ${LOGdir}/systrk_info.log
    cat ${DATAdir}/logs/run_wavetrack_exe_error.log >> ${LOGdir}/systrk_info.log
    msg="FATAL ERROR: Wave system tracking script ww3_systrk_cluster.py failed."
    postmsg "$jlogfile" "$msg"
    err_chk
else
    {
        echo " ww3_systrk_cluster.py run was successful "
        echo " Exit Code: ${err}"
    } | tee -a ${LOGdir}/systrk_info.log
fi
# Step 3: Combine component output files
# Concatenate the per-rank partial files (suffix -00?) into the final
# combined file for each wave-system field. The '>' redirection already
# truncates the target, so no separate /dev/null truncation is needed.
for field in HSIGN DIR TP PNT
do
    cat ${RUNdir}/SYS_${field}.OUT-00? > ${RUNdir}/SYS_${field}.OUT
done
# Derive the cycle date (YYYYMMDD) and hour (HH) from the wind file name.
# BUGFIX: take the first match only; the old `ls *.wnd | cut` produced a
# multi-line value — and a broken bulletin filename — whenever more than
# one .wnd file was present in ${RUNdir}.
wndfile=$(ls *.wnd 2>/dev/null | head -1)
yyyymmdd=${wndfile:0:8}
hh=${wndfile:8:2}
# Merge the per-rank bulletin pieces into the final tracking bulletin.
cat ${SITEID,,}_nwps_CG0_Trkng_*.bull-00? > ${SITEID,,}_nwps_CG0_Trkng_${yyyymmdd}_${hh}00.bull
# Remove the partial files now that they have been merged; -f keeps the
# cleanup quiet if a partial is already gone.
rm -f ${RUNdir}/SYS_HSIGN.OUT-00?
rm -f ${RUNdir}/SYS_DIR.OUT-00?
rm -f ${RUNdir}/SYS_TP.OUT-00?
rm -f ${RUNdir}/SYS_PNT.OUT-00?
rm -f ${SITEID,,}_nwps_CG0_Trkng_*.bull-00?
#if [ "${err}" == "0" ];then
# mv -fv sys_pnt.ww3 SYS_PNT.OUT
# mv -fv sys_coord.ww3 SYS_COORD.OUT
# mv -fv sys_hs.ww3 SYS_HSIGN.OUT
# mv -fv sys_tp.ww3 SYS_TP.OUT
# mv -fv sys_dir.ww3 SYS_DIR.OUT
# mv -fv sys_dspr.ww3 SYS_DSPR.OUT
#fi
exit 0
# -----------------------------------------------------------
# *******************************
# ********* End of File *********
# *******************************