diff --git a/ush/getges.sh b/ush/getges.sh
index b50655d51fb..2d6d9dc622c 100755
--- a/ush/getges.sh
+++ b/ush/getges.sh
@@ -20,7 +20,7 @@
 #             sfcges, sfcgm3, sfcgm2, sfcgm1, sfcgp1, sfcgp2, sfcgp3,
 #             biascr, satang, satcnt, gesfil
 #             pgbges, pgiges, pgbgm6, pgigm6, pgbgm3, pgigm3, pgbgp3, pgigp3,
-#             sigcur, sfccur, pgbcur, pgicur, prepqc, tcvg12, tcvges, tcvitl, 
+#             sigcur, sfccur, pgbcur, pgicur, prepqc, tcvg12, tcvges, tcvitl,
 #             enggrb, enggri, icegrb, icegri, snogrb, snogrb_high, snogri, sstgrb, sstgri.
 #             natges, natgm3, natgm2, natgm1, natgp1, natgp2, natgp3, natcur,
 #             nsfges, nsfgm3, nsfgm2, nsfgm1, nsfgp1, nsfgp2, nsfgp3, nsfcur,
@@ -37,7 +37,7 @@
 #   The script uses the utility command NHOUR.
 #
 # Example 1. Copy the production sigma guess for 1998100100 to the file sges.
-#  getges.sh -e prod -t sigges -v 1998100100 sges 
+#  getges.sh -e prod -t sigges -v 1998100100 sges
 #
 # Example 2. Assign the pressure grib guess for the date 1998100121.
 #  export XLFUNIT_12="$(getges.sh -qt pgbges||echo /dev/null)"
@@ -148,7 +148,7 @@ if [[ $gfile = '?' || $# -gt 1 || $err -ne 0 || -z $valid ||\
   echo "       gfile is the guess file to write" >&2
   echo "       (default is to write the guess file name to stdout)" >&2
  else
-  echo "       (Note: set a given option to '?' for more details)" >&2 
+  echo "       (Note: set a given option to '?' for more details)" >&2
  fi
  exit 1
 fi
@@ -158,7 +158,7 @@ if [[ $envir != prod && $envir != test && $envir != para && $envir != dump && $e
  envir=prod
  echo '************************************************************' >&2
  echo '* WARNING: Using "-e" is deprecated in this case.          *' >&2
- echo '* Please use "-n" instead.                                 *' >&2 
+ echo '* Please use "-n" instead.                                 *' >&2
  echo '************************************************************' >&2
 fi
 if [[ "$netwk" = "namopl" || "$resol" = "namopl" ]];then
@@ -188,13 +188,13 @@ if [[ $typef = enggrb ]];then
  typef=icegrb
  echo '************************************************************' >&2
  echo '* WARNING: Using "-t enggrb" is now deprecated.            *' >&2
- echo '*          Please use "-t icegrb".                         *' >&2 
+ echo '*          Please use "-t icegrb".                         *' >&2
  echo '************************************************************' >&2
 elif [[ $typef = enggri ]];then
  typef=icegri
  echo '************************************************************' >&2
  echo '* WARNING: Using "-t enggri" is now deprecated.            *' >&2
- echo '*          Please use "-t icegri".                         *' >&2 
+ echo '*          Please use "-t icegri".                         *' >&2
  echo '************************************************************' >&2
 fi
 
@@ -224,26 +224,26 @@ if [[ "$netwk" = "gdas" ]];then
    $COMINgdas/gdas.t${cyc}z.radstat'
   ;;
  pgbges) geslist='
-   $COMINgdas/gdas.t${cyc}z.pgrbh$fh 
+   $COMINgdas/gdas.t${cyc}z.pgrbh$fh
    $COMINgdas/gdas.t${cyc}z.pgrbf$fh'
   ;;
  pg2ges) geslist='
    $COMINgdas/gdas.t${cyc}z.pgrb2.0p25.f$gh'
   ;;
  pgbgm6) geslist='
-   $COMINgdas/gdas.t${cyc}z.pgrbh$fhm6 
+   $COMINgdas/gdas.t${cyc}z.pgrbh$fhm6
    $COMINgdas/gdas.t${cyc}z.pgrbf$fhm6'
   ;;
  pgbgm3) geslist='
-   $COMINgdas/gdas.t${cyc}z.pgrbh$fhm3 
+   $COMINgdas/gdas.t${cyc}z.pgrbh$fhm3
    $COMINgdas/gdas.t${cyc}z.pgrbf$fhm3'
   ;;
  pgbgp3) geslist='
-   $COMINgdas/gdas.t${cyc}z.pgrbh$fhp3 
+   $COMINgdas/gdas.t${cyc}z.pgrbh$fhp3
    $COMINgdas/gdas.t${cyc}z.pgrbf$fhp3'
   ;;
  pgbcur) geslist='
-   $COMINgdas/gdas.t${cyc}z.pgrbh$fh 
+   $COMINgdas/gdas.t${cyc}z.pgrbh$fh
    $COMINgdas/gdas.t${cyc}z.pgrbf$fh'
   fhbeg=00
   ;;
@@ -446,35 +446,35 @@ elif [[ "$netwk" = "cfs-cdas" ]];then
    $COMINcfs_cdas/cdas1.t${cyc}z.sfluxgrbf$fhp3'
   ;;
  pgbges) geslist='
-   $COMINcfs_cdas/cdas1.t${cyc}z.pgrbh$fh 
+   $COMINcfs_cdas/cdas1.t${cyc}z.pgrbh$fh
    $COMINcfs_cdas/cdas1.t${cyc}z.pgrbf$fh'
   ;;
  pgiges) geslist='
-   $COMINcfs_cdas/cdas1.t${cyc}z.pgrbih$fh 
+   $COMINcfs_cdas/cdas1.t${cyc}z.pgrbih$fh
    $COMINcfs_cdas/cdas1.t${cyc}z.pgrbif$fh'
   ;;
  pgbgm6) geslist='
-   $COMINcfs_cdas/cdas1.t${cyc}z.pgrbh$fhm6 
+   $COMINcfs_cdas/cdas1.t${cyc}z.pgrbh$fhm6
    $COMINcfs_cdas/cdas1.t${cyc}z.pgrbf$fhm6'
   ;;
  pgigm6) geslist='
-   $COMINcfs_cdas/cdas1.t${cyc}z.pgrbih$fhm6 
+   $COMINcfs_cdas/cdas1.t${cyc}z.pgrbih$fhm6
    $COMINcfs_cdas/cdas1.t${cyc}z.pgrbif$fhm6'
   ;;
  pgbgm3) geslist='
-   $COMINcfs_cdas/cdas1.t${cyc}z.pgrbh$fhm3 
+   $COMINcfs_cdas/cdas1.t${cyc}z.pgrbh$fhm3
    $COMINcfs_cdas/cdas1.t${cyc}z.pgrbf$fhm3'
   ;;
  pgigm3) geslist='
-   $COMINcfs_cdas/cdas1.t${cyc}z.pgrbih$fhm3 
+   $COMINcfs_cdas/cdas1.t${cyc}z.pgrbih$fhm3
    $COMINcfs_cdas/cdas1.t${cyc}z.pgrbif$fhm3'
   ;;
  pgbgp3) geslist='
-   $COMINcfs_cdas/cdas1.t${cyc}z.pgrbh$fhp3 
+   $COMINcfs_cdas/cdas1.t${cyc}z.pgrbh$fhp3
    $COMINcfs_cdas/cdas1.t${cyc}z.pgrbf$fhp3'
   ;;
  pgigp3) geslist='
-   $COMINcfs_cdas/cdas1.t${cyc}z.pgrbih$fhp3 
+   $COMINcfs_cdas/cdas1.t${cyc}z.pgrbih$fhp3
    $COMINcfs_cdas/cdas1.t${cyc}z.pgrbif$fhp3'
   ;;
  sigcur) geslist='
@@ -490,12 +490,12 @@ elif [[ "$netwk" = "cfs-cdas" ]];then
   fhbeg=00
   ;;
  pgbcur) geslist='
-   $COMINcfs_cdas/cdas1.t${cyc}z.pgrbh$fh 
+   $COMINcfs_cdas/cdas1.t${cyc}z.pgrbh$fh
    $COMINcfs_cdas/cdas1.t${cyc}z.pgrbf$fh'
   fhbeg=00
   ;;
  pgicur) geslist='
-   $COMINcfs_cdas/cdas1.t${cyc}z.pgrbih$fh 
+   $COMINcfs_cdas/cdas1.t${cyc}z.pgrbih$fh
    $COMINcfs_cdas/cdas1.t${cyc}z.pgrbif$fh'
   fhbeg=00
   ;;
@@ -1405,7 +1405,7 @@ if [[ -z "$gfile" ]];then
   echo ${ges}
   err=$?
  else
-  cp ${ges} ${gfile}
+  cpfs ${ges} ${gfile}
   err=$?
 fi
 
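Note on the getges.sh change above: the final copy to ${gfile} now goes through cpfs instead of plain cp. cpfs is assumed here to be the prod_util "copy to final destination safely" wrapper: it stages the copy under a temporary name and renames it into place, so a downstream consumer polling for the guess file never reads a partially written copy. A minimal sketch of that assumed behavior (hypothetical implementation, not the operational utility):

    cpfs() {
      # Copy src to dst atomically: stage under a hidden temp name in the
      # destination directory, then rename into place in one operation.
      local src=$1 dst=$2
      local tmp
      tmp="$(dirname "${dst}")/.$(basename "${dst}").$$"
      cp "${src}" "${tmp}" && mv -f "${tmp}" "${dst}"
    }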
diff --git a/ush/minmon_xtrct_costs.pl b/ush/minmon_xtrct_costs.pl
index c56ac3bdad8..765526b40d1 100755
--- a/ush/minmon_xtrct_costs.pl
+++ b/ush/minmon_xtrct_costs.pl
@@ -3,7 +3,7 @@
 #---------------------------------------------------------------------------
 #  minmon_xtrct_costs.pl
 #
-#  Extract cost data from gsistat file and load into cost 
+#  Extract cost data from gsistat file and load into cost
 #  and cost term files.
 #---------------------------------------------------------------------------
 
@@ -63,7 +63,7 @@ my $jl_number = 8;
 
 my $costfile = $ENV{"mm_costfile"};
- 
+
 if( (-e $costfile) ) {
    open( COSTFILE, "<${costfile}" ) or die "Can't open ${costfile}: $!\n";
    my $line;
@@ -87,7 +87,7 @@
    #------------------------------------------------------------------------
    #  Open the infile and search for the $costterms_target and $cost_target
-   #  strings.  If found, parse out the cost information and push into 
+   #  strings.  If found, parse out the cost information and push into
    #  holding arrays.
    #------------------------------------------------------------------------
    if( $rc == 0 ) {
@@ -107,14 +107,14 @@
             $use_costterms = 1;
          }
 
-         if( $line =~ /$cost_target/ ) { 
+         if( $line =~ /$cost_target/ ) {
             my @costline = split( / +/, $line );
            push( @cost_array, $costline[$cost_number] );
         }
 
         if( $term_ctr > 0 ) {
            my @termline = split( / +/, $line );
- 
+
            if ( $term_ctr < 10 ) {
               push( @term_array, trim($termline[1]) );
               push( @term_array, trim($termline[2]) );
@@ -132,7 +132,7 @@
               push( @term_array, trim($termline[3]) );
               push( @term_array, trim($termline[4]) );
               $term_ctr = 1;
-           } 
+           }
         }
      }
      close( INFILE );
@@ -145,13 +145,13 @@
    for my $i (0 .. $#cost_array) {
       my $iterline;
       if( $use_costterms == 1 ){
-         $iterline = sprintf ' %d,%e,%e,%e,%e,%e%s', 
-                     $i, $cost_array[$i], $jb_array[$i], $jo_array[$i], 
+         $iterline = sprintf ' %d,%e,%e,%e,%e,%e%s',
+                     $i, $cost_array[$i], $jb_array[$i], $jo_array[$i],
                      $jc_array[$i], $jl_array[$i], "\n";
       }
       else {
-         $iterline = sprintf ' %d,%e,%e,%e,%e,%e%s', 
-                     $i, $cost_array[$i], $no_data, $no_data, 
+         $iterline = sprintf ' %d,%e,%e,%e,%e,%e%s',
+                     $i, $cost_array[$i], $no_data, $no_data,
                      $no_data, $no_data, "\n";
       }
 
@@ -161,7 +161,7 @@
    #---------------------------------------------------
    #  move term_array into all_cost_terms by iteration
    #---------------------------------------------------
-   if( @term_array > 0 ) { 
+   if( @term_array > 0 ) {
       my $nterms = 32;
       my $max_iter = ($#term_array+1)/$nterms;
       my $niter = $max_iter -1;
@@ -170,18 +170,18 @@
          my $step = $iter * $nterms;
          my $iterline = sprintf '%d, %e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e%s',
                 $iter, $term_array[$step], $term_array[$step+1], $term_array[$step+2],
-                $term_array[$step+3], $term_array[$step+4], $term_array[$step+5], 
-                $term_array[$step+6], $term_array[$step+7], $term_array[$step+8], 
-                $term_array[$step+9], $term_array[$step+10], $term_array[$step+11], 
-                $term_array[$step+12], $term_array[$step+13], $term_array[$step+14], 
-                $term_array[$step+15], $term_array[$step+16], $term_array[$step+17], 
-                $term_array[$step+18], $term_array[$step+19], $term_array[$step+20], 
-                $term_array[$step+21], $term_array[$step+22], $term_array[$step+23], 
-                $term_array[$step+24], $term_array[$step+25], $term_array[$step+26], 
-                $term_array[$step+27], $term_array[$step+28], $term_array[$step+29], 
+                $term_array[$step+3], $term_array[$step+4], $term_array[$step+5],
+                $term_array[$step+6], $term_array[$step+7], $term_array[$step+8],
+                $term_array[$step+9], $term_array[$step+10], $term_array[$step+11],
+                $term_array[$step+12], $term_array[$step+13], $term_array[$step+14],
+                $term_array[$step+15], $term_array[$step+16], $term_array[$step+17],
+                $term_array[$step+18], $term_array[$step+19], $term_array[$step+20],
+                $term_array[$step+21], $term_array[$step+22], $term_array[$step+23],
+                $term_array[$step+24], $term_array[$step+25], $term_array[$step+26],
+                $term_array[$step+27], $term_array[$step+28], $term_array[$step+29],
                 $term_array[$step+30], $term_array[$step+31], "\n";
          push( @all_cost_terms, $iterline );
-      } 
+      }
    }
 
    #------------------------------------------
@@ -210,15 +210,15 @@
    my $tankdir = $ENV{"M_TANKverf"};
    if(! -d $tankdir) {
       system( "mkdir -p $tankdir" );
-   } 
+   }
 
    if( -e $filename2 ) {
       my $newfile2 = "${tankdir}/${filename2}";
-      system("cp -f $filename2 $newfile2");
+      system("cpfs $filename2 $newfile2");
    }
    if( -e $filename3 ) {
      my $newfile3 = "${tankdir}/${filename3}";
-     system("cp -f $filename3 $newfile3");
+     system("cpfs $filename3 $newfile3");
    }
 
 }   # $rc still == 0 after reading gmon_cost.txt
diff --git a/ush/minmon_xtrct_gnorms.pl b/ush/minmon_xtrct_gnorms.pl
index ac83c08cd3b..d4913b0619b 100755
--- a/ush/minmon_xtrct_gnorms.pl
+++ b/ush/minmon_xtrct_gnorms.pl
@@ -9,7 +9,7 @@
 #---------------------------------------------------------------------------
 #  minmon_xtrct_gnorms.pl
 #
-#  Update the gnorm_data.txt file with data from a new cycle.  Add 
+#  Update the gnorm_data.txt file with data from a new cycle.  Add
 #  this new data to the last line of the gnorm_data.txt file.
 #
 #  Note:  If the gnorm_data.txt file does not exist, it will be created.
@@ -28,17 +28,17 @@ sub updateGnormData {
    my $rc = 0;
    my @filearray;
- 
-   my $gdfile = "gnorm_data.txt"; 
+
+   my $gdfile = "gnorm_data.txt";
    my $outfile = "new_gnorm_data.txt";
 
    my $yr  = substr( $cycle, 0, 4);
    my $mon = substr( $cycle, 4, 2);
    my $day = substr( $cycle, 6, 2);
    my $hr  = substr( $cycle, 8, 2);
- 
-   my $newln = sprintf ' %04d,%02d,%02d,%02d,%e,%e,%e,%e,%e%s', 
-               $yr, $mon, $day, $hr, $igrad, $fgnorm, 
+
+   my $newln = sprintf ' %04d,%02d,%02d,%02d,%e,%e,%e,%e,%e%s',
+               $yr, $mon, $day, $hr, $igrad, $fgnorm,
                $avg_gnorm, $min_gnorm, $max_gnorm, "\n";
 
    #-------------------------------------------------------------
@@ -49,12 +49,12 @@ sub updateGnormData {
 
       @filearray = <INFILE>;
 
-#     This is the mechanism that limits the data to 30 days worth.  Should I 
+#     This is the mechanism that limits the data to 30 days worth.  Should I
 #     keep it or let the transfer script(s) truncate?  6/12/16 -- I'm going to keep
-#     it.  I can add this as a later change once I add a user mechanism to vary the 
+#     it.  I can add this as a later change once I add a user mechanism to vary the
 #     amount of data plotted (on the fly).
 
-      my $cyc_interval = $ENV{'CYCLE_INTERVAL'}; 
+      my $cyc_interval = $ENV{'CYCLE_INTERVAL'};
       if( $cyc_interval eq "" ) {
          $cyc_interval = 6;
       }
@@ -65,35 +65,35 @@ sub updateGnormData {
       if( $cyc_interval != 6 && $cyc_interval != 0 ) {
          my $cyc_per_day = 24 / $cyc_interval;
         $max_cyc = (30 * $cyc_per_day) - 1;
-      } 
- 
-      while( $#filearray > $max_cyc ) { 
+      }
+
+      while( $#filearray > $max_cyc ) {
         shift( @filearray );
      }
      close( INFILE );
   }
 
   # Here is the problem Russ encountered after re-running the MinMon:
-  # If the cycle time in $newln is the same as an existing record in 
+  # If the cycle time in $newln is the same as an existing record in
   # *.gnorm_data.txt then we end up with 2+ rows for the same cycle time.
   # In that case $newln should replace the first existing line
   # in @filearray and all other lines that might match should be deleted.
-  # Else when the cycle time doesn't already exist (the expected condition) 
+  # Else when the cycle time doesn't already exist (the expected condition)
   # it should be pushed into @filearray.
- 
-  # algorithm: 
+
+  # algorithm:
   #  =========
   #  Establish $count of matches on "$yr,$mon,$day,$hr"
-  #  if $count > 0 
+  #  if $count > 0
   #     while $count > 1
   #        get last_index and remove with splice
   #     replace first_index with $newln
   #  else
   #     push $newln
-  # 
+  #
   my $srch_strng = "$yr,$mon,$day,$hr";
   my $count = true { /$srch_strng/ } @filearray;
- 
+
   if( $count > 0 ) {
      while( $count > 1 ) {
        my $l_index = last_index { /$srch_strng/ } @filearray;
@@ -106,19 +106,19 @@ sub updateGnormData {
   else {
      push( @filearray, $newln );
   }
- 
+
   open( OUTFILE, ">$outfile" ) or die "Can't open ${$outfile}: $!\n";
   print OUTFILE @filearray;
   close( OUTFILE );
 
-  system("cp -f $outfile $gdfile");
+  system("cpfs $outfile $gdfile");
 
 }
 
 #---------------------------------------------------------------------------
 #  makeErrMsg
 #
-#  Apply a gross check on the final value of the gnorm for a specific 
+#  Apply a gross check on the final value of the gnorm for a specific
 #  cycle.  If the final_gnorm value is greater than the gross_check value
 #  then put that in the error message file.  Also check for resets or a
 #  premature halt, and journal those events to the error message file too.
@@ -134,7 +134,7 @@ sub updateGnormData {
    my $reset_flag  = $_[5];
    my $reset_iter  = $_[6];   #reset iteration array
    my $infile      = $_[7];
-   my $gross_check = $_[8]; 
+   my $gross_check = $_[8];
 
    my $mail_msg ="";
    my $out_file = "${cycle}.errmsg.txt";
@@ -146,7 +146,7 @@ sub updateGnormData {
    }
 
    if( $reset_flag > 0 ) {
-      my $ctr=0; 
+      my $ctr=0;
       my $reset_msg = "\n  Gnorm check detected $reset_flag reset(s):  suffix = $suffix, cycle = $cycle";
       $mail_msg .= $reset_msg;
       $mail_msg .= "\n";
@@ -175,7 +175,7 @@ sub updateGnormData {
       print OUTFILE $mail_msg;
       print OUTFILE "\n\n  $mail_link";
       close( OUTFILE );
-   } 
+   }
 }
 
 
@@ -202,7 +202,7 @@ sub updateGnormData {
 #
 # This needs to be redesigned to get the gnorm value from the gsistat file
-# using the line that starts "cost,grad,step,b,step?:".  The line formerly 
+# using the line that starts "cost,grad,step,b,step?:".  The line formerly
 # used for the gnorm and reduction values may not be available if the the
 # verbose output flag is set to FALSE.
 #
@@ -245,7 +245,7 @@ sub updateGnormData {
    $rc = 4;
 }
 
-if( $rc == 0 ) { 
+if( $rc == 0 ) {
 
    if( (-e $infile) ) {
       open( INFILE, "<${infile}" ) or die "Can't open ${infile}: $!\n";
@@ -271,21 +271,21 @@ sub updateGnormData {
       while( $line = <INFILE> ) {
 
         ##############################################
-        # if the reset_iter_flag is 1 then record the 
+        # if the reset_iter_flag is 1 then record the
        # current outer & inner iteration number
        ##############################################
        if( $reset_iter_flag == 1 ) {
           if( $line =~ /${igrad_target}/ ) {
-             my @iterline = split( / +/, $line ); 
+             my @iterline = split( / +/, $line );
              my $iter_str = $iterline[2] . "," . $iterline[3];
              push( @reset_iter, $iter_str);
-             $reset_iter_flag = 0; 
+             $reset_iter_flag = 0;
           }
        }
 
        if( $line =~ /${igrad_target}/ ) {
-          my @gradline = split( / +/, $line ); 
+          my @gradline = split( / +/, $line );
 
           my $grad = $gradline[$igrad_number];
@@ -297,7 +297,7 @@ sub updateGnormData {
           my $igrad_sqr = $igrad**2;
           my $grad_sqr  = $grad**2;
           my $gnorm     = $grad_sqr/$igrad_sqr;
- 
+
           push( @gnorm_array, $gnorm );
        }
 
@@ -318,7 +318,7 @@ sub updateGnormData {
 
       ########################################################################
       # If the stop_flag is >0 then record the last outer & inner
-      # iteration number.  The trick is that it's the last iteration in the 
+      # iteration number.  The trick is that it's the last iteration in the
       # log file and we just passed it when we hit the stop warning message,
       # so we have to reopen the file and get the last iteration number.
       ########################################################################
@@ -328,7 +328,7 @@ sub updateGnormData {
          my @lines = reverse <INFILE>;
         foreach $line (@lines) {
            if( $line =~ /${igrad_target}/ ){
-              my @iterline = split( / +/, $line ); 
+              my @iterline = split( / +/, $line );
               $stop_iter = $iterline[2] . "," . $iterline[3];
               last;
            }
@@ -338,17 +338,17 @@ sub updateGnormData {
 
       my @all_gnorm = @gnorm_array;
- 
-      ############################################################################## 
+
+      ##############################################################################
      ##
      ##  If the iterations were halted due to error then the @all_gnorm array won't
-     ##  be the expected size.  In that case we need to pad the array out with 
+     ##  be the expected size.  In that case we need to pad the array out with
      ##  RMISS values so GrADS won't choke when it tries to read the data file.
      ##
      ##  Note that we're padding @all_gnorm.  The @gnorm_array is examined below
      ##  and we don't want to pad that and mess up the min/max calculation.
-     ## 
-     ############################################################################### 
+     ##
+     ###############################################################################
 
      my $arr_size = @all_gnorm;
      if( $arr_size < $expected_gnorms ) {
@@ -378,19 +378,19 @@ sub updateGnormData {
 
       $avg_gnorm = $sum_10_gnorm / 10;
 
- 
+
       #####################################################################
-      #  Update the gnorm_data.txt file with information on the 
-      #  initial gradient, final gnorm, and avg/min/max for the last 10 
+      #  Update the gnorm_data.txt file with information on the
+      #  initial gradient, final gnorm, and avg/min/max for the last 10
       #  iterations.
       #####################################################################
       updateGnormData( $cdate,$igrad,$final_gnorm,$avg_gnorm,$min_gnorm,$max_gnorm,$suffix );
 
       #####################################################################
-      #  Call makeErrMsg to build the error message file to record any 
+      #  Call makeErrMsg to build the error message file to record any
       #  abnormalities in the minimization.  This file can be mailed by
-      #  a calling script. 
+      #  a calling script.
       #####################################################################
       makeErrMsg( $suffix, $cdate, $final_gnorm, $stop_flag, $stop_iter, $reset_flag, \@reset_iter, $infile, $gross_check_val );
 
@@ -398,7 +398,7 @@ sub updateGnormData {
       #########################################################
       # write to GrADS ready output data file
       #
-      #  Note: this uses pack to achieve the same results as 
+      #  Note: this uses pack to achieve the same results as
       #  an unformatted binary Fortran file.
       #########################################################
       my $filename2 = "${cdate}.gnorms.ieee_d";
@@ -417,21 +417,21 @@ sub updateGnormData {
       if(! -d $tankdir) {
          system( "mkdir -p $tankdir" );
       }
- 
+
       if( -e $filename2 ) {
-         system("cp -f $filename2 ${tankdir}/.");
+         system("cpfs $filename2 ${tankdir}/.");
       }
 
-      my $gdfile = "gnorm_data.txt"; 
+      my $gdfile = "gnorm_data.txt";
       if( -e $gdfile ) {
-         system("cp -f $gdfile ${tankdir}/.");
+         system("cpfs $gdfile ${tankdir}/.");
       }
 
       my $errmsg = "${cdate}.errmsg.txt";
       if( -e $errmsg ) {
-         system("cp -f $errmsg ${tankdir}/.");
+         system("cpfs $errmsg ${tankdir}/.");
       }
- 
+
    }   # $rc still == 0 after reading gmon_gnorm.txt
 
 }else {                                # $infile does not exist
diff --git a/ush/minmon_xtrct_reduct.pl b/ush/minmon_xtrct_reduct.pl
index cc5da86af8c..7290a27ba5c 100755
--- a/ush/minmon_xtrct_reduct.pl
+++ b/ush/minmon_xtrct_reduct.pl
@@ -42,13 +42,13 @@
 
    while( my $line = <INFILE> ) {
      if( $line =~ /$reduct_target/ ) {
-       my @reduct_ln = split( / +/, $line ); 
+       my @reduct_ln = split( / +/, $line );
        $iter_gradient = $reduct_ln[$gradient_num];
        if( $initial_gradient == -999.0 ){
           $initial_gradient = $iter_gradient;
        }
- 
-       $reduct = $iter_gradient / $initial_gradient; 
+
+       $reduct = $iter_gradient / $initial_gradient;
 
        push( @reduct_array, $reduct );
      }
@@ -77,7 +77,7 @@
    if( -e $outfile ) {
       my $newfile = "${tankdir}/${outfile}";
-      system("cp -f $outfile $newfile");
+      system("cpfs $outfile $newfile");
    }
 }
 else {                        # $infile does not exist
diff --git a/ush/syndat_qctropcy.sh b/ush/syndat_qctropcy.sh
index 3e3babf9750..022851c3e45 100755
--- a/ush/syndat_qctropcy.sh
+++ b/ush/syndat_qctropcy.sh
@@ -6,7 +6,7 @@
 #      MODIFIED 07/06/1997 (Keyser)
 #      MODIFIED 03/03/2000 (Keyser)
 #
-# Abstract: This script handles the pre-processing of the tcvital 
+# Abstract: This script handles the pre-processing of the tcvital
 #           files that are made by NHC and other tropical
 #           prediction centers by the executable syndat_qctropcy
 #
@@ -105,10 +105,10 @@ positional parameter 1"
 #  (Note: Only do so if files don't already exist)
 
    if [[ ! -s "${COMOUT_OBS}/${RUN}.${cycle}.syndata.tcvitals.${tmmark}" ]]; then
-      cp "/dev/null" "${COMOUT_OBS}/${RUN}.${cycle}.syndata.tcvitals.${tmmark}"
+      touch "${COMOUT_OBS}/${RUN}.${cycle}.syndata.tcvitals.${tmmark}"
   fi
   if [[ ! -s "${COMOUT_OBS}/${RUN}.${cycle}.jtwc-fnoc.tcvitals.${tmmark}" ]]; then
-      cp "/dev/null" "${COMOUT_OBS}/${RUN}.${cycle}.jtwc-fnoc.tcvitals.${tmmark}"
+      touch "${COMOUT_OBS}/${RUN}.${cycle}.jtwc-fnoc.tcvitals.${tmmark}"
   fi
 
   exit
@@ -124,16 +124,16 @@ set_trace
 
 year=${run_date:0:4}
 
- 
+
 #  Copy the seasonal statistics from archive directory to local
- 
-cp $ARCHSYND/syndat_akavit akavit; touch akavit
-cp $ARCHSYND/syndat_dateck dateck
-cp $ARCHSYND/syndat_stmcat.scr stmcat.scr; touch stmcat.scr
-cp $ARCHSYND/syndat_stmcat stmcat; touch stmcat
-cp $ARCHSYND/syndat_sthisto sthisto
-cp $ARCHSYND/syndat_sthista sthista
- 
+
+cpreq $ARCHSYND/syndat_akavit akavit; touch akavit
+cpreq $ARCHSYND/syndat_dateck dateck
+cpreq $ARCHSYND/syndat_stmcat.scr stmcat.scr; touch stmcat.scr
+cpreq $ARCHSYND/syndat_stmcat stmcat; touch stmcat
+cpreq $ARCHSYND/syndat_sthisto sthisto
+cpreq $ARCHSYND/syndat_sthista sthista
+
 touch dateck
 dateck_size=$(ls -l dateck | awk '{ print $5 }')
 if [ $dateck_size -lt 10 ]; then
@@ -153,7 +153,7 @@ fi
 #        06Z GDAS at tm00 (last run of day centered on 06Z)
 #        12Z GDAS at tm00 (last run of day centered on 12Z)
 #        18Z GDAS at tm00 (last run of day centered on 18Z)
- 
+
 net=$NET
 files=F,
 if [ "$RUN" = 'ndas' ]; then
@@ -164,11 +164,11 @@ fi
 if [ -n "$files_override" ]; then  # for testing, typically want FILES=F
   files_override=$(echo "$files_override" | tr [a-z] [A-Z] | tr -d [.] | cut -c 1)
   if [ "$files_override" = 'T' -o "$files_override" = 'F' ]; then
-    msg="***WARNING: Variable files setting will be overriden from $files to $files_override.  Override expected if testing. "
+    msg="***WARNING: Variable files setting will be overridden from $files to $files_override.  Override expected if testing."
     files=$files_override
   else
-    msg="***WARNING: Invalid attempt to override files setting.  Will stay with default for this job" 
-  fi 
+    msg="***WARNING: Invalid attempt to override files setting.  Will stay with default for this job"
+  fi
   set +x
   echo -e "\n${msg}\n"
   set_trace
@@ -177,11 +177,11 @@ fi
 
 echo " &INPUT  RUNID = '${net}_${tmmark}_${cyc}', FILES = $files " > vitchk.inp
 cat ${PARMgfs}/relo/syndat_qctropcy.${RUN}.parm >> vitchk.inp
- 
+
 #  Copy the fixed fields
- 
-cp ${FIXgfs}/am/syndat_fildef.vit fildef.vit
-cp ${FIXgfs}/am/syndat_stmnames stmnames
+
+cpreq ${FIXgfs}/am/syndat_fildef.vit fildef.vit
+cpreq ${FIXgfs}/am/syndat_stmnames stmnames
 
 rm -f nhc fnoc lthistry
 
@@ -195,7 +195,7 @@ rm -f nhc fnoc lthistry
 
 if [ -s ${HOMENHC}/tcvitals ]; then
    echo "tcvitals found" >> $pgmout
-   cp ${HOMENHC}/tcvitals nhc
+   cpreq ${HOMENHC}/tcvitals nhc
 else
    echo "WARNING: tcvitals not found, create empty tcvitals" >> $pgmout
    > nhc
@@ -208,7 +208,7 @@ touch nhc
 mv -f nhc nhc1
 ${USHgfs}/parse-storm-type.pl nhc1 > nhc
 
-cp -p nhc nhc.ORIG
+cpreq -p nhc nhc.ORIG
 
 # JTWC/FNOC ... execute syndat_getjtbul script to write into working directory
 #               as fnoc; copy to archive
 ${USHgfs}/syndat_getjtbul.sh ${run_date}
@@ -225,9 +225,9 @@ fi
 
 #########################################################################
 
-cp $slmask slmask.126
- 
- 
+cpreq $slmask slmask.126
+
+
 #  Execute program syndat_qctropcy
 
 pgm=$(basename ${EXECgfs}/syndat_qctropcy.x)
@@ -277,10 +277,10 @@ if [ "$errqct" -gt '0' ];then
 #  (Note: Only do so if files don't already exist)
 
    if [[ ! -s "${COMOUT_OBS}/${RUN}.${cycle}.syndata.tcvitals.${tmmark}" ]]; then
-      cp "/dev/null" "${COMOUT_OBS}/${RUN}.${cycle}.syndata.tcvitals.${tmmark}"
+      touch "${COMOUT_OBS}/${RUN}.${cycle}.syndata.tcvitals.${tmmark}"
   fi
   if [[ ! -s ${COMOUT_OBS}/${RUN}.${cycle}.jtwc-fnoc.tcvitals.${tmmark} ]]; then
-      cp "/dev/null" "${COMOUT_OBS}/${RUN}.${cycle}.jtwc-fnoc.tcvitals.${tmmark}"
+      touch "${COMOUT_OBS}/${RUN}.${cycle}.jtwc-fnoc.tcvitals.${tmmark}"
   fi
 
   exit
@@ -295,12 +295,12 @@ set_trace
 
 if [ "$copy_back" = 'YES' ]; then
    cat lthistry>>$ARCHSYND/syndat_lthistry.$year
-   cp akavit $ARCHSYND/syndat_akavit
-   cp dateck $ARCHSYND/syndat_dateck
-   cp stmcat.scr $ARCHSYND/syndat_stmcat.scr
-   cp stmcat $ARCHSYND/syndat_stmcat
-   cp sthisto $ARCHSYND/syndat_sthisto
-   cp sthista $ARCHSYND/syndat_sthista
+   cpfs akavit $ARCHSYND/syndat_akavit
+   cpfs dateck $ARCHSYND/syndat_dateck
+   cpfs stmcat.scr $ARCHSYND/syndat_stmcat.scr
+   cpfs stmcat $ARCHSYND/syndat_stmcat
+   cpfs sthisto $ARCHSYND/syndat_sthisto
+   cpfs sthista $ARCHSYND/syndat_sthista
 fi
 
@@ -316,7 +316,7 @@ then
 
    if [ "$copy_back" = 'YES' -a ${envir} = 'prod' ]; then
       if [ -s ${HOMENHC}/tcvitals ]; then
-         cp nhc ${HOMENHC}/tcvitals
+         cpfs nhc ${HOMENHC}/tcvitals
      fi
 
      err=$?
@@ -354,14 +354,14 @@ fi
 
 # This is the file that connects to the later RELOCATE and/or PREP scripts
-cp current "${COMOUT_OBS}/${RUN}.${cycle}.syndata.tcvitals.${tmmark}"
+cpfs current "${COMOUT_OBS}/${RUN}.${cycle}.syndata.tcvitals.${tmmark}"
 
-# Create the DBNet alert 
+# Create the DBNet alert
 if [[ "${SENDDBN}" == "YES" ]]; then
   "${DBNROOT}/bin/dbn_alert" "MODEL" "GDAS_TCVITALS" "${job}" "${COMOUT_OBS}/${RUN}.${cycle}.syndata.tcvitals.${tmmark}"
 fi
- 
+
 # Write JTWC/FNOC Tcvitals to /com path since not saved anywhere else
-cp fnoc "${COMOUT_OBS}/${RUN}.${cycle}.jtwc-fnoc.tcvitals.${tmmark}"
+cpfs fnoc "${COMOUT_OBS}/${RUN}.${cycle}.jtwc-fnoc.tcvitals.${tmmark}"
 
 exit
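Two conventions recur in the syndat_qctropcy.sh hunks above. Required inputs (archive statistics, fixed fields, the NHC tcvitals) are now fetched with cpreq, which is assumed to follow the prod_util "copy required" contract: if the source is missing or the copy fails, the job aborts rather than running on with a stale or empty input. A hedged sketch of that assumed contract:

    cpreq() {
      # Copy a required input; kill the job if the copy fails.
      if ! cp "$@"; then
        echo "FATAL ERROR: required copy failed: cp $*" >&2
        exit 1   # operational versions would typically route this through err_exit
      fi
    }

Separately, the empty-placeholder idiom cp "/dev/null" <file> becomes touch <file>. Under the enclosing ! -s guards both leave a zero-length file in place, so the swap is behavior-preserving while avoiding a needless read of /dev/null.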
diff --git a/ush/tropcy_relocate.sh b/ush/tropcy_relocate.sh
index 65feb754d22..38caecba3b6 100755
--- a/ush/tropcy_relocate.sh
+++ b/ush/tropcy_relocate.sh
@@ -20,7 +20,7 @@
 #   processing).  This script has been designed to be executed by either an
 #   operational job script, a test job script, a parallel job script, or a
 #   stand-alone batch run initiated by a user.
-# 
+#
 # Script history log:
 # 2006-06-12  Dennis A. Keyser -- Original version for implementation - split
 #     off from USH script prepobs_makeprepbufr.sh, this was done to allow
@@ -37,7 +37,7 @@
 #     guess (input to relocation) in /com (with .pre-relocate. qualifier) so
 #     it can be identified later
 # 2012-08-01  Luke Lin -- alerts inform.relocate, tcvitals.relocate, tropcy_relocation_status
-# 2012-12-03  J. Woollen -- transitioned to WCOSS system. Introduced mpi version of the 
+# 2012-12-03  J. Woollen -- transitioned to WCOSS system. Introduced mpi version of the
 #     relocate code which precesses three backgrounds in one run. Removed the older
 #     poe/cmdfile parallelism from the script.
 # 2013-10-11  D. Stokes -- Modified some variable names for reorganization.
@@ -112,7 +112,7 @@
 #                  Default is "-pgmmodel mpmd -ilevel 2 -labelio yes \
 #                  -stdoutmode ordered"
 #     RELOX        String indicating executable path for RELOCATE_MV_NVORTEX
-#                  program 
+#                  program
 #                  Default is "${EXECgfs}/relocate_mv_nvortex"
 #     SUPVX        String indicating executable path for SUPVIT utility
 #                  program
@@ -121,7 +121,7 @@
 #                  program
 #                  Default is "${EXECgfs}/gettrk"
 #     BKGFREQ      Frequency of background files for relocation
-#                  Default is "3" 
+#                  Default is "3"
 #     SENDDBN      String when set to "YES" alerts output files to $COMSP
 #
 # These do not have to be exported to this script.  If they are, they will
@@ -135,7 +135,7 @@
 #                  run (this is passed to child script
 #                  tropcy_relocate_extrkr.sh - if "$CMODEL" is not set here,
 #                  it defaults to "$RUN")
-# 
+#
 #
 # Modules and files referenced:
 #     Herefile: RELOCATE_GES
@@ -175,7 +175,7 @@ qid=$$
 
 if [ $# -ne 1 ] ; then
    err0=1
-else 
+else
    run_date=$1
    if [ "${#run_date}" -ne '10' ]; then
       err0=1
@@ -348,7 +348,7 @@ to center relocation date/time;"
       if [ $fhr = "0" ]; then
          "${USHgfs}/getges.sh" -e "${envir_getges}" -n "${network_getges}" -v "${run_date}" \
             -t "${stype}" > "${COMOUT_OBS}/${RUN}.${cycle}.sgesprep_pre-relocate_pathname.${tmmark}"
-         cp "${COMOUT_OBS}/${RUN}.${cycle}.sgesprep_pre-relocate_pathname.${tmmark}" \
+         cpfs "${COMOUT_OBS}/${RUN}.${cycle}.sgesprep_pre-relocate_pathname.${tmmark}" \
            "${COMOUT_OBS}/${RUN}.${cycle}.sgesprep_pathname.${tmmark}"
       fi
       set +x
@@ -389,9 +389,9 @@ echo "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
 done
 
 if [ -f ${tstsp}syndata.tcvitals.$tmmark ]; then
-   cp ${tstsp}syndata.tcvitals.$tmmark tcvitals.now
+   cpreq ${tstsp}syndata.tcvitals.$tmmark tcvitals.now
 else
-   cp "${COMOUT_OBS}/${RUN}.${cycle}.syndata.tcvitals.${tmmark}" "tcvitals.now"
+   cpreq "${COMOUT_OBS}/${RUN}.${cycle}.syndata.tcvitals.${tmmark}" "tcvitals.now"
 fi
 
@@ -403,7 +403,7 @@ MP_PULSE=0
 MP_TIMEOUT=600
 GDATE10=$(date --utc +%Y%m%d%H -d "${run_date:0:8} ${run_date:8:2} - 6 hours")
 
-# make unique combined tcvitals file for t-12, t-6 and t+0 -- 
+# make unique combined tcvitals file for t-12, t-6 and t+0 --
 # if tcvitals does not contains record from current time, skip relocation
 # processing
 # -----------------------------------------------------------------------
@@ -430,12 +430,12 @@ RELOCATION PROCESSING"
    echo "NO RECORDS to process" > "${COMOUT_OBS}/${RUN}.${cycle}.tropcy_relocation_status.${tmmark}"
    if [[ ! -s "${COMOUT_OBS}/${RUN}.${cycle}.tcvitals.relocate.${tmmark}" ]]; then
-      cp "/dev/null" "${COMOUT_OBS}/${RUN}.${cycle}.tcvitals.relocate.${tmmark}"
+      touch "${COMOUT_OBS}/${RUN}.${cycle}.tcvitals.relocate.${tmmark}"
    fi
 else
 
    cat VITL >>tcvitals
-   grep "${PDY} ${cyc}" VITL > tcvitals.now1 
+   grep "${PDY} ${cyc}" VITL > tcvitals.now1
 
 # create model forecast track location file
 
@@ -492,7 +492,7 @@ else
 # ------------------------------------------------------------------
 
    set +u
-   [ -z "$LONB" ] && LONB=0 
+   [ -z "$LONB" ] && LONB=0
    [ -z "$LATB" ] && LATB=0
    set -u
 
@@ -513,12 +513,12 @@ else
    export MP_USE_BULK_XFER=yes
    export RELOX_threads=${RELOX_threads:-16}
    export KMP_STACKSIZE=1024m
-   export OMP_NUM_THREADS=$RELOX_threads 
+   export OMP_NUM_THREADS=$RELOX_threads
    export MP_TASK_AFFINITY=core:$RELOX_threads
 
    ${APRNRELOC:-mpirun.lsf} $RELOX >stdo.prints
    errSTATUS=$?
- 
+
 # copy relocation print output here and there
 # -------------------------------------------
 
@@ -570,8 +570,8 @@ else
 
    rm -f RELOCATE_GES cmd
 
-   cp "rel_inform1" "${COMOUT_OBS}/${RUN}.${cycle}.inform.relocate.${tmmark}"
-   cp "tcvitals" "${COMOUT_OBS}/${RUN}.${cycle}.tcvitals.relocate.${tmmark}"
+   cpfs "rel_inform1" "${COMOUT_OBS}/${RUN}.${cycle}.inform.relocate.${tmmark}"
+   cpfs "tcvitals" "${COMOUT_OBS}/${RUN}.${cycle}.tcvitals.relocate.${tmmark}"
    if [[ "${SENDDBN}" == "YES" ]]; then
       if test "$RUN" = "gdas1"
       then
diff --git a/ush/tropcy_relocate_extrkr.sh b/ush/tropcy_relocate_extrkr.sh
index 50f9dd9a12a..42b53df4cf1 100755
--- a/ush/tropcy_relocate_extrkr.sh
+++ b/ush/tropcy_relocate_extrkr.sh
@@ -10,7 +10,7 @@ export machine=$(echo $machine|tr '[a-z]' '[A-Z]')
 #  module load mpt
 #fi
 
-# Variable "gribver" defines if input GRIB data is in 
+# Variable "gribver" defines if input GRIB data is in
 # GRIB1 (gribver=1) or GRIB2 (gribver=2) format.
 export gribver=${gribver:-1}
 
@@ -33,11 +33,11 @@ fi
 
 cd $vdir
 
-# This script kicks off the hurricane tracker system.  You have the option of 
-# running the tracker on several operational models (the system will 
+# This script kicks off the hurricane tracker system.  You have the option of
+# running the tracker on several operational models (the system will
 # automatically search in the appropriate operational directories for the
-# data), or on your own model data.  In either case, the current system 
-# will ONLY be able to read GRIB data.  To run the tracker, fill in the 
+# data), or on your own model data.  In either case, the current system
+# will ONLY be able to read GRIB data.  To run the tracker, fill in the
 # required fields below on the "export" lines, then llsubmit this script.
 
@@ -45,12 +45,12 @@ cd $vdir
 # 1. ENTER MODEL TYPE
 # -------------------
 #
-# Enter the name of the model that you're running the tracker on.  The 
-# tracker is already hard-wired to read the operational GFS, MRF, UKMET, 
-# ECMWF, NGM, Early NAM and NAVGEM files in their operational directories 
-# on /com.  If your model is one of these, *AND* the data files are still 
-# on /com (data on the SP sits on /com for ~10 days), enter one of the 
-# following below: GFS, MRF, UKMET, ECMWF, NGM, NAM, NGPS.  Otherwise, 
+# Enter the name of the model that you're running the tracker on.  The
+# tracker is already hard-wired to read the operational GFS, MRF, UKMET,
+# ECMWF, NGM, Early NAM and NAVGEM files in their operational directories
+# on /com.  If your model is one of these, *AND* the data files are still
+# on /com (data on the SP sits on /com for ~10 days), enter one of the
+# following below: GFS, MRF, UKMET, ECMWF, NGM, NAM, NGPS.  Otherwise,
 # enter OTHER below:
 #
 # Example: export CMODEL=gfs
@@ -62,36 +62,36 @@ export CMODEL=${CMODEL:-$RUN}
 # 2. ENTER FILE NAME
 # -------------------
 #
-# If you entered an operational model above, then skip this step, as the 
-# script already knows the filename and directory location .  Otherwise, 
-# you must enter the full name (include directory) of one of your GRIB 
-# forecast files.  Also, your 00h forecast file must have the characters 
-# "00" in the name, not just "anl". 
+# If you entered an operational model above, then skip this step, as the
+# script already knows the filename and directory location .  Otherwise,
+# you must enter the full name (include directory) of one of your GRIB
+# forecast files.  Also, your 00h forecast file must have the characters
+# "00" in the name, not just "anl".
 #
 # Finally, and this is important, in the export statement below, in the
 # character positions where the forecast hour characters "00" would appear
-# replace those characters with the characters "XX".  This allows the 
-# tracker script to manipulate the file names, and to avoid confusion of 
+# replace those characters with the characters "XX".  This allows the
+# tracker script to manipulate the file names, and to avoid confusion of
 # the forecast hour "00" with the initial hour "00" that may be in a name.
 #
 # Example: If the actual name of your datafile is
 # /ptmp/wx20tm/trakdat/ngm.1997110700.pgrb.f00, then enter below:
 #
 # export INPFILE=/ptmp/wx20tm/trakdat/ngm.1997110700.pgrb.fXX
- 
+
 export INPFILE=${INPFILE:-}
- 
+
 # -------------------------------
 # 3. ENTER FORECAST HOUR INTERVAL
 # -------------------------------
 #
 # If above you entered an operational model from an operational directory,
-# then skip this step, as the script already knows the forecast hour 
-# interval.  Otherwise, enter the integer number of hours between 
+# then skip this step, as the script already knows the forecast hour
+# interval.  Otherwise, enter the integer number of hours between
 # successive forecasts.  By the way, there are a couple of caveats:
-# a) Your forecast hours must be evenly spaced (i.e., if you start out 
-#    with 00, 06, 12,... etc, and then after 48 you skip to 60, the 
+# a) Your forecast hours must be evenly spaced (i.e., if you start out
+#    with 00, 06, 12,... etc, and then after 48 you skip to 60, the
 #    program will stop after 48 hours).
 # b) Currently, a maximum of 14 forecast times can be handled (this is
 #    enough to get you 6h forecast intervals from 00 to 78h, or 12h
@@ -123,24 +123,24 @@ export INPSTORM=
 # 6. ENTER NAME OF AUXILIARY TC VITALS FILE
 # -----------------------------------------
 #
-# If you are going to use this script to track a storm that has been 
+# If you are going to use this script to track a storm that has been
 # tracked by an operational hurricane center (NHC, JTWC, etc.), then
 # skip this step, as the TC Vitals files for 1991-1999 are online
 # (except we're currently missing 1992) and will be used.
-# However, if you're trying to track, for example, a midlatitude 
-# synoptic storm or a subtropical low, then you need to create a TC 
-# Vitals record for that storm that includes the following parameters 
+# However, if you're trying to track, for example, a midlatitude
+# synoptic storm or a subtropical low, then you need to create a TC
+# Vitals record for that storm that includes the following parameters
 # in the appropriate character positions: 3-character storm ID, Storm
 # name, starting YYMMDD, starting HHMM, initial Lat, initial Lon,
 # direction of storm movement, and speed of storm movement.  (See
 # Tim Marchok or Steve Lord to get the exact format).  NOTE: the
-# online TC Vitals files are the operational files, NOT Best Track. 
+# online TC Vitals files are the operational files, NOT Best Track.
 # If you want to use Best Track data, include that file name here and
 # the tracker will use it instead of the operational file.  Enter the
 # name of your auxiliary TC Vitals file:
 #
 # Example: export AUXTCVIT=/ptmp/wx20tm/ecoast.wint97
- 
+
 export AUXTCVIT=
 
@@ -150,8 +150,8 @@ export AUXTCVIT=
 #
 # This allows you to enter your own thresholds for mslp gradient and
 # 850 mb tangential winds.  These are used in subroutine is_it_a_storm
-# at each forecast hour to verify that the center that you've found 
-# at least resembles a storm.  It helps to have this check so that, 
+# at each forecast hour to verify that the center that you've found
+# at least resembles a storm.  It helps to have this check so that,
 # in the case of weak storms that have dissipated, you don't end up
 # latching onto some weak passing trough.  When this check was not in
 # there in the original version of the tracker, we wound up with some
@@ -159,8 +159,8 @@ export AUXTCVIT=
 # operational version, the values are hard-wired in as requiring a
 # mslp gradient of at least 1 mb / 200 km (0.005 mb/km), and requiring
 # the average cyclonic tangential winds at 850 mb within a specified
-# radius (the radius depends on each model's grid resolution) to be 
-# at least 5 m/s.  If you want different thresholds, then change the 
+# radius (the radius depends on each model's grid resolution) to be
+# at least 5 m/s.  If you want different thresholds, then change the
 # default values below....
 #
 # Example:
@@ -181,7 +181,7 @@ export BKGFREQ=${BKGFREQ:-1}
 # data files exist, it also pulls all of the needed data records out of the
 # various GRIB forecast files and puts them into one, consolidated GRIB file,
 # it also runs scripts that read the TC Vitals records for the input day and
-# updates the TC Vitals (if necessary).  It then executes the gettrk 
+# updates the TC Vitals (if necessary).  It then executes the gettrk
 # executable, which actually does the tracking.
 
@@ -226,14 +226,14 @@ case ${shh} in
 esac
 
 #---------------------------------------------------#
-# Convert the input model to lowercase letters and 
+# Convert the input model to lowercase letters and
 # check to see if it's a valid model, and assign a
 # model ID number to it.
 #---------------------------------------------------#
 
 cmodel=$(echo ${cmodel} | tr "[A-Z]" "[a-z]")
 
-case ${cmodel} in 
+case ${cmodel} in
 
   gdas) set +x; echo " "; echo " ++ operational GDAS chosen"; set_trace;
         fcstlen=9 ;
@@ -314,17 +314,17 @@ case ${cmodel} in
 esac
 
 #-------------------------------------------------
-# Initialize the fh array that's used in 
+# Initialize the fh array that's used in
 # telling the fortran program the number of
-# forecast hours and what those forecast 
-# hours are.  If the model selected is an 
+# forecast hours and what those forecast
+# hours are.  If the model selected is an
 # operational model, the tracker already knows
 # what those hours are; it only needs this info
 # if the model is user-defined.
 #-------------------------------------------------
 
 fct=1
-while [ ${fct} -le 14 ]; 
+while [ ${fct} -le 14 ];
 do
   fh[${fct}]=99
   let fct=fct+1
@@ -333,20 +333,20 @@ done
 
 #------------------------------------------------------#
 # Set the directories for the operational files.  For
-# a user-specified model, we need to process the 
-# input file to get the necessary info about the 
+# a user-specified model, we need to process the
+# input file to get the necessary info about the
 # data directory, etc.....
 #------------------------------------------------------#
 
 if [ ${cmodel} = 'other' ]; then
 
 # This next bit of code tears apart the input file name to get the
-# data directory and data file names.  pos1, pos2, pos3 and pos4 
+# data directory and data file names.  pos1, pos2, pos3 and pos4
 # refer to character string positions in the filename string.  The
 # idea of this next bit of code is to pull the file name apart to
 # get a shell for the file name, so that if a user has a file
-# name such as pgbf00.97090100, the script knows where in the 
-# string to substitute the forecast hours.  IMPORTANT NOTE: The 
+# name such as pgbf00.97090100, the script knows where in the
+# string to substitute the forecast hours.  IMPORTANT NOTE: The
 # file name that the user enters must have the characters "XX" in
 # in place of the forecast hour characters "00" in order for this
 # to work.
@@ -358,7 +358,7 @@ if [ ${cmodel} = 'other' ]; then
 
   otherdir=$(dirname ${inpfile})
   fname=$(basename ${inpfile})
- 
+
   pos2=$(echo ${fname} | awk '{ match($0,/XX/); print RSTART }')
   pos4=$(echo ${fname} | awk '{ match($0,/$/); print RSTART }')
   let pos4=pos4-1
@@ -376,21 +376,21 @@ if [ ${cmodel} = 'other' ]; then
     set_trace
     exit 8
   fi
- 
+
   fnamebeg=$(echo ${fname} | cut -c1-${pos1})
   if [ ${pos4} -ge ${pos3} ]; then
     fnameend=$(echo ${fname} | cut -c${pos3}-${pos4})
   else
     fnameend=""
   fi
- 
+
   fflag='y'
  fhour=0
  fcsthrsother=''
  fhrct=0
-  while [ ${fflag} = 'y' ]; 
+  while [ ${fflag} = 'y' ];
  do
- 
+
    if [ ${fhrct} -eq 14 ]; then
      set +x
      echo " "
@@ -398,12 +398,12 @@ if [ ${cmodel} = 'other' ]; then
      echo " "
      set_trace
      break
-    fi 
- 
+    fi
+
    if [ ${fhour} -lt 10 ]; then
      fhour=0${fhour}
    fi
- 
+
    if [ -s ${otherdir}/${fnamebeg}${fhour}${fnameend} ]; then
      maxhour=${fhour}
      fcsthrsother=${fcsthrsother}" ${fhour}"
@@ -418,7 +418,7 @@ if [ ${cmodel} = 'other' ]; then
    fi
 
    let fhour=fhour+fhint
- 
+
  done
 
  if [ ! -s ${otherdir}/${fnamebeg}00${fnameend} ]; then
@@ -434,25 +434,25 @@ if [ ${cmodel} = 'other' ]; then
    set_trace
    exit 8
  fi
- 
+
  set +x
  echo " "
- echo " Max forecast hour is $maxhour" 
+ echo " Max forecast hour is $maxhour"
  echo " List of forecast hours: $fcsthrsother"
  echo " "
  set_trace
 
# --------------------------------------------------
-# In order for the fortran program to know how many 
+# In order for the fortran program to know how many
# forecast times there are and what those forecast
# hours are, we need to include this information in
# the namelist file.  So first, put this information
-# into an array, then at the end of this script, 
+# into an array, then at the end of this script,
# we'll put it into the namelist file.
 
  fhour=0
  fct=1
-  while [ ${fct} -le 14 ]; 
+  while [ ${fct} -le 14 ];
  do
 
    if [ ${fhour} -le ${maxhour} ]; then
@@ -468,7 +468,7 @@ if [ ${cmodel} = 'other' ]; then
 
 fi
 
-cp $DATA/tcvitals ${vdir}/vitals.${symd}${dishh}
+cpfs $DATA/tcvitals ${vdir}/vitals.${symd}${dishh}
 
 grep -v TEST ${vdir}/vitals.${symd}${dishh} | \
      awk 'substr($0,6,1) !~ /[8-9]/ {print $0}' >${vdir}/tempvit.nonameless
@@ -564,7 +564,7 @@ awk '
 
 mv ${TMPDIR}/vitals.${symd}${dishh}.y4 ${vdir}/vitals.${symd}${dishh}
 
-#cp $auxtcvit ${vdir}/vitals.${symd}${dishh}
+#cpfs $auxtcvit ${vdir}/vitals.${symd}${dishh}
 
 pgm=$(basename $SUPVX)
 if [ -s $DATA/prep_step ]; then
@@ -702,12 +702,12 @@ done
 #
 # The utility /nwprod/util/exec/wgrib is used to cut out the
 # needed parms for the GFS, MRF, UKMET and NAVGEM files.
-# The utility /nwprod/util/exec/copygb is used to interpolate the 
-# NGM (polar stereographic) and NAM (Lambert Conformal) data from 
-# their grids onto lat/lon grids.  Note that while the lat/lon 
-# grid that I specify overlaps into areas that don't have any data 
-# on the original grid, Mark Iredell wrote the copygb software so 
-# that it will mask such "no-data" points with a bitmap (just be 
+# The utility /nwprod/util/exec/copygb is used to interpolate the
+# NGM (polar stereographic) and NAM (Lambert Conformal) data from
+# their grids onto lat/lon grids.  Note that while the lat/lon
+# grid that I specify overlaps into areas that don't have any data
+# on the original grid, Mark Iredell wrote the copygb software so
+# that it will mask such "no-data" points with a bitmap (just be
 # sure to check the lbms in your fortran program after getgb).
 #-----------------------------------------------------------------#
 
@@ -729,7 +729,7 @@ regflag=$(grep NHC ${vdir}/vitals.upd.${cmodel}.${symd}${dishh} | wc -l)
 # ----------------------------
 #   Process NGM, if selected
 # ----------------------------
- 
+
 if [ ${model} -eq 5 ]; then
 
   grid='255 0 151 71 70000 190000 128 0000 340000 1000 1000 64'
@@ -774,8 +774,8 @@ if [ ${model} -eq 5 ]; then
     echo " "
     set_trace
 
-    g1=${ngmdir}/${ngmgfile}${fhour} 
- 
+    g1=${ngmdir}/${ngmgfile}${fhour}
+
     ${COPYGB:?} -g"$grid" -k'2*-1 104 -1 33 100 850' $g1 $x1 $TMPDIR/ngmllu850.grb.f${fhour}; rcc1=$?
     ${COPYGB:?} -g"$grid" -k'2*-1 104 -1 33 100 700' $g1 $x1 $TMPDIR/ngmllu700.grb.f${fhour}; rcc2=$?
     ${COPYGB:?} -g"$grid" -k'2*-1 104 -1 33 100 500' $g1 $x1 $TMPDIR/ngmllu500.grb.f${fhour}; rcc3=$?
@@ -819,7 +819,7 @@ fi
 # ----------------------------------
 #   Process Early NAM, if selected
 # ----------------------------------
- 
+
 if [ ${model} -eq 6 ]; then
 
   grid='255 0 301 141 70000 190000 128 0000 340000 500 500 64'
@@ -863,9 +863,9 @@ if [ ${model} -eq 6 ]; then
     echo " Extracting Early NAM GRIB data for forecast hour = $fhour"
     echo " "
     set_trace
- 
+
     g1=${namdir}/${namgfile}${fhour}.tm00
- 
+
     if [ -s $TMPDIR/namlatlon.pgrb ]; then rm $TMPDIR/namlatlon.pgrb; fi
     ${COPYGB:?} -g"$grid" -k'4*-1 33 100 850' $g1 $x1 $TMPDIR/namllu850.grb.f${fhour}; rcc1=$?
     ${COPYGB:?} -g"$grid" -k'4*-1 33 100 700' $g1 $x1 $TMPDIR/namllu700.grb.f${fhour}; rcc2=$?
@@ -897,7 +897,7 @@ if [ ${model} -eq 6 ]; then
        $TMPDIR/namllav850.grb.f${fhour} $TMPDIR/namllav700.grb.f${fhour} \
        $TMPDIR/namllu10m.grb.f${fhour} \
        >>${vdir}/namlatlon.pgrb.${symd}${dishh}
- 
+
   done
 
   ${GRBINDEX:?} ${vdir}/namlatlon.pgrb.${symd}${dishh} ${vdir}/namlatlon.pgrb.ix.${symd}${dishh}
@@ -912,18 +912,18 @@ fi
 # ------------------------------
 
 # ECMWF is not a regional grid, however they currently (6/98) only send us the
-# global belt from 35S to 35N.  Thus, it will have grid boundaries that may 
-# interfere with the tracking algorithm.  It is crucial to the proper 
-# functioning of the tracking program to give any regional grid dataset a 
-# buffer zone around the grid boundaries, with null values in that buffer 
-# zone that are bitmapped out.  That's why we use Mark Iredell's grib 
+# global belt from 35S to 35N.  Thus, it will have grid boundaries that may
+# interfere with the tracking algorithm.  It is crucial to the proper
+# functioning of the tracking program to give any regional grid dataset a
+# buffer zone around the grid boundaries, with null values in that buffer
+# zone that are bitmapped out.  That's why we use Mark Iredell's grib
 # interpolater here, to add a 5 degree buffer zone to the north and south of
 # the ECMWF grid boundary; his interpolater adds the null values in the
-# bitmap surrounding the area with valid data.  If ECMWF begins sending us 
+# bitmap surrounding the area with valid data.  If ECMWF begins sending us
 # the entire global data set, then this bit of code should
 # be taken out, and the data should then be processed as the other normal
 # full-coverage global models (ukmet, mrf, gfs, NAVGEM) currently are.
- 
+
 if [ ${model} -eq 4 ]; then
 
   if [ ! -s ${ecmwfdir}/${ecmwfgfile} ]; then
@@ -962,10 +962,10 @@ fi
 # ------------------------------
 #   Process GFS, if selected
 # ------------------------------
- 
+
 if [ ${model} -eq 1 ]; then
 
-  if [ -s ${vdir}/gfsgribfile.${symd}${dishh} ]; then 
+  if [ -s ${vdir}/gfsgribfile.${symd}${dishh} ]; then
     rm ${vdir}/gfsgribfile.${symd}${dishh}
   fi
 
@@ -1013,7 +1013,7 @@ fi
 # ------------------------------
 #   Process GDAS, if selected
 # ------------------------------
- 
+
 if [ ${model} -eq 8 ]; then
 
   export nest_type="fixed"
@@ -1022,9 +1022,9 @@ if [ ${model} -eq 8 ]; then
   export trkrnbd=85.0
   export trkrsbd=-85.0
   rundescr="xxxx"
-  atcfdescr="xxxx" 
+  atcfdescr="xxxx"
 
-  if [ -s ${vdir}/gdasgribfile.${symd}${dishh} ]; then 
+  if [ -s ${vdir}/gdasgribfile.${symd}${dishh} ]; then
    rm ${vdir}/gdasgribfile.${symd}${dishh}
  fi
 
@@ -1136,10 +1136,10 @@ fi
 # ------------------------------
 #   Process MRF, if selected
 # ------------------------------
- 
+
 if [ ${model} -eq 2 ]; then
 
-  if [ -s ${vdir}/mrfgribfile.${symd}${dishh} ]; then 
+  if [ -s ${vdir}/mrfgribfile.${symd}${dishh} ]; then
    rm ${vdir}/mrfgribfile.${symd}${dishh}
  fi
 
@@ -1189,10 +1189,10 @@ fi
 # ------------------------------
 #   Process UKMET, if selected
 # ------------------------------
- 
+
 if [ ${model} -eq 3 ]; then
 
-  if [ -s ${vdir}/ukmetgribfile.${symd}${dishh} ]; then 
+  if [ -s ${vdir}/ukmetgribfile.${symd}${dishh} ]; then
    rm ${vdir}/ukmetgribfile.${symd}${dishh}
  fi
 
@@ -1296,9 +1296,9 @@ fi
 if [ ${model} -eq 9 ]; then
 
 # We need to first check whether or not the data in the file are stored
-# on a lat/lon grid or not.  We do this by scanning the analysis file 
-# with Wesley's grib utility, and checking the value of the "Data 
-# Representation Type", which is stored in byte #6 in the GDS of each 
+# on a lat/lon grid or not.  We do this by scanning the analysis file
+# with Wesley's grib utility, and checking the value of the "Data
+# Representation Type", which is stored in byte #6 in the GDS of each
 # grib file.  A value of 0 indicates an equidistant lat/lon grid.
 
  if [ -s ${vdir}/otherlatlon.pgrb.${symdh} ]; then
@@ -1310,13 +1310,13 @@ if [ ${model} -eq 9 ]; then
 
  if [ ${gridtyp} -eq 0 ]; then
 
-# The data are already on a lat/lon grid, we do not need to 
+# The data are already on a lat/lon grid, we do not need to
# interpolate the data, just pull out the records that we need
# using wgrib.
 
    for fhour in ${fcsthrsother}
    do
- 
+
      if [ ! -s ${otherdir}/${fnamebeg}${fhour}${fnameend} ]; then
        set +x
        echo " "
@@ -1327,7 +1327,7 @@ if [ ${model} -eq 9 ]; then
        set_trace
        continue
      fi
- 
+
      gfile=${otherdir}/${fnamebeg}${fhour}${fnameend}
 
      ${WGRIB:?} -s $gfile >$TMPDIR/other.ix
@@ -1347,9 +1347,9 @@ if [ ${model} -eq 9 ]; then
        esac
 
      done
- 
+
    done
- 
+
  else
 
# The data are on a grid that is something other than a lat/lon grid.
@@ -1377,7 +1377,7 @@ if [ ${model} -eq 9 ]; then
      x1=$TMPDIR/tmpixfile
 
      g1=${otherdir}/${fnamebeg}${fhour}${fnameend}
- 
+
      ${COPYGB:?} -g"$othergrid" -k'4*-1 33 100 850' $g1 $x1 $TMPDIR/otherllu850.grb.f${fhour}; rcc1=$?
      ${COPYGB:?} -g"$othergrid" -k'4*-1 33 100 700' $g1 $x1 $TMPDIR/otherllu700.grb.f${fhour}; rcc2=$?
      ${COPYGB:?} -g"$othergrid" -k'4*-1 33 100 500' $g1 $x1 $TMPDIR/otherllu500.grb.f${fhour}; rcc3=$?
@@ -1448,7 +1448,7 @@ done
 
 namelist=${vdir}/gettrk.input.${cmodel}.${symdh}
 ATCFNAME=$( echo "${atcfname}" | tr '[a-z]' '[A-Z]')
- 
+
 export atcfymdh=${scc}${syy}${smm}${sdd}${shh}
 contour_interval=100.0
 write_vit=n
@@ -1490,7 +1490,7 @@ echo "        wait_min_size=100,"     >>${namelist}
 echo "        wait_max_wait=1800,"    >>${namelist}
 echo "        wait_sleeptime=5,"      >>${namelist}
 echo "        per_fcst_command=''/"   >>${namelist}
- 
+
 pgm=$(basename $GETTX)
 if [ -s $DATA/prep_step ]; then
    . $DATA/prep_step
diff --git a/ush/wave_prnc_cur.sh b/ush/wave_prnc_cur.sh
index e3743151a55..cdeecc1f7c5 100755
--- a/ush/wave_prnc_cur.sh
+++ b/ush/wave_prnc_cur.sh
@@ -44,7 +44,7 @@ mv -f "cur_temp3.nc" "cur_uv_${PDY}_${fext}${fh3}_flat.nc"
 # Convert to regular lat lon file
 # If weights need to be regenerated due to CDO ver change, use:
 # $CDO genbil,r4320x2160 rtofs_glo_2ds_f000_3hrly_prog.nc weights.nc
-cp ${FIXgfs}/wave/weights_rtofs_to_r4320x2160.nc ./weights.nc
+cpreq ${FIXgfs}/wave/weights_rtofs_to_r4320x2160.nc ./weights.nc
 
 # Interpolate to regular 5 min grid
 ${CDO} remap,r4320x2160,weights.nc "cur_uv_${PDY}_${fext}${fh3}_flat.nc" "cur_5min_01.nc"
diff --git a/ush/wave_prnc_ice.sh b/ush/wave_prnc_ice.sh
index 2ff2030bcec..3812b5e8120 100755
--- a/ush/wave_prnc_ice.sh
+++ b/ush/wave_prnc_ice.sh
@@ -71,7 +71,7 @@ EOF
 
   if [ -f $file ]
   then
-    cp $file ice.grib
+    cpreq $file ice.grib
   fi
 
   if [ -f ice.grib ]
@@ -108,7 +108,7 @@ EOF
 
     printf "   Run through preprocessor ...\n"
 
-    cp -f ${DATA}/ww3_prnc.ice.$WAVEICE_FID.inp.tmpl ww3_prnc.inp
+    cpreq -f ${DATA}/ww3_prnc.ice.$WAVEICE_FID.inp.tmpl ww3_prnc.inp
 
     export pgm="${NET,,}_ww3_prnc.x"
     source prep_step
@@ -139,7 +139,7 @@ EOF
   fi
 
   echo "   Saving ice.ww3 as ${COMOUT_WAVE_PREP}/${icefile}"
-  cp ice.ww3 "${COMOUT_WAVE_PREP}/${icefile}"
+  cpfs ice.ww3 "${COMOUT_WAVE_PREP}/${icefile}"
   rm -f ice.ww3
 
 # --------------------------------------------------------------------------- #
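Every cpreq/cpfs call site in this changeset, including the Perl minimization monitors that shell out via system(), assumes the wrappers are on PATH when the ush scripts run. On NCO production systems that is typically arranged by loading the prod_util module in the job before any of these scripts execute; that is an assumption about the runtime environment, not something the scripts themselves enforce. A defensive preamble along these lines would make the dependency explicit:

    module load prod_util   # expected to provide cpreq, cpfs, err_exit, ...
    command -v cpfs >/dev/null || { echo "FATAL: cpfs not found on PATH" >&2; exit 1; }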