1 """!This module implements Python classes that run the 2014 HWRF Relocation.
3 The HWRF relocation is divided into four parts:
5 * Stage1 --- remove the prior cycle's vortex
7 * Stage2 --- remove the parent vortex
9 * Stage3 --- Relocate and paste the fields together
11 * Merge --- merge the relocation output and GSI. This is only used when
GSI is enabled.
14 In addition, due to the requirement of splitting the relocation into
15 three stages, there are "relocation info" files for passing
16 information between the three. The RelocationInfo object provides a
17 means to read and write these files."""
21 __all__=[
'Stage1',
'Stage2',
'Stage3',
'Merge',
'RelocationInfo',
# Python 2 stdlib config parsing (this file predates the configparser rename).
from ConfigParser import ConfigParser, SafeConfigParser, RawConfigParser
from produtil.run import checkrun, run, exe, bigexe, alias, runsync
from hwrf.numerics import partial_ordering, TimeArray, to_timedelta, \
    within_dt_epsilon, to_datetime_rel, \
    to_datetime, to_fraction
67 """!Passes information about relocation status between relocation stages.
69 This class is used to pass information between the relocate
70 stages. There are three public member variables that ARE meant to
71 be written depending on logic within the relocation Stage1, Stage2
72 and Stage3 classes. One then writes out the relocation status
73 information to an intermediate file at the end of each relocation
74 stage using self.write_info. It is read in by a later step of the
75 relocation, or by the merge, via RelocationInfo(filename,...)
77 * iflag_cold = 1 or 0 used by several of the fortran relocation
80 * warm_cold_flag = the constants WARM, COLD or None, depending on
81 whether this is a warm start, a cold start, or an unknown state.
83 * cold_ok = True only if the relocation intentionally vetoed a
84 warm start, such as for a weak storm
86 * initopt = 0, full vortex initialization; 1, relocation only
88 * ensda_relocate = True if relocating an ensemble member"""
90 """!Creates a new RelocationInfo object by reading in the
91 specified *.info file."""
98 if filename
is not None:
123 """!A Pythonic string representation of this object."""
124 return 'RelocationInfo(iflag_cold=%s,warm_cold_flag=%s,cold_ok=%s,'\
125 'initopt=%s,ensda_relocate=%s,ensda_relocate_continue=%s)'\
130 """!Returns the module-level constants COLD or WARM, or None,
131 for the specified string value.
133 @param value A string that is "WARM", "COLD" or "NONE" and is
134 case-insensitive. If it has any other value, None is returned
135 and a message is logged at ERROR level (if a logger is given).
136 @param logger a logging.Logger for log messages."""
138 if v==
'COLD':
return COLD
139 elif v==
'WARM':
return WARM
140 elif v==
'NONE':
return None
142 if logger
is not None:
144 'Invalid value %s (string %s) for warm_cold_flag. '
145 'Assuming None' %(repr(value),repr(v)))
148 """!This is the opposite of the make_warm_cold routine: it
149 returns "COLD", "WARM", or "None" for the constants COLD, WARM
150 or None based on self.warm_cold_flag"""
155 """!Reads the relocation information into this object from the
156 specified filename. This is called automatically by the
157 constructor. Error messages are logged to the given logger,
159 @param filename the name of the file to read
160 @param logger a logging.Logger for log messages"""
161 if not isinstance(filename,basestring):
163 'RelocationInfo.read_info expects a string for its '
164 'filename parameter. You provided a %s %s.'
165 %(type(filename).__name__,repr(filename)))
166 with open(filename,
'rt')
as f:
167 scp=SafeConfigParser(defaults={
168 'iflag_cold':
'0',
'warm_cold_flag':
'None',
169 'cold_ok':
'False',
'initopt':
'0',
170 'ensda_relocate':
'False',
171 'ensda_relocate_continue':
'True'})
173 self.
iflag_cold=scp.getint(
'info',
'iflag_cold')
175 scp.get(
'info',
'warm_cold_flag',
'None'))
176 self.
cold_ok=scp.getboolean(
'info',
'cold_ok')
177 self.
initopt=scp.getint(
'info',
'initopt')
182 """!Writes this object's relocation information to the
183 specified *.info file. Logs errors to the specified logger,
185 @param filename the file to write
186 @param logger a logging.Logger for log messages"""
187 if logger
is not None:
188 logger.info(
'Generating in-memory ConfigParser object for '
190 filename=str(filename)
191 thedir=os.path.dirname(filename)
194 c.add_section(
'info')
195 c.set(
'info',
'iflag_cold',str(self.
iflag_cold))
197 c.set(
'info',
'cold_ok',str(self.
cold_ok))
198 c.set(
'info',
'initopt',str(self.
initopt))
202 if logger
is not None:
203 logger.info(
'Writing RelocationInfo to file: %s'%(filename,))
204 with open(filename,
'wt')
as f:
208 """!This is a HWRF task that forms the base class for all vortex
209 relocation tasks, including the Merge. It exists solely to reduce
211 def __init__(self,dstore,conf,section,sim,domains,taskname=None,
212 modin=
'GDAS1',wrfanl=
None,wrfghost=
None,wrfinput=
None,
213 parentTrack=
None,trackName=
'track0',ghost_domains=
None,
214 dest_dir=
None,gsi_d02=
None,gsi_d03=
None,
215 gsi_d01=
None,cycling_interval=-6*3600,info=
None,
216 fgat_times=
None,centrack=
None,ensda=
None,**kwargs):
217 """!RelocationTask constructor.
219 @param dstore the produtil.datastore.Datastore for database storage
220 @param conf the hwrf.config.HWRFConfig for configuration info
221 @param section the configuration section to use
222 @param sim the hwrf.wrf.WRFSimulation describing the simulation being relocated
223 @param domains the hwrf.wrf.WRFDomains being relocated
224 @param taskname the taskname in the database
225 @param modin input model: "GFS" or "GDAS1".
226 @param wrfanl the wrfanl input source, which should have a get_wrfanl() function
227 that accepts an hwrf.wrf.WRFDomain and returns an hwrf.datastore.Product
228 @param wrfghost the ghost file input source, which should have a get_ghost() function
229 that accepts an hwrf.wrf.WRFDomain and returns an hwrf.datastore.Product
230 @param wrfinput the wrfinput datasource, which must have a get_wrfinput() function
231 that returns an hwrf.datastore.Product for the wrfinput file
232 @param parentTrack the parent track file input source, which must have a products()
233 function that takes the track product name and returns the produtil.datastore.Product
234 @param trackName the parent track name to pass to parentTrack.products()
235 @param ghost_domains the list of ghost domains for relocation input
236 @param dest_dir output directory for the relocation
237 @param gsi_d02 the hwrf.gsi.FGATGSI that will run the GSI for the intermediate domain.
238 @param gsi_d03 the hwrf.gsi.FGATGSI that will run the GSI for the innermost domain.
239 @param gsi_d01 the hwrf.gsi.GSIBase that will run GSI for the outermost domain.
240 @param cycling_interval negative number of seconds between cycles (-6*3600)
241 @param info the RelocationInfo object to use
242 @param fgat_times the list of FGAT times, datetime.datetime objects
243 @param centrack the product for the center FGAT time track
244 @param kwargs passed to hwrf.hwrftask.HWRFTask.__init__"""
245 assert(
not isinstance(domains,basestring))
250 if 'location' not in kwargs:
251 kwargs[
'location']=os.path.join(conf.getdir(
'intercom'),taskname)
# Normalize the sign of the cycling interval: after the unconditional
# negation, any positive result is negated again, so the net effect is
# cycling_interval = -abs(cycling_interval) (always non-positive, i.e.
# the parent cycle is in the past relative to this cycle).
255 cycling_interval=-cycling_interval
256 if cycling_interval>0:
257 cycling_interval=-cycling_interval
261 super(RelocationTask,self).
__init__(dstore,conf,section,taskname,
267 if fgat_times
is not None:
268 parent_cycle=to_datetime_rel(cycling_interval,self.conf.cycle)
270 t=to_fraction(t-parent_cycle)
271 t=int(float(round(t)))
274 'In RelocationTask.__init__, all fgat_times must '
275 'be 0 or greater (times relative to the parent '
276 'model). You gave: %s.'%(repr(t),))
279 logger.debug(
'domains: '+repr(domains))
280 self.
domains=[sim[domain]
for domain
in domains]
286 if wrfanl
is not None:
293 if self.
_ensda is not None:
297 domain,atime=self.conf.cycle)
300 domain,atime=self.conf.cycle)
308 if wrfinput
is not None:
313 if wrfghost
is not None:
314 logger.debug(
'ghost domains: '+repr(ghost_domains))
315 logger.debug(
'ghost domains iterated: '
316 +repr([ d
for d
in wrfghost.wrf() ]))
318 for domain
in ghost_domains]
319 self.
_ghost_d02 = wrfghost.get_ghost(ghost_domains[1])
321 self.
_ghost_d03 = wrfghost.get_ghost(ghost_domains[2])
329 logger.debug(
'self._wrfanl='+repr(self.
_wrfanl))
330 logger.debug(
'self._wrfanl.wrf()='+repr(self._wrfanl.wrf()))
332 'self._wrfanl.wrf().simstart()='+
333 repr(self._wrfanl.wrf().simstart()))
335 'self._wrfanl.wrf().simstart().strftime(%%Y%%m%%d%%H)='
336 +repr(self._wrfanl.wrf().simstart().strftime(
'%Y%m%d%H')))
337 dest_dir=os.path.join(
338 self.
getdir(
'WORKhwrf'),modin.lower()+
'.'+
339 self._wrfanl.wrf().simstart().strftime(
'%Y%m%d%H'))
341 logger.debug(
'self.dest_dir='+self.
dest_dir)
344 dt = self.sim.simstart() - self.conf.cycle
345 self.
_fhr = 6 + dt.days*24 + dt.seconds //3600
351 oldcom=self.
getdir(
'oldcom')
352 oldsid=self.
getdir(
'oldsid')
353 logger.info(
'oldcom directory: %s'%oldcom)
354 if os.path.isdir(oldcom):
367 elif self.
modin ==
'ENKF':
379 """!Returns the wrfinput output Product for the specified
380 domain, or None if no such domain is known.
382 @param domain the domain of interest
383 @note This is a abstract function that should be replaced in subclasses."""
412 """!Returns the wrfanl output Product for this Task for the
413 specified domain or None if no such product exists
414 @param domain the domain of interest
416 @note This is a abstract function that should be replaced in subclasses."""
420 """!Returns the wrfghost output Product for this Task for the
422 @param domain the domain of interest
423 @note This is a abstract function that should be replaced in subclasses."""
427 """!Returns the wrfinput output file for the specified time and
428 domain, or returns None if no such file exists.
429 @param atime the time of interest
430 @param domain the domain of interest"""
# NOTE(review): the second %s in the message below is filled from
# domain.strftime(...), but `domain` is a WRF domain object, not a time;
# if it has no strftime method, building this message would itself raise.
# The simulation start time (self.sim.simstart()) was presumably the
# intended value -- TODO confirm against the full source.
431 if atime
is not None and \
432 not within_dt_epsilon(atime,self.sim.simstart(),
435 'wrfinput_at_time: atime=%s is not near my time %s'
436 %(atime.strftime(
'%Y%m%d%H'),domain.strftime(
'%Y%m%d%H')))
441 """!Returns the wrfanl output file for the specified time and
442 domain, or None if no such file exists.
443 @param atime the time of interest
444 @param domain the domain of interest"""
# NOTE(review): same issue as the wrfinput check -- domain.strftime(...)
# is formatted where a time (presumably self.sim.simstart()) was intended;
# a domain object is unlikely to have strftime. TODO confirm upstream.
445 if atime
is not None and \
446 not within_dt_epsilon(atime,self.sim.simstart(),
449 'wrfanl_at_time: atime=%s is not near my time %s'
450 %(atime.strftime(
'%Y%m%d%H'),domain.strftime(
'%Y%m%d%H')))
454 """!Copies the fixed files to the local directory."""
458 tbl=self.
confstrinterp(
'{FIXhwrf}/hwrf_eta_micro_lookup.dat')
460 assert(isnonempty(tbl))
461 make_symlink(tbl,
"eta_micro_lookup.dat",
462 force=
True, logger=logger)
465 """!Deletes all temporary files created by the relocation jobs."""
469 logger.warning(
'Internal error: dest_dir is None. Cannot '
470 'delete temporary files.')
472 logger.warning(
'Internal error: dest_dir is the empty '
473 'string. Cannot delete temporary files.')
475 logger.warning(str(dd)+
': deleting this directory tree.')
476 if not os.path.exists(dd):
478 str(dd)+
': does not exist; nothing to delete.')
480 def rmerr(function,path,exc_info):
481 logger.info(
'%s: exception while deleting file: %s %s'
482 %(str(path),str(function),str(exc_info)))
484 shutil.rmtree(dd,onerror=rmerr)
485 except EnvironmentError
as ee:
# NOTE(review): this handler binds the exception as `ee`, but the log
# message below formats str(e); when this branch fires, `e` is either
# unbound (NameError) or a stale value from an earlier handler. The
# intent is almost certainly str(ee) -- confirm in the full source.
487 str(dd)+
': unable to delete this directory tree; '
488 'continuing anyway: '+str(e),exc_info=
True)
489 except Exception
as e:
491 str(dd)+
': unhandled exception deleting this '
492 'directory tree: '+str(e),exc_info=
True)
496 """!Iterates over all products generated by this task.
497 @note This is a abstract function that should be replaced in subclasses."""
498 if False:
yield 'hello'
501 frominfo=
None,**kwargs):
502 """!Delivers products to intercom via Product.deliver. Any
503 keyword arguments are passed on to Product.deliver. By
504 default, keep=False, which means the local copy of the file
505 may no longer exists. If frominfo is specified, it will be
507 @param keep If True, then the file may be moved to the destination
509 @param frominfo Ignored.
510 @param kwargs Passed to produtil.datastore.FileProduct.deliver()
511 @param logger a logging.Logger for log messages
512 @param missing a function called if the file to deliver does not exist.
513 It is passed the product and the basename of the file."""
514 if logger
is None: logger=self.
log()
515 logger.warning(
'Delivering products for %s'%(self.
taskname,))
519 bloc=os.path.basename(loc)
520 if os.path.exists(bloc):
521 logger.warning(
'%s: deliver product from ./%s'%(p.did,bloc))
522 p.deliver(frominfo=bloc,keep=keep,logger=logger,**kwargs)
524 logger.warning(
'%s: ./%s does not exist. Cannot deliver.'
526 if missing
is not None:
530 """!Writes the tcvitals (from self.storminfo) to the specified
532 @param filename the file to write
533 @param logger a logging.Logger for log messages"""
534 if logger
is None: logger=self.
log()
535 logger.info(
'Writing tcvitals to %s'%(repr(filename),))
536 with open(filename,
'wt')
as f:
537 f.write(self.storminfo.as_tcvitals()+
"\n")
538 assert(os.path.exists(filename))
541 """!Writes the ghost namelist to namelist_ghost.input. Note
542 that this overwrites, and then deletes, namelist.input and
543 fort.12. It will also create the domain.center and
544 storm.center files and fill them with correct locations.
545 @param filename the file to write
546 @param logger a logging.Logger for log messages"""
547 if logger
is None: logger=self.
log()
548 if hasattr(self.
_wrfghost,
'make_ghost_namelist'):
549 self._wrfghost.make_ghost_namelist(filename,logger=logger)
551 self._wrfghost.make_namelist(
'namelist_ghost.input')
556 """!Writes the analysis namelist to namelist_analysis.input.
557 Note that this overwrites, and then deletes, namelist.input
558 and fort.12. It will also create the domain.center and
559 storm.center files and fill them with correct locations.
560 @param filename the file to write
561 @param logger a logging.Logger for log messages"""
562 if logger
is None: logger=self.
log()
563 if hasattr(self.
_wrfanl,
'make_analysis_namelist'):
564 self._wrfanl.make_analysis_namelist(filename,logger=logger)
566 self._wrfanl.make_namelist(
'namelist_analysis.input')
572 """!The Product object for the parent track file."""
576 return self._parentTrack.product(self.
_trackName)
585 """!The vortex origin status."""
590 """!The status of the current cycle.
591 True if it is a cycled run.
592 False if it is a cold start."""
597 """!The previous cycle's COM directory."""
600 def prev_cycle_sid(self):
605 """!The storm intensity."""
606 return self.storminfo.wmax
610 """!The storm basin."""
611 return self.storminfo.pubbasin2
615 """!The domain center latitude."""
616 return self.conf.getfloat(
'config',
'domlat')
620 """!The domain center longitude."""
621 return self.conf.getfloat(
'config',
'domlon')
626 return self.storminfo.stormid3
630 """!The forecast hour."""
635 """!The RelocationInfo."""
636 if self.
info is not None and self.info.from_file
is not None:
640 tn=type(self).__name__.lower()
642 if tn.find(
'stage1')>0: stage=
'stage1'
643 if tn.find(
'stage2')>0: stage=
'stage2'
644 filename=os.path.join(self.
outdir,stage+
'.info')
646 assert(rinfo.from_file
is not None)
647 rinfo.read_info(filename,self.
log(
"info"))
651 def _make_plist_and_names(self):
652 """!Internal function to generate input product lists and names.
654 This is an internal implementation function that should not be
655 called directly. It returns a three-element tuple containing
656 a list of products, and a dict mapping from product to the
657 local filename, and a dict mapping from product to the copy
658 method. This is used to implement copy_inputs, to copy input
659 files to the local directory from remote tasks' Products."""
660 def copier(p,name,logger,*args):
661 deliver_file(p.location,name,logger=logger,keep=
True)
662 def linker(p,name,logger,*args):
663 make_symlink(p.location,name,force=
True,logger=logger)
670 plist=[ k
for k
in names.iterkeys() ]
671 actions=dict( (n,copier)
for n
in names.iterkeys() )
672 return ( plist, names, actions )
675 """!Copies, or makes, one or more input files."""
678 def namer(p,logger,*args):
return names[p]
679 def actor(p,name,logger,*args): action[p](p,name,logger,*args)
681 logger.info(
"Need product %s at location=%s, available=%s"%(
682 p.did,repr(p.location),p.available))
694 def set_ensda(self,ensda):
698 """!Returns the Product for the center FGAT time track file if
699 available, or otherwise the parent track file Product."""
705 """!Sets the Product for the center FGAT time track file.
706 @param centrack the center FGAT track product"""
707 if (centrack
is None):
709 "You must specify central track file")
713 """!Unsets the center FGAT time track file so that
714 get_centrack() will return the parent track file instead."""
719 centrack=property(get_centrack,set_centrack,
None,
720 """The track file for the center FGAT hour.""")
723 """!Gets the parent vortex track file, either from a specified
724 directory or from the tracker, run by a previous
725 hwrf.init.HWRFInit object's tracker member.
726 @param case 1 or 2: 1 for creating the atcfunix file, 2 for
727 the atcfunix_cen file."""
733 logger.info(
'case %d:tp %s'%(int(case),repr(tp)))
736 renamer=
lambda p,l:
'gfs-anl.atcfunix',
737 action=
lambda p,n,l: deliver_file(p.location,n,logger=l))
739 start = self.sim.simstart().strftime(
"%Y%m%d%H")
740 gfs_atcf = self.
confstr(
'gfs_atcf',
'')
742 if tp.available
and tp.location:
744 logger.warning(
'Using parent vortex from provided tracker data.')
745 logger.warning(
'%s (%s): parent vortex location'
748 deliver_file(ta_atcf,
'atcfunix',logger=logger)
750 deliver_file(ta_atcf,
'atcfunix_cen',logger=logger)
751 elif gfs_atcf
is not None and gfs_atcf!=
'':
753 "Using parent vortex from parent model's own track file")
754 basin = self.conf.syndat.pubbasin2
755 fields = self.
confstr(
'track_name',
'AVNO|PRE1|PRD1')
756 patn =
'(?=%s)(?=%s)(?=%s)' %(start, basin, fields)
757 logger.warning(
"%s: track file"%( gfs_atcf, ))
758 with open(gfs_atcf,
'r') as ifile:
759 logger.info("Parsing track for: start=%s basin=%s fields=%s"
760 %(repr(start),repr(basin),repr(fields)))
761 with open(
'atcfunix',
'w')
as ofile:
763 if re.search(patn, line):
764 logger.info(
'Keep : %s\n'%(repr(line),))
767 logger.info(
'Discard: %s\n'%(repr(line),))
768 logger.info(
'Done parsing track file.')
772 'Could not find a track file for parent vortex location.')
774 'Will proceed assuming parent vortex is at tcvitals.')
775 with open(
'atcfunix',
'a')
as o:
pass
777 def run_ext(self, cmd, echo=None, inputs=None, incopies=None,
778 outputs=
None, opt_outputs=
None):
779 """!Helper function for running Fortran programs that need
780 fort.* files for inputs and outputs.
782 Run an external command linking in fort.X files for input and
783 output. If self.redirect=True, redirect logs to a separate
786 @param cmd The command to execute. This function will use
787 "self.getexe()" on the command to find the external program to
789 @param echo If a list is passed in as the echo variable, then
790 the contents will be sent to the stdin of the program as a
793 @param inputs Input dictionary for files to link. See below.
795 @param incopies Input dictionary for files to copy. See below.
797 @param outputs Output dictionary for files to link. See below.
798 If the output is not present, a message is logged at ERROR
801 @param opt_outputs Optional outputs dictionary for files to
802 link. See below. If the outputs are not present, it is not
805 The dictionary arguments should consist of a fortran file
806 number and the source file.
809 inputs = {11:tcvitals, 12:wrfout_d01}
812 would produce symbolic links:
815 fort.12 -> wrfout_d01
818 input files can also be copied using incopies:
820 incopies = {11:tcvitals, 12:wrfout_d01}
823 would create files instead of links.
825 The outputs and opt_outputs (optional outputs) should be of the
826 dictionary as the inputs. As in:
827 outputs = {56:new_data_4x, 85:storm_radius}
828 this would mean the "fort.56" file would be renamed to "new_data_4x"
829 and the "fort.85" renamed to "storm_radius".
831 If opt_outputs is given then the fortran file is tested to see if it
832 exists and only if it does is it renamed to the output filename.
834 A log file will be created consisting of the stdout and stderr of the
835 command run. It will be named consisting of the taskname and command.
836 For example, if this is relocation stage 1 and the command is
837 hwrf_pert_ct then the log file is "rel_stage_1_hwrf_pert_ct.log" """
841 prog = self.
getexe(cmdname)
842 logf =
'%s/logs/%s_%s.log' %(self.
dest_dir,
843 self.__class__.__name__, cmdname)
849 if isinstance(s,float): echostr+=
"%g "%(s,)
850 elif isinstance(s,int): echostr+=
"%d "%(s,)
851 else: echostr+=
"%s "%(str(s),)
853 'Converted %s to %s for stdin input to fortran command.'
854 %(repr(echo),repr(echostr)))
865 if inputs
is None: inputs=empty
866 if outputs
is None: outputs=empty
867 if incopies
is None: incopies=empty
868 iof = dict(itertools.chain(inputs.items(), outputs.items(),
879 only_log_errors=
True, logger=logger)
881 if self.
confbool(
'sync_frequently',
True):
883 logger.warning(repr(cmd))
888 for k, v
in outputs.iteritems():
890 if os.path.exists(ffile):
891 deliver_file(ffile, v, keep=
False,logger=logger)
893 logger.error(
'%s: did not make file %s (would mv to %s)'
894 %(cmdname,ffile,str(v)))
898 for k, v
in opt_outputs.iteritems():
899 ffile =
'fort.' + str(k)
900 if os.path.exists(ffile):
901 deliver_file(ffile, v, keep=
False,logger=logger)
904 '%s: did not make file %s (would mv to %s).'
905 %(cmdname,ffile,str(v)))
908 for k,v
in inputs.iteritems():
909 if os.path.islink(
'fort.'+str(k)):
910 logger.info(
'%s: deleting input fort file (symlink to %s)'
915 for k,v
in incopies.iteritems():
916 if os.path.exists(
'fort.'+str(k)):
917 logger.info(
'%s: deleting input fort file (copy of %s)'
923 """!This is a HWRF task that encapsulates stage 1 of the vortex
926 def __init__(self,dstore,conf,section,sim,domains,taskname=None,**kwargs):
927 """!Stage1 constructor.
929 @param dstore,conf,section,sim,domains,taskname,kwargs
930 Passed to RelocationTask.__init__()"""
931 super(Stage1,self).
__init__(dstore,conf,section,sim,domains,
935 """!Runs the stage 1 of the relocation."""
944 if os.path.exists(dest_dir):
947 self.info.initopt=self.
confint(
'initopt',0)
948 tdrflagfile=self.conf.strinterp(
'dir',
'{com}/{stormlabel}.tdr')
949 if self.
_ensda is None and self.
confbool(
'tdrconditionalvinit',
False):
950 if isnonempty(tdrflagfile):
954 self.info.iflag_cold=0
956 self.
postmsg(
'Stage 1 running in directory: '+os.getcwd())
957 assert(
not re.match(
'\A/tmp',os.getcwd()))
967 self.
postmsg(
'Prior cycle missing. Cold start. '
968 'Continue from Stage 2.')
969 self.info.iflag_cold=1
970 self.info.warm_cold_flag=COLD
971 expect=self.
confbool(
'expect_cold_start')
973 self.
postmsg(
'No prior cycle exists, and no prior '
974 'cycle was expected. Cold start. '
975 'Continue from Stage 2.')
976 self.info.cold_ok=
True
978 msg=
'UNEXPECTED COLD START. Prior cycle data was '\
979 'missing. Continue from Stage 2. To override '\
980 'this error, set expect_cold_start=yes or '\
981 'allow_fallbacks=yes in the conf file for this '\
984 self.info.cold_ok=
False
985 if not self.
confbool(
'allow_fallbacks'):
989 if self.storminfo.wmax<14:
992 self.
postmsg(
'Storm is a weak storm. This is a '
993 'cold start. Continue from Stage 2.')
994 self.info.iflag_cold=1
995 self.info.warm_cold_flag=COLD
996 self.info.cold_ok=
True
1005 self.
postmsg(
'Cannot get prior cycle track. '
1006 'This is a cold start.')
1007 self.info.cold_warm_flag=COLD
1008 self.info.cold_ok=
False
1010 self.
postmsg(
'This is a warm start.')
1015 self.
postmsg(
'The hdas_atcfunix does not have '
1016 'all expected forecast hours.')
1017 self.
postmsg(
'The create_trak_guess cannot '
1018 'continue with relocation.')
1019 self.
postmsg(
'This is a cold start.')
1020 self.info.warm_cold_flag=COLD
1021 self.info.cold_ok=
True
1023 self.
postmsg(
'Stage 1 completed in directory: '
1026 self.info.iflag_cold=1
1028 self.info.ensda_relocate=
True
1033 self.
postmsg(
'Cannot get prior cycle ensda track.')
1040 self.info.ensda_relocate_continue=
False
1041 self.
postmsg(
'The ensda_atcfunix does not have '
1042 'all expected forecast hours.')
1043 self.
postmsg(
'Will not perform relocation for this member')
1045 self.
state=COMPLETED
1046 except Exception
as e:
1047 logger.critical(
'Stage 1 failed: '+str(e),exc_info=
True)
1050 self.info.write_info(os.path.join(self.
outdir,
'stage1.info'),
1053 """!Copy the namelist files from the preceding steps"""
1054 self.
log().
info(
'stage1 copy_namelist')
1061 """!Checks to see if all data is present from the prior cycle."""
1063 logger.warning(
'Checking for prior cycle data.')
1067 ftimestr=self.sim.simstart().strftime(
"%Y-%m-%d_%H:%M:%S")
1069 sid=self.storm_id.lower()
1071 sid=self.prev_cycle_sid.lower()
1074 id = d.get_grid_id()
1077 logger.info(
'ifile is %s' %ifile)
1078 if not isnonempty(ifile):
1080 'Prior cycle %s forecast does not exist. This is a '
1081 'cold start.'%(ftimestr,))
1083 logger.warning(
'Prior cycle data is present for time %s'%(ftimestr,))
1086 ftimestr2=t.strftime(
"%Y-%m-%d_%H:%M:%S")
1088 id = d.get_grid_id()
1091 if not isnonempty(ifile):
1093 'Fgat wrfout_d%02d file does not exist for forecast time '
1094 '%s. This is a cold start.'%(id,ftimestr2,))
1097 logger.info(
'Have wrfout_d%02d file for forecast time '
1098 '%s.'%(id,ftimestr2,))
1099 logger.warning(
'Prior cycle data is present for all fgat times')
1101 logger.warning(
'No fgat_times specified. Prior %s forecast is '
1102 'available.'%(ftimestr,))
1106 """!Runs the hwrf_3dvar to paste the relocated storm."""
1107 self.
log().
info(
'stage1 relocate_storm')
1108 fprog =
'hwrf_3dvar'
1110 ftimestr=self.sim.simstart().strftime(
"%Y-%m-%d_%H:%M:%S")
1111 ncks=self.
getexe(
'ncks',
'')
1117 checkrun(bigexe(ncks)[
'-6',s,t],logger=logger)
1121 domains=[ d
for d
in self.
domains ]
1123 sid=self.storm_id.lower()
1125 sid=self.prev_cycle_sid.lower()
1127 for domain
in domains:
1129 id = domain.get_grid_id()
1133 did=int(domain.get_grid_id())
1134 if domain.is_moad():
1136 prod=self._ensda.get_wrfinput(domain=domain,atime=self.conf.cycle)
1139 prod=self._ensda.get_wrfanl(domain=domain,atime=self.conf.cycle)
1140 logger.info(
'domain %s prod %s'%(str(domain),prod.did))
1143 wrfout =
"wrfout_d%02d" %id
1144 old_wrfout =
"old_hwrf_d%02d"%id
1146 logger.info(
'%s: file is HDF5, so I will assume it is '
1147 'compressed and convert back to 64-bit indexing '
1148 'NetCDF3.'%(ifile,))
1150 logger.critical(
'ncks not found; things will probably break')
1152 deliver_file(ifile,wrfout,keep=
True,logger=logger,copier=copier)
1154 prog = self.
getexe(fprog)
1155 log =
'%s/logs/%s_%s_d%02d.log' %(
1156 self.
dest_dir, self.__class__.__name__, fprog, id)
1162 if os.path.isfile(
'fort.50'):
1163 os.remove(
'fort.50')
1165 if os.path.isfile(
'fort.73'):
1166 os.remove(
'fort.73')
1169 """!Runs the fortran merge_nest program."""
1170 self.
log().
info(
'stage1 merge_nest')
1172 fprog =
'hwrf_merge_nest'
1174 fprog =
'hwrf_merge_enkf'
1182 ins = { 11:
'tcvitals.as',
1188 ins = { 11:
'tcvitals.as',
1193 ous = { 56:
'data_4x_hwrf',
1196 self.
run_ext(fprog, echo=evars, inputs=ins, outputs=ous)
1199 """!Get the previous cycle's HDAS track."""
1201 logger.info(
'stage1 copy_hdas')
1208 sid=self.storm_id.lower()
1210 sid=self.prev_cycle_sid.lower()
1213 hdas_atcf =
"%s/%s.trak.hwrf.atcfunix.%s.%s" \
1215 prev_time.strftime(
"%Y%m%d%H"), dom)
1216 if not os.path.exists(hdas_atcf):
1223 logger.warning(
'Prior cycle atcf does not exist: '+hdas_atcf)
1224 logger.warning(
'Will use an empty track.')
1227 logger.critical(
'PRIOR HWRF CYCLE PROBABLY FAILED!!')
1228 logger.critical(
'Prior cycle has wrfout files, but no 12hr '
1229 'track file here: %s'
1231 logger.critical(
'Check the prior cycle JHWRF_PRODUCTS job for '
1232 'errors. Check to see if NHC received the '
1235 with open(
'hdas_atcfunix',
'wt'):
1238 deliver_file(hdas_atcf,
'hdas_atcfunix', keep=
True,logger=self.
log())
1242 """Get the previous cycle's ensemble forecast track."""
1244 logger.info(
'stage1 copy_ensda_track')
1246 ensda_atcf=self._ensda.get_track(atime=self.conf.cycle)
1247 if ensda_atcf
is None:
1248 logger.info(
'No track for member %s.'%(self._ensda.__enkfmem))
1251 fromfile=ensda_atcf.location
1252 deliver_file(fromfile,
'ensda_atcfunix',keep=
True,logger=logger)
1256 """!Checks to see if all FGAT hours have tracks of a required
1262 logger.info(
'self.info.ensda_relocate is %s'%(repr(self.info.ensda_relocate)))
1263 if self.info.ensda_relocate:
1264 atcfunix=
'ensda_atcfunix'
1266 atcfunix=
'hdas_atcfunix'
1267 with open(atcfunix,
'rt')
as ha:
1271 hour=int(rline[30:33])
1273 if abs(icyc-hour*3600)<30:
1274 logger.info(
'Found cycling interval hour %d in %s'
1278 'Found hour %s in %s'%(repr(hour),atcfunix))
1279 except (IndexError,ValueError,TypeError)
as e:
1281 'Cannot parse hour from %s line: %s'
1283 if self.
fgat_times is None or self.info.ensda_relocate:
1287 logger.warning(
'Did not find cycling interval hour %d in '
1288 'hdas_atcfunix. This is a cold start.')
1291 if not self.info.ensda_relocate:
1292 """only 3D relocation for ensemble members"""
1296 dt=to_datetime(ftime)-parent_atime
1298 fgath=int(float(round(fdt)))
1299 if fgath
not in seen:
1301 'Could not find hour %s in hdas_atcfunix'%(repr(fgath),))
1304 logger.info(
'All FGAT hours found in hdas_atcfunix')
1307 logger.warning(
'Some FGAT hours not found in hdas_atcfunix')
1308 self.info.warm_cold_flag=COLD
1317 """!Runs the fortran hwrf_trk_guess program"""
1318 self.
log().
info(
'stage1 guess_track')
1319 fprog =
'hwrf_trk_guess'
1321 '%02d'%(self.storminfo.when.hour,),
1323 if self.info.ensda_relocate:
1324 ins = { 11:
'tcvitals.as',
1325 12:
'ensda_atcfunix',
1328 ins = { 11:
'tcvitals.as',
1331 ous = { 30:
'trak.fnl.all' }
1332 self.
run_ext(fprog, echo=evars, inputs=ins, outputs=ous)
1335 """!Runs the fortran wrf_split program."""
1336 self.
log().
info(
'stage1 split_wrf')
1337 fprog =
'hwrf_wrf_split'
1338 if self.info.ensda_relocate:
1347 int(self.info.iflag_cold),
1350 ins = { 11:
'tcvitals.as',
1355 ous = { 56:
'wrf_env',
1359 otime = self.sim.simstart().strftime(
"%Y%m%d%H")
1360 opts = { 25:
'disturbance.dat',
1361 52:
'rel_inform.'+ otime,
1362 55:
'vital_syn.' + otime,
1364 self.
run_ext(fprog, echo=evars, inputs=ins, outputs=ous,
1369 """!Runs the fortran hwrf_pert_ct program."""
1370 self.
log().
info(
'stage1 pert_ct')
1371 fprog =
'hwrf_pert_ct'
1372 if self.info.ensda_relocate:
1383 ins = { 11:
'tcvitals.as',
1389 ous = { 14:
'storm_size_p',
1391 58:
'storm_pert_new',
1393 opts = { 35:
'storm_pert_step1_1' }
1394 self.
run_ext(fprog, echo=evars, inputs=ins, outputs=ous,
1399 """!This is a HWRF task that encapsulates stage 2 of the vortex
1400 relocation which removes the parent model's vortex."""
1402 def __init__(self,dstore,conf,section,sim,domains,taskname=None,**kwargs):
1403 """!Stage2 constructor
1404 @param dstore,conf,section,sim,domains,taskname,kwargs
1405 Passed to the RelocationTask.__init__() """
1406 super(Stage2,self).
__init__(dstore,conf,section,sim,domains,
1410 """!Runs stage 2 of the relocation."""
1416 self.
postmsg(
'Stage 2 starting in directory: '+os.getcwd())
1417 assert(
not re.match(
'\A/tmp',os.getcwd()))
1419 self.info.read_info(
1420 os.path.join(self.
outdir,
'stage1.info'),logger=logger)
1431 remove_file(
'roughness',logger=logger)
1432 remove_file(
'roughness2',logger=logger)
1435 self.
postmsg(
'Stage 2 completed in directory: '+os.getcwd())
1436 self.
state=COMPLETED
1437 except Exception
as e:
1438 logger.critical(
'Stage 2 failed: '+str(e),exc_info=
True)
1442 self.info.write_info(os.path.join(
1443 self.
outdir,
'stage2.info'),logger=logger)
1446 """!Runs the hwrf_diffwrf_3dvar program on all inputs to
1447 create binary file for input to the Fortran programs."""
1448 self.
log().
info(
'stage2 relocate_storm')
1449 fprog =
'hwrf_3dvar'
1450 icom_dir = self.conf.getdir(
'intercom')
1452 prog = self.
getexe(
'hwrf_3dvar')
1454 id = d.get_grid_id()
1456 fin =
"wrfinput_d%02d" %id
1457 fou =
"new_gfs_d%02d" %id
1459 log =
'%s/logs/%s_%s_d%02d.log' %(
1460 self.
dest_dir, self.__class__.__name__, fprog, id)
1465 if os.path.isfile(
'fort.50'):
1466 os.remove(
'fort.50')
1468 if os.path.isfile(
'fort.73'):
1469 os.remove(
'fort.73')
1471 for id
in range(2, 4):
1473 fin =
"wrfghost_d%02d" %id
1474 fou =
"new_ght_d%02d" %id
1476 log =
'%s/logs/%s_%s_ghost_d%02d.log' %(
1477 self.
dest_dir, self.__class__.__name__, fprog, id)
1483 if os.path.isfile(
'fort.50'):
1484 os.remove(
'fort.50')
1486 if os.path.isfile(
'fort.73'):
1487 os.remove(
'fort.73')
1490 """!Runs the fortran hwrf_create_nest program."""
1491 fprog =
'hwrf_create_nest'
1495 ins = { 26:
'new_gfs_d01',
1498 ous = { 57:
'new_data_d01' }
1499 opts = { 56:
'new_data_1x' }
1500 self.
run_ext(fprog, echo=evars, inputs=ins, outputs=ous,
1503 deliver_file(
'new_gfs_d01',
'new_gfs_d01_org',
1504 keep=
False, logger=logger)
1505 deliver_file(
'new_data_d01',
'new_gfs_d01',
1506 keep=
False, logger=logger)
1509 """!Runs the fortran create_trak_fnl program."""
1510 self.
log().
info(
'stage2 create_track')
1511 fprog =
'hwrf_create_trak_fnl'
1513 self.sim.simstart().strftime(
"%Y"),
1516 ins = { 11:
'tcvitals.as',
1519 ous = { 30:
'trak.fnl.all_gfs' }
1520 self.
run_ext(fprog, echo=evars, inputs=ins, outputs=ous)
1523 """!Runs the fortran merge_nest program."""
1524 self.
log().
info(
'stage2 merge_nests')
1525 fprog =
'hwrf_merge_nest'
1533 ins = { 11:
'tcvitals.as',
1538 ous = { 56:
'data_4x_gfs',
1541 self.
run_ext(fprog, echo=evars, inputs=ins, outputs=ous)
1544 """!Runs the fortran split_wrf program."""
1545 self.
log().
info(
'stage2 wrf_split')
1547 if os.path.isfile(
"storm_pert_new"):
1552 rel =
'rel_inform_gfs.%s' %(
1553 self.sim.simstart().strftime(
"%Y%m%d%H"))
1554 vital =
'vital_syn_gfs.%s' %(
1555 self.sim.simstart().strftime(
"%Y%m%d%H"))
1557 fprog =
'hwrf_wrf_split'
1562 self.info.iflag_cold,
1565 ins = { 11:
'tcvitals.as',
1567 30:
'trak.fnl.all_gfs',
1573 71:
'storm_pert_gfs',
1574 85:
'storm_radius_gfs',
1578 self.
run_ext(fprog, echo=evars, inputs=ins, outputs=ous,
1584 """!This is a HWRF task that encapsulates stage 3 of the vortex
1585 relocation which relocates and pastes the vortexes together from
1588 def __init__(self,dstore,conf,section,sim,domains,taskname=None,**kwargs):
1589 """!Stage3 constructor.
1590 @param dstore,conf,section,sim,domains,taskname,kwargs
1591 Passed to the RelocationTask.__init__()"""
1592 super(Stage3,self).
__init__(dstore,conf,section,sim,domains,
1594 with dstore.transaction()
as t:
1621 """!Returns Product objects for the ghost domain output file
1622 for the specified domain.
1623 @param domain the domain of interest"""
1625 logger.debug(
'get_ghost',repr(domain))
1631 logger.debug(
'no prod_ghost_d02')
1639 logger.debug(
'no prod_ghost_d03',repr(self.
ghost_domains[2]))
1642 logger.info(
'get_ghost: no ghost for domain '+str(domain))
1644 def get_wrfout(self,domain):
1646 if not domain
in self.
domains:
1647 logger.info(
'Invalid domain: %s not in %s'%(
1648 str(domain),
', '.join([str(x)
for x
in self.omains])))
1650 logger.debug(
'get_wrfout',repr(domain))
1653 logger.debug(
'is domain 1',repr(self.
domains[0]))
1656 logger.debug(
'no prod_ens_wrfout_d01')
1661 logger.debug(
'is domain 2',repr(self.
domains[1]))
1664 logger.debug(
'no prod_ens_wrfout_d0',repr(self.
domains[1]))
1666 logger.debug(
'is not domain 2',repr(self.
domains[1]))
1667 logger.info(
'get_wrfout: no wrfout for domain '+str(domain))
1670 """!Returns a Product object for the wrfinput output file for
1671 the specified domain if the atime matches this object's
1673 @param atime the time of interest
1674 @param domain the domain of interest"""
1675 if not domain
in self.
sim:
return None
1676 domain=self.
sim[domain]
1677 if atime
is not None and \
1678 not within_dt_epsilon(atime,self.sim.simstart(),
1681 'wrfinput_at_time: atime=%s is not near my time %s'
1682 %(atime.strftime(
'%Y%m%d%H'),domain.strftime(
'%Y%m%d%H')))
1687 """!Returns a Product object for the wrfanl output file for the
1688 specified domain if the atime matches this objects'
1689 self.sim.simstart().
1690 @param atime the time of interest
1691 @param domain the domain of interest"""
1692 if atime
is not None and \
1693 not within_dt_epsilon(atime,self.sim.simstart(),
1696 'wrfanl_at_time: atime=%s is not near my time %s'
1697 %(atime.strftime(
'%Y%m%d%H'),domain.strftime(
'%Y%m%d%H')))
1702 """!Returns a Product object for the wrfanl output file for the
1704 @param domain the domain of interest"""
1711 """!Returns a Product object for the wrfinput output file. If
1712 a domain is specified, and is not the correct MOAD, then None
1714 @param domain the domain of interest"""
1715 if domain
is not None and domain!=self.
domains[0]:
return None
1719 """!Returns a Product for the storm radius file."""
1723 """!Returns a Product for the track file."""
1727 """!Iterates over all products, or all selected products.
1728 @param domains If an iterable of domains is given, only
1729 iterates over products for those domains."""
1734 for d
in self.
domains: domains.add(d)
1762 def _missing_product(self,prod,basename):
1763 """!Internal function that raises an exception when a product is missing.
1765 This is an internal implementation function. It should not be
1766 called directly. This is called by deliver_products when an
1767 expected input file is missing. It either returns, or raises
1768 an exception. See deliver_products for details.
1769 @param prod the Product
1770 @param basename the basename of the missing file"""
1772 'Mandatory output file %s is missing'%(repr(basename),))
1775 """!Runs stage 3 of the vortex relocation."""
1781 self.
postmsg(
'Stage 3 running in directory: '+os.getcwd())
1782 assert(
not re.match(
'\A/tmp',os.getcwd()))
1784 self.info.read_info(
1785 os.path.join(self.
outdir,
'stage1.info'),logger=logger)
1788 if not self.info.ensda_relocate:
1789 self.info.read_info(
1790 os.path.join(self.
outdir,
'stage2.info'),logger=logger)
1796 if self.info.ensda_relocate:
1798 if os.path.exists(
'storm_pert_new'):
1799 logger.info(
'have storm_pert_new (check 1)')
1803 if os.path.exists(
'storm_pert_new'):
1804 logger.info(
'have storm_pert_new (check 1)')
1805 self.info.warm_cold_flag=WARM
1807 logger.info(
'do not have storm_pert_new (check 1)')
1808 self.info.warm_cold_flag=COLD
1811 logger.info(
'intensity <20 or initopt=1')
1815 if os.path.exists(
'storm_pert_new'):
1816 logger.info(
'have storm_pert_new (check 2)')
1819 logger.info(
'do not have storm_pert_new (check 2)')
1824 remove_file(
'flag_file')
1829 self.
postmsg(
'Stage 3 completed in directory: '+os.getcwd())
1830 self.
state=COMPLETED
1831 except Exception
as e:
1832 logger.critical(
'Stage 3 failed: '+str(e),exc_info=
True)
1835 if read_info: self.info.write_info(os.path.join(
1836 self.
outdir,
'stage3.info'),logger=logger)
1845 """!Runs the portion of the relocation that is used for weak,
1847 self.
log().
info(
'stage3 cold_run')
1859 """!Runs the portion of the relocation that is run for cycled
1861 self.
log().
info(
'stage3 cycled_or_weak_run')
1863 if self.
gfs_flag>2
and os.path.isfile(
'flag_file2')
and \
1865 self.
log().
info(
'gfs_flag>2, have flag_file2 and modin is GFS')
1869 if os.path.isfile(
'flag_file'):
1870 self.
log().
info(
'have flag_file')
1872 if os.path.isfile(
'flag_file2'):
1873 self.
log().
info(
'have flag_file2')
1877 """Runs relocation for ensemble member"""
1878 self.
log().
info(
'stage3 cycled_or_weak_run')
1882 """!Runs the anl_4x programs.
1883 @param case 1 or 2: why is anl_4x being run."""
1884 self.
log().
info(
'stage3 anl_4x')
1886 fprog =
'hwrf_anl_4x'
1887 remove_file(
'flag_file')
1888 remove_file(
'flag_file2')
1895 ins = { 11:
'tcvitals.as',
1901 71:
'storm_pert_new',
1903 if isnonempty(
'trak.fnl.all'):
1904 ins[30] =
'trak.fnl.all'
1906 ins[30] =
'trak.fnl.all_gfs_cen'
1908 ins = { 11:
'tcvitals.as',
1914 71:
'storm_pert_new',
1917 ins = { 11:
'tcvitals.as',
1918 12:
'ensda_atcfunix',
1923 71:
'storm_pert_new',
1925 ous = { 36:
'wrf_env_new' }
1926 oous = { 56:
'new_data_4x' }
1928 self.
run_ext(fprog, echo=evars, inputs=ins, outputs=ous,
1931 deliver_file(
'storm_radius',
'storm_radius_1', keep=
True,
1934 if not os.path.exists(
'flag_file'):
1936 'NO FLAG FILE!!! The hwrf_anl_4x program did not make '
1940 """!Runs the anl_cs_10m fortran program."""
1941 self.
log().
info(
'stage3 anl_cs_10m')
1942 fprog =
'hwrf_anl_cs'
1943 assert(self.info.iflag_cold
is not None)
1946 int(self.info.iflag_cold),
1948 axisy_47 =
'%s/hwrf_storm_cyn_axisy_47'%(self.
getdir(
'FIXhwrf'))
1949 storm_20 =
'%s/hwrf_storm_20'%(self.
getdir(
'FIXhwrf'))
1950 ins = { 11:
'tcvitals.as',
1956 inc = { 71:axisy_47,
1965 ous = { 56:
'new_data_4x' }
1968 self.
run_ext(fprog, echo=evars, inputs=ins, incopies=inc, outputs=ous)
1969 if not os.path.exists(
'flag_file'):
1971 'NO FLAG FILE!!! The hwrf_anl_cs_10m program did not make '
1975 """!Runs the anl_bogus_10m fortran program."""
1976 self.
log().
info(
'stage3 anl_bogus_10m')
1977 fprog =
'hwrf_anl_bogus'
1981 axisy_47 =
'%s/hwrf_storm_cyn_axisy_47'%(self.
getdir(
'FIXhwrf'))
1982 storm_20 =
'%s/hwrf_storm_20'%(self.
getdir(
'FIXhwrf'))
1983 ins = { 11:
'tcvitals.as',
1987 61:
'storm_pert_gfs',
1988 85:
'storm_radius_gfs',
1990 inc = { 71:axisy_47,
1999 ous = { 56:
'new_data_4x' }
2001 self.
run_ext(fprog, echo=evars, inputs=ins, incopies=inc, outputs=ous)
2004 """!Runs hwrf_pert_ct for the weak storm case."""
2005 self.
log().
info(
'stage3 pert_ct_weak')
2008 if os.path.exists(a):
2009 deliver_file(a,b,keep=
True,logger=logger)
2012 '%s: does not exist; will not copy to %s'%(a,b))
2014 cp(
'storm_pert_gfs',
'storm_pert')
2015 cp(
'storm_radius_gfs',
'storm_radius')
2016 cp(
'atcfunix',
'hdas_atcfunix')
2017 cp(
'roughness2',
'roughness1')
2019 fprog =
'hwrf_pert_ct'
2024 ins = { 11:
'tcvitals.as',
2031 ous = { 14:
'storm_size_p',
2033 58:
'storm_pert_new',
2036 self.
run_ext(fprog, echo=evars, inputs=ins, outputs=ous)
2040 """!Runs hwrf_pert_ct for the gfs vortex case."""
2041 self.
log().
info(
'stage3 pert_ct_gfs')
2044 if os.path.exists(a):
2045 deliver_file(a,b,keep=
True,logger=logger)
2048 '%s: does not exist; will not copy to %s'%(a,b))
2050 remove_file(
'flag_file',logger=logger)
2051 remove_file(
'storm_pert_new',logger=logger)
2052 remove_file(
'flag_file2',logger=logger)
2054 cp(
'storm_pert_gfs',
'storm_pert')
2055 cp(
'storm_radius_gfs',
'storm_radius')
2056 cp(
'atcfunix',
'hdas_atcfunix')
2057 cp(
'roughness2',
'roughness1')
2059 fprog =
'hwrf_pert_ct'
2064 ins = { 11:
'tcvitals.as',
2073 58:
'storm_pert_new',
2076 self.
run_ext(fprog, echo=evars, inputs=ins, outputs=ous)
2080 """!Runs the hwrf_inter_2to2 program."""
2081 self.
log().
info(
'stage3 iter_2to2')
2082 fprog =
'hwrf_inter_2to2'
2086 ins = { 11:
'tcvitals.as',
2091 ins[36] =
'new_ght_d02'
2092 ous = { 56:
'data_merge_g02' }
2094 self.
log().
info(
'gsi_d02 not run, interpolate to d02')
2095 ins[36] =
'new_gfs_d02'
2096 ous = { 56:
'data_merge_d02' }
2098 self.
run_ext(fprog, echo=evars, inputs=ins, outputs=ous)
2101 """!Runs the hwrf_inter_2to2 program again."""
2102 self.
log().
info(
'stage3 inter_2to2again')
2103 fprog =
'hwrf_inter_2to2'
2107 ins = { 11:
'tcvitals.as',
2112 self.
log().
info(
'stage3 inter_2to2')
2113 ins[36] =
'new_ght_d03'
2114 ous = { 56:
'data_merge_g03' }
2116 self.
log().
info(
'gsi_d03 not run, interpolate to d03')
2117 ins[36] =
'new_gfs_d03'
2118 ous = { 56:
'data_merge_d03' }
2120 self.
run_ext(fprog, echo=evars, inputs=ins, outputs=ous)
2123 """!Runs the hwrf_inter_4to6 program."""
2124 self.
log().
info(
'stage3 4to6')
2125 fprog=
'hwrf_inter_4to6'
2129 ins = { 11:
'tcvitals.as',
2133 85:
'storm_radius_gfs',
2135 ous = { 56:
'data_merge_d01' }
2136 self.
run_ext(fprog, echo=evars, inputs=ins, outputs=ous)
2137 deliver_file(
'storm_radius_gfs',
'storm_radius',
2138 keep=
True,logger=self.
log())
2141 """!Runs the hwrf_diffwrf_3dvar to update the output files."""
2142 self.
log().
info(
'stage3 update_3dvar')
2144 fprog =
'hwrf_3dvar'
2145 prog = self.
getexe(fprog)
2148 self.
log().
info(
'stage3 update_3dvar domain='+str(d))
2149 id = d.get_grid_id()
2151 if not self.info.ensda_relocate:
2152 ifile =
'data_merge_d%02d' %id
2153 ofile =
'wrfinput_d%02d' %id
2155 gsi =
'_gsi_d%02d'%id
2156 if getattr(self, gsi,
None)
is not None:
2157 ifile =
'data_merge_g%02d' %id
2158 ofile =
'wrfghost_d%02d' %id
2162 ifile =
'new_data_4x'
2163 ofile =
'wrfout_d02'
2165 log =
'%s/logs/%s_%s_d%02d.log' %(
2166 self.
dest_dir, self.__class__.__name__, fprog, id)
2172 """!Runs the create_trak_fnl program."""
2174 logger.info(
'stage3 create_track')
2175 fprog =
'hwrf_create_trak_fnl'
2177 self.sim.simstart().strftime(
"%Y"),
2180 ins = { 11:
'tcvitals.as',
2183 ous = { 30:
'trak.fnl.all_gfs_cen' }
2185 self.
run_ext(fprog, echo=evars, inputs=ins, outputs=ous)
2190 """!This represents all three stages of the relocate. The
2191 individual stages may be accessed by rstage1, rstage2 and rstage3.
2192 The RelocationInfo object that is shared between them can be
2193 accessed by the "info" member variable"""
2194 def __init__(self,dstore,conf,section,sim,domains,
2195 taskname_pattern=
None,**kwargs):
2196 """!Relocation constructor. Creates Stage1, Stage2 and Stage3
2198 @param dstore the produtil.datastore.Datastore for database storage
2199 @param conf the hwrf.config.HWRFConfig for configuration info
2200 @param section the configuration section to use
2201 @param sim the hwrf.wrf.WRFSimulation describing the simulation being relocated
2202 @param domains the hwrf.wrf.WRFDomains being relocated
2203 @param taskname_pattern Pattern for generating the subtask tasknames,
2204 which is passed through make_taskname()
2205 @param kwargs passed to hwrf.hwrftask.HWRFTask.__init__()
2207 assert(
'info' not in kwargs)
2212 info=self.
info,**kwargs)
2216 info=self.
info,**kwargs)
2220 info=self.
info,**kwargs)
2236 """!Creates the task name for relocation stage istage based on
2237 the pattern taskname_pattern.
2239 @param taskname_pattern The string format for the taskname,
2240 which must contain exactly one %d.
2241 @param istage the integer 1, 2 or 3 to substitute into taskname_pattern"""
2243 taskname_pattern=str(taskname_pattern)
2244 return taskname_pattern % (istage)
2248 """!This is a HWRF task that merges the WRF analysis files."""
2250 def __init__(self,dstore,conf,section,relocate,wrfinput,wrfanl,
2251 taskname=
None,gsi_d01=
None,gsi_d02=
None,gsi_d03=
None,
2252 ges_d02=
None,ges_d03=
None,**kwargs):
2253 """!Merge constructor
2254 @param dstore the produtil.datastore.Datastore for database storage
2255 @param conf the hwrf.config.HWRFConfig for configuration info
2256 @param section the configuration section to use
2257 @param relocate the Stage3 of the middle FGAT time relocation
2258 @param wrfinput The source of parent model data wrfinput files.
2259 @param wrfanl The source of parent model data wrfanl files.
2260 @param taskname the task name in the database
2261 @param gsi_d01,gsi_d02,gsi_d03 hwrf.gsi.FGATGSI classes for the
2263 @param ges_d02,ges_d03 Ghost files for the first guess to GSI.
2264 @param kwargs passed to hwrf.hwrftask.HWRFTask.__init__ """
2265 domains=relocate.domains
2266 ghost_domains=relocate.ghost_domains
2267 assert(ghost_domains
is not None)
2268 assert(domains
is not None)
2270 gsi_d01.get_wrfinput()
2272 gsi_d02.get_ghost(ghost_domains[1])
2274 gsi_d03.get_ghost(ghost_domains[2])
2277 if relocate
is not None:
2279 dstore,conf,section,relocate.sim,relocate.domains,taskname,
2280 modin=relocate._modin,wrfanl=wrfanl,
2281 wrfghost=relocate._wrfghost,wrfinput=wrfinput,
2282 ghost_domains=relocate.ghost_domains,
2283 gsi_d01=gsi_d01,gsi_d02=gsi_d02,gsi_d03=gsi_d03,**kwargs)
2286 super(Merge,self).
__init__(dstore,conf,section,kwargs[
'sim'],
2287 kwargs[
'domains'],taskname,**kwargs)
2289 with dstore.transaction()
as t:
2300 """!Sets the ges_d02 and ges_d03 first guess ghost file sources."""
2301 if (ges_d02
is None) != (ges_d03
is None):
2303 "You must specify both d02 and d03 first guess files "
2304 "OR neither. You cannot specify only one of the files.")
2308 """!Returns the wrfinput output product for the specified domain
2309 or None if no such data is available
2310 @param domain the domain of interest"""
2311 if domain
is not None and domain!=self.
domains[0]:
return None
2315 """!Returns the wrfanl output product for the specified domain
2316 and time or None if no such data is available.
2317 @param atime the time of interest
2318 @param domain the domain of interest"""
2322 """!Returns the wrfanl product for the specified domain or None
2323 if no such data is available.
2324 @param domain the domain of interest"""
2327 'Requested domain %s, which is the moad. The MOAD has '
2328 'no wrfanl file.'%(repr(domain),))
2331 self.log.error(
'ERROR: requested domain %s, which is not in '
2332 'self.domains=%s'%(repr(domain),repr(self.
domains)))
2334 """!If no relocate was given, gets the storm radius file from a
2335 fix file. Also checks to see if the storm_radius file is
2336 present and non-empty, regardless of whether it came from the
2340 storm_radius=os.path.join(self.
getdir(
'FIXhwrf'),
2341 'hwrf_storm_radius')
2343 'Could not get storm_radius from the relocate jobs.')
2345 'Will use the fix file $FIXhwrf/hwrf_storm_radius instead.')
2346 make_symlink(storm_radius,
'storm_radius',force=
True,logger=logger)
2347 if not isnonempty(
'storm_radius'):
2348 msg=
'storm_radius file is missing'
2350 raise StormRadiusError(msg)
2353 """!Runs the hwrf_blend_gsi program if first guess data was
2354 supplied to the constructor."""
2357 logger.warning(
"First guess not supplied to Merge.__init__. "
2358 "Disabling hwrf_blend_gsi.")
2365 prog = self.
getexe(
'hwrf_3dvar')
2368 diffme=[ [
'gsiges_d0%d'%d,
'new_ges_d0%d'%d],
2369 [
'wrfghost_d0%d'%d,
'anl_ght_d0%d'%d] ]
2370 for infile,outfile
in diffme:
2371 log =
'%s/logs/%s_%s_blend_gsi_diff.log' %(
2372 self.
dest_dir, self.__class__.__name__, outfile)
2378 inputs={11:
'tcvitals.as',
2379 26:
'anl_ght_d0%d'%d,
2380 36:
'new_ges_d0%d'%d},
2381 outputs={56:
'new_ght_d0%d'%d})
2382 log =
'%s/logs/%s_blend_gsi_update_d0%d.log' %(
2383 self.
dest_dir, self.__class__.__name__,d)
2384 infile=
'wrfghost_d0%d'%d
2385 outfile=
'new_ght_d0%d'%d
2392 """!Iterates over output products
2393 @param domains if present, only the products for these listed domains will
2395 if domains
is None: domains=self.
domains
2402 """!Runs the merge."""
2409 logger=self.
log())
as dir:
2410 self.
postmsg(
'Merge running in directory: '+os.getcwd())
2411 assert(
not re.match(
'\A/tmp',os.getcwd()))
2416 if self.conf.getbool(
'config',
'blend_innercore'):
2419 self.
postmsg(
'Ran GSI blending.')
2421 self.
postmsg(
'Skipped GSI blending.')
2439 logger.warning(
'Not running inter_2to6 because GSI is '
2440 'disabled for domains 2 & 3')
2444 self.
postmsg(
'Merge running in directory: '+os.getcwd())
2445 except Exception
as e:
2446 logger.critical(
'Merge failed: '+str(e),exc_info=
True)
2449 def _make_plist_and_names(self):
2450 """!Internal function to generate input product lists and names.
2452 This is an internal implementation function that should not be
2453 called directly. It returns a three-element tuple containing
2454 a list of products, and a dict mapping from product to the
2455 local filename, and a dict mapping from product to the copy
2456 method. This is used to implement copy_inputs, to copy input
2457 files to the local directory from remote tasks' Products.
2459 This overrides the superclass _make_plist_and_names to add the
2460 guess and wrfghost products."""
2462 def copier(p,name,logger,*args):
2463 deliver_file(p.location,name,logger=logger,keep=
True)
2464 def linker(p,name,logger,*args):
2465 make_symlink(p.location,name,force=
True,logger=logger)
2492 plist=[ k
for k
in names.iterkeys() ]
2493 actions=dict( (n,copier)
for n
in names.iterkeys() )
2494 return ( plist, names, actions )
2497 """!Runs the hwrf_diffwrf_3dvar for all domains."""
2499 logger.info(
'relocate storm')
2500 fprog =
'hwrf_3dvar'
2501 icom_dir = self.conf.getdir(
'intercom')
2502 prog = self.
getexe(
'hwrf_3dvar')
2504 for id
in range(2, 4):
2506 fin =
"gsiges_d%02d" %id
2507 fou =
"new_ges_d%02d" %id
2509 if not isnonempty(fou):
2510 log =
'%s/logs/%s_%s_gsiges_d%02d.log' %(
2511 self.
dest_dir, self.__class__.__name__, fprog, id)
2516 if os.path.isfile(
'fort.50'):
2517 os.remove(
'fort.50')
2519 if os.path.isfile(
'fort.73'):
2520 os.remove(
'fort.73')
2523 id = d.get_grid_id()
2525 fin =
"wrfinput_d%02d" %id
2526 fou =
"new_gfs_d%02d" %id
2528 fou =
'new_hdas_d01'
2530 log =
'%s/logs/%s_%s_d%02d.log' %(
2531 self.
dest_dir, self.__class__.__name__, fprog, id)
2536 if os.path.isfile(
'fort.50'):
2537 os.remove(
'fort.50')
2539 if os.path.isfile(
'fort.73'):
2540 os.remove(
'fort.73')
2542 for id
in range(2, 4):
2544 fin =
"wrfghost_d%02d" %id
2545 fou =
"new_ght_d%02d" %id
2547 log =
'%s/logs/%s_%s_ghost_d%02d.log' %(
2548 self.
dest_dir, self.__class__.__name__, fprog, id)
2553 if os.path.isfile(
'fort.50'):
2554 os.remove(
'fort.50')
2556 if os.path.isfile(
'fort.73'):
2557 os.remove(
'fort.73')
2560 """!Runs the hwrf_inter_2to1 Fortran program to interpolate fields."""
2562 fprog =
'hwrf_inter_2to1'
2566 ins = { 26:
'new_ght_d%02d' %domain,
2567 36:
'new_gfs_d%02d' %domain,
2569 ous = { 56:
'data_merge_d%02d' %domain }
2570 self.
run_ext(fprog, echo=evars, inputs=ins, outputs=ous)
2573 """!Runs the hwrf_inter_2to1 Fortran program to interpolate fields."""
2574 self.
log().
info(
'inter_2to1ges')
2576 fprog =
'hwrf_inter_2to1'
2580 ins = { 26:
'new_ges_d%02d' %domain,
2581 36:
'new_gfs_d%02d' %domain,
2583 ous = { 56:
'ges_merge_d%02d' %domain }
2584 self.
run_ext(fprog, echo=evars, inputs=ins, outputs=ous)
2585 deliver_file(
'wrfinput_d02',
'wrfges_d02', keep=
True)
2587 fprog =
'hwrf_3dvar'
2588 prog = self.
getexe(fprog)
2590 ifile =
'ges_merge_d02'
2591 ofile =
'wrfges_d02'
2597 """!Runs the hwrf_inter_2to2 Fortran program to interpolate fields."""
2599 fprog =
'hwrf_inter_2to2'
2603 ins = { 26:
'new_ght_d03',
2607 ous = { 56:
'data_merge_d02' }
2608 self.
run_ext(fprog, echo=evars, inputs=ins, outputs=ous)
2611 """!Interpolates gsi_d02 analysis increment to d03 and
2612 adds the increment to d03 first guess"""
2616 fprog =
'hwrf_inter_2to2'
2620 ins = { 21:
'new_ges_d02',
2625 ous = { 56:
'data_merge_d03' }
2626 self.
run_ext(fprog, echo=evars, inputs=ins, outputs=ous)
2629 """!Runs the hwrf_inter_2to6 Fortran program to interpolate fields."""
2631 fprog =
'hwrf_inter_2to6'
2636 ins = { 26:
'new_gfs_d02',
2640 ous = { 56:
'data_merge_d01' }
2642 ins[36] =
'new_ght_d02'
2644 ins[36] =
'new_ght_d03'
2645 self.
run_ext(fprog, echo=evars, inputs=ins, outputs=ous)
2648 """!Runs the hwrf_inter_3to2 Fortran program to interpolate fields."""
2650 fprog =
'hwrf_inter_2to6'
2655 ins = { 26:
'new_gfs_d03',
2660 ous = { 56:
'data_merge_g02' }
2661 self.
run_ext(fprog, echo=evars, inputs=ins, outputs=ous)
2662 deliver_file(
'data_merge_g02',
'new_ght_d02', keep=
True)
2663 deliver_file(
'wrfghost_d02',
'newghost_d02', keep=
True)
2665 fprog =
'hwrf_3dvar'
2666 prog = self.
getexe(fprog)
2669 ifile =
'new_ght_d02'
2670 ofile =
'newghost_d02'
2676 """!Runs the hwrf_diffwrf_3dvar program to update the output
2678 self.
log().
info(
'update_3dvar')
2680 fprog =
'hwrf_3dvar'
2681 prog = self.
getexe(fprog)
2687 'Not updating MOAD: GSI is disabled for domains 2 & 3')
2689 id = d.get_grid_id()
2691 ifile =
'data_merge_d%02d' %id
2692 ofile =
'wrfinput_d%02d' %id
2694 log =
'%s/logs/%s_%s_d%02d.log' %(
2695 self.
dest_dir, self.__class__.__name__, fprog, id)
def __init__
Creates a new RelocationInfo object by reading in the specified *.info file.
Change directory, handle temporary directories.
This module provides a set of utility functions to do filesystem operations.
def netcdfver(filename)
What is the NetCDF version of this file?
def create_track(self)
Runs the fortran create_trak_fnl program.
def anl_bogus_10m(self)
Runs the anl_bogus_10m fortran program.
def confstrinterp(self, string, section=None, kwargs)
Alias for self.icstr for backward compatibility.
warm_cold_flag
The constants WARM, COLD or None to indicate warm vs.
def inter_2to2(self)
Runs the hwrf_inter_2to2 program.
from_file
The file that was read in.
def write_vitals
Writes the tcvitals (from self.storminfo) to the specified file.
def __str__(self)
A Pythonic string representation of this object.
def pert_ct_weak(self)
Runs hwrf_pert_ct for the weak storm case.
info
A RelocationInfo object to trade relocation information with other stages of the relocation.
def run(self)
Runs the stage 1 of the relocation.
def getexe
Alias for hwrf.config.HWRFConfig.get() for the "exe" section.
def inter_2to2(self)
Runs the hwrf_inter_2to2 Fortran program to interpolate fields.
def storm_id(self)
The storm ID.
def get_wrfinput
Returns the wrfinput output Product for the specified domain, or None if no such domain is known...
def guess_track(self)
Runs the fortran hwrf_trk_guess program.
def redirect(self)
Should subprograms' outputs be redirected to separate files?
Handles file locking using Python "with" blocks.
taskname
Read-only property: the name of this task.
def write_info
Writes this object's relocation information to the specified *.info file.
A subclass of Product that represents file delivery.
The base class of tasks run by the HWRF system.
def copy_namelist(self)
Copy the namelist files from the preceding steps.
def wait_for_products
Waits for products to be available and performs an action on them.
def blend_gsi(self)
Runs the hwrf_blend_gsi program if first guess data was supplied to the constructor.
cold_ok
Set to True if the relocation intentionally vetoes warm starting.
Raised when the relocation could not find the prior cycle's 6hr forecast, but it expected to be able ...
def set_ges(self, ges_d02, ges_d03)
Sets the ges_d02 and ges_d03 first guess ghost file sources.
def remove_file
Deletes the specified file.
This is a HWRF task that encapsulates stage 3 of the vortex relocation which relocates and pastes the...
def products(self)
Iterates over all products generated by this task.
def get_wrfinput(self, domain)
Returns the wrfinput output product for the specified domain or None if no such data is available...
def make_taskname(self, taskname_pattern, istage)
Creates the task name for relocation stage istage based on the pattern taskname_pattern.
def copy_inputs(self)
Copies, or makes, one or more input files.
def merge_nest(self)
Runs the fortran merge_nest program.
def __init__(self, dstore, conf, section, sim, domains, taskname=None, kwargs)
Stage1 constructor.
def ensda_relocate_run(self)
def products
Iterates over all products, or all selected products.
This is a HWRF task that encapsulates stage 2 of the vortex relocation which removes the parent model...
def make_ghost_namelist
Writes the ghost namelist to namelist_ghost.input.
def check_atcf_hours(self)
Checks to see if all FGAT hours have tracks of a required minimum length.
def fortcopy(forts, basedir=None, logger=None, only_log_errors=False, kwargs)
A convenience function for copying files to local fort.N files for various integers N using deliver_f...
def inter_4to6(self)
Runs the hwrf_inter_4to6 program.
def checkrun(arg, logger=None, kwargs)
This is a simple wrapper round run that raises ExitStatusException if the program exit status is non-...
rstage2
Stage2 of the relocation.
def relocate_storm(self)
Runs the hwrf_diffwrf_3dvar program on all inputs to create binary file for input to the Fortran prog...
rstage1
Stage1 of the relocation.
def set_centrack(self, centrack)
Sets the Product for the center FGAT time track file.
def read_info
Reads the relocation information into this object from the specified filename.
def get_wrfanl(self, domain)
Returns the wrfanl output Product for this Task for the specified domain or None if no such product e...
sim
The hwrf.wrf.WRFSimulation describing the WRF simulation.
def relocate_storm(self)
Runs the hwrf_diffwrf_3dvar for all domains.
def create_nest(self)
Runs the fortran hwrf_create_nest program.
def merge_nests(self)
Runs the fortran merge_nest program.
def openmp
Sets the number of OpenMP threads for the specified program.
def wrfanl_at_time(self, atime, domain)
Returns a Product object for the wrfanl output file for the specified domain if the atime matches thi...
def inter_2to6(self)
Runs the hwrf_inter_2to6 Fortran program to interpolate fields.
def confbool
Alias for self.conf.getbool for section self.section.
Raised when an impossible configuration is requested.
def warm_cold_str(self)
This is the opposite of the make_warm_cold routine: it returns "COLD", "WARM", or "None" for the cons...
def run(self)
Runs stage 3 of the vortex relocation.
def delete_temp(self)
Deletes all temporary files created by the relocation jobs.
def run_ext
Helper function for running Fortran programs that need fort.
def _missing_product(self, prod, basename)
Internal function that raises an exception when a product is missing.
Base class of tasks run by HWRF.
A shell-like syntax for running serial, MPI and OpenMP programs.
This is a HWRF task that forms the base class for all vortex relocation tasks, including the Merge...
def inter_2to1(self, domain)
Runs the hwrf_inter_2to1 Fortran program to interpolate fields.
def copy_hdas(self)
Get the previous cycle's HDAS track.
def inter_2to2_again(self)
Runs the hwrf_inter_2to2 program again.
def wrfanl_at_time(self, atime, domain)
Returns the wrfanl output product for the specified domain and time or None if no such data is availa...
def getdir
Alias for hwrf.config.HWRFConfig.get() for the "dir" section.
ghost_domains
The list of ghost domains passed to the constructor.
initopt
Initialization flag variable for the relocation.
def __init__(self, dstore, conf, section, sim, domains, taskname=None, kwargs)
Stage2 constructor.
outdir
The directory in which this task should deliver its final output.
def update_3dvar(self)
Runs the hwrf_diffwrf_3dvar to update the output files.
domains
The list of domains from sim that match the domains with the same name provided to the constructor...
def isnonempty(filename)
Returns True if the filename refers to an existent file that is non-empty, and False otherwise...
def get_track(self)
Returns a Product for the track file.
def update_3dvar(self)
Runs the hwrf_diffwrf_3dvar program to update the output domains.
Stores products and tasks in an sqlite3 database file.
location
Read-write property, an alias for getlocation() and setlocation().
def weak_cold_run(self)
Runs the portion of the relocation that is used for weak, cold storms.
This is a HWRF task that encapsulates stage 1 of the vortex relocation.
def wrfinput_at_time(self, atime, domain)
Returns a Product object for the wrfinput output file for the specified domain if the atime matches t...
This subclass of TempDir takes a directory name, instead of generating one automatically.
def makedirs
Make a directory tree, working around filesystem bugs.
Time manipulation and other numerical routines.
def center_lon(self)
The domain center longitude.
def make_analysis_namelist
Writes the analysis namelist to namelist_analysis.input.
def split_wrf(self)
Runs the fortran wrf_split program.
def get_ghost(self, domain)
Returns Product objects for the ghost domain output file for the specified domain.
def create_track(self)
Runs the create_trak_fnl program.
def fortlink
This is a convenience routine that makes many symbolic links to fort.N files for various integers N u...
def get_ghost(self, domain)
Returns the wrfghost output Product for this Task for the specified domain.
def copy_ensda_track(self)
def confint
Alias for self.conf.getint for section self.section.
centrack
The track file for the center FGAT hour.
def run(self)
Runs the merge.
cycling_interval
The positive datetime.timedelta time between cycles.
def __init__(self, dstore, conf, section, sim, domains, taskname=None, kwargs)
Stage3 constructor.
def products
Iterates over output products.
This module provides two different ways to generate Fortran namelist files from HWRFConfig sections: ...
def get_storm_radius(self)
Returns a Product for the storm radius file.
dt_epsilon
An epsilon value for time equality comparisons.
def scrub(self)
Should temporary files be deleted as soon as they are not needed?
Raised when a relocation program did not produce an expected output file.
modin
The input model: GFS or GDAS1.
def log
Obtain a logging domain.
def wrfanl_at_time(self, atime, domain)
Returns the wrfanl output file for the specified time and domain, or None if no such file exists...
def relocate_storm(self)
Runs the hwrf_3dvar program to paste the relocated storm.
info
The RelocationInfo with relocation information to trade between stages.
dest_dir
Delivery directory for outputs.
def cycled_or_weak_run(self)
Runs the portion of the relocation that is run for cycled or weak storms.
def inter_3to2(self)
Runs the hwrf_inter_3to2 Fortran program to interpolate fields.
iflag_cold
An int 0 or 1 used by several of the Fortran relocation programs to trigger based on warm or cold sta...
def __init__(self, dstore, conf, section, sim, domains, taskname_pattern=None, kwargs)
Relocation constructor.
Passes information about relocation status between relocation stages.
def get_wrfanl(self, domain)
Returns a Product object for the wrfanl output file for the specified domain.
def check_storm_radius(self)
If no relocate was given, gets the storm radius file from a fix file.
def storm_basin(self)
The storm basin.
def __init__(self, dstore, conf, section, sim, domains, taskname=None, modin='GDAS1', wrfanl=None, wrfghost=None, wrfinput=None, parentTrack=None, trackName='track0', ghost_domains=None, dest_dir=None, gsi_d02=None, gsi_d03=None, gsi_d01=None, cycling_interval=-6 *3600, info=None, fgat_times=None, centrack=None, ensda=None, kwargs)
RelocationTask constructor.
def __init__(self, dstore, conf, section, relocate, wrfinput, wrfanl, taskname=None, gsi_d01=None, gsi_d02=None, gsi_d03=None, ges_d02=None, ges_d03=None, kwargs)
Merge constructor.
fgat_times
The list of FGAT hours.
def create_atcf(self, case)
Gets the parent vortex track file, either from a specified directory or from the tracker, run by a previous hwrf.init.HWRFInit object's tracker member.
def vortex(self)
The vortex origin status.
def make_warm_cold
Returns the module-level constants COLD or WARM, or None, for the specified string value...
def fhr(self)
The forecast hour.
def wrf_split(self)
Runs the Fortran split_wrf program.
def rinfo(self)
The RelocationInfo.
def inter_2to1ges(self, domain)
Runs the hwrf_inter_2to1 Fortran program to interpolate fields.
Exceptions raised by the hwrf package.
def exe(name, kwargs)
Returns a prog.ImmutableRunner object that represents a large serial program that must be run on a co...
def center_lat(self)
The domain center latitude.
def confstr
Alias for self.conf.getstr for section self.section.
def deliver_products(self, missing=None, logger=None, keep=False, frominfo=None, kwargs)
Delivers products to intercom via Product.deliver.
def postmsg(self, message, args, kwargs)
same as produtil.log.jlogger.info()
def run(self)
Runs stage 2 of the relocation.
def wrfinput_at_time(self, atime, domain)
Returns the wrfinput output file for the specified time and domain, or returns None if no such file e...
This is a HWRF task that merges the WRF analysis files.
def del_centrack(self)
Unsets the center FGAT time track file so that get_centrack() will return the parent track file inste...
def storm_intensity(self)
The storm intensity.
def warm(self)
The status of the current cycle.
cold_ok
Set to True if the relocation intentionally vetoes warm starting.
def find_exe
Searches the $PATH or a specified iterable of directory names to find an executable file with the giv...
def anl_cs_10m(self)
Runs the anl_cs_10m Fortran program.
def get_wrfinput
Returns a Product object for the wrfinput output file.
def prev_cycle_dir(self)
The previous cycle's COM directory.
def _make_plist_and_names(self)
Internal function to generate input product lists and names.
def get_centrack(self)
Returns the Product for the center FGAT time track file if available, or otherwise the parent track f...
gfs_flag
Initialization flag variable relating to parent model vortex usage.
def bigexe(name, kwargs)
Alias for exe() for backward compatibility.
def get_wrfanl(self, domain)
Returns the wrfanl product for the specified domain or None if no such data is available.
def inter_2to3(self)
Interpolates gsi_d02 analysis increment to d03 and adds the increment to d03 first guess...
def anl_4x
Runs the anl_4x programs.
rstage3
Stage3 of the relocation.
def check_prior_cycle(self)
Checks to see if all data is present from the prior cycle.
def pert_ct_gfs(self)
Runs hwrf_pert_ct for the gfs vortex case.
def copy_fixed(self)
Copies the fixed files to the local directory.
This represents all three stages of the relocate.
def parent_track(self)
The Product object for the parent track file.
def pert_ct(self)
Runs the fortran hwrf_pert_ct program.