1 """!Creates the initial HWRF directory structure, loads information into each job.
3 This module is used to create the initial HWRF conf file in the
4 first HWRF job via the hwrf.launcher.launch(). The hwrf.launcher.load()
5 then reloads that configuration. The launch() function does more than
6 just create the conf file though. It parses the tcvitals, creates
7 several initial files and directories and runs a sanity check on the
10 The HWRFLauncher class is used in place of an hwrf.config.HWRFConfig
11 throughout the HWRF system. It can be used as a drop-in replacement
12 for an hwrf.config.HWRFConfig, but has additional features needed to
13 support sanity checks, initial creation of the HWRF system and
14 tcvitals generation."""
18 __all__=[
'load',
'launch',
'HWRFLauncher',
'parse_launch_args',
'multistorm_parse_args']
20 import os, re, sys, collections, random
24 from random
import Random
30 from hwrf.exceptions import HWRFDirInsane,HWRFStormInsane,HWRFCycleInsane, \
31 HWRFVariableInsane,HWRFInputInsane,HWRFScriptInsane,HWRFExecutableInsane,\
32 HWRFFixInsane,HWRFArchiveInsane,HWRFConfigInsane
35 """This is the multistorm argument parser. It is really just a wrapper around
38 The last Element of the returned list is the launch args for the Fake storm.
40 From the original arguments, returns a new list of launch args for all
41 the storms in a multistorm run. The SID and optional config.startfile
42 from the original sys.argv[1:] list are replaced with a storm id and a
43 config.startfile (if present) from the MULTISTORM_SIDS.
44 The following multistorm conf options are also added to each storm.
45 config.fakestormid=, config.multistorm_sids=,config.multistorm_priority_sid=,
46 config.multistorm_sids=, General structure of the returned list.
47 [[storm1, arg1, ..argN], ..[stormN, arg1, ..argN], [storm00, arg1, ..argN]]
50 args -- a copy of the initial command line args, excluding sys.argv[0]
52 case_root,parm,infiles,stids,fake_stid,multistorm_priority_sid,moreopts[]
59 startfile_idx = [args.index(arg)
for arg
in args
if 'config.startfile' in arg]
61 if len(startfile_idx) > 1:
62 logger.error(
'Exiting, More than 1 config.startfile= parameter in the argument list.')
68 assert(msids
is not None)
71 if fake_stid != msids[0]:
72 multistorm_priority_sid = msids[0]
73 elif len(multistorm_sids) > 1:
74 multistorm_priority_sid = msids[1]
77 multistorm_priority_sid = msids[0]
79 if fake_stid
in msids:
80 msids.remove(fake_stid)
82 multistorm_all_sids = list(msids)
83 multistorm_all_sids.append(fake_stid)
85 args.append(
'config.fakestormid=' + fake_stid)
86 args.append(
'config.multistorm_priority_sid=' + multistorm_priority_sid)
87 args.append(
'config.multistorm_sids=' +
' '.join(msids))
89 logger.info(
'Setting up hwrf to run as a multi storm with sids: %s' %(msids))
90 logger.info(
'HWRF multistorm: The priority sid is: %s'%(multistorm_priority_sid))
91 logger.info(
'HWRF multistorm: The multistorm fake storm id is: %s' %(fake_stid))
103 sid_passedin = args[0]
104 for i, stormid
in enumerate(multistorm_all_sids):
105 multistorms.append(args[:])
106 multistorms[i][0] = stormid
108 if sid_passedin
in multistorms[i][startfile_idx[0]]:
109 multistorms[i][startfile_idx[0]]= \
110 args[startfile_idx[0]].replace(sid_passedin,stormid)
112 multistorms[i][startfile_idx[0]]= args[startfile_idx[0]] + str(stormid)
114 for i, storm_args
in enumerate(multistorms):
115 (case_root,parm,infiles,stid,moreopt) = \
119 moreopts.append(moreopt)
120 for confbn
in [
'hwrf_3km.conf',
'hwrf_multistorm.conf' ]:
121 confy= os.path.join(parm, confbn)
122 if not os.path.exists(confy):
123 logger.error(confy+
': conf file does not exist.')
125 elif not os.path.isfile(confy):
126 logger.error(confy+
': conf file is not a regular file.')
130 confy+
': conf file is empty. Will continue anyway.')
131 logger.info(
'Conf input: '+repr(confy))
132 infiles.append(confy)
134 return (case_root,parm,infiles,stids,fake_stid,multistorm_priority_sid,moreopts)
136 def multistorm_priority(args, basins, logger, usage, PARMhwrf=None, prelaunch=None):
# NOTE(review): this block is a partially-extracted copy of the original
# module.  The embedded original line numbers (136, 141, 142, ...) are not
# contiguous, so statements are missing (e.g. the assignments of `cyc` and
# `rv`) and the fragment is not runnable as shown.  Comments only are added
# here; no code token is altered.
# Purpose (from visible code): choose the priority storm for a multistorm
# HWRF run by launching a no-directory configuration, reading the cycle's
# tcvitals file, filtering the vitals, and sorting by HRD priority.
141 YMDH=cyc.strftime(
'%Y%m%d%H')
142 (case_root,parm,infiles,stid,moreopt) = \
# init_dirs=False: configuration-only launch; no work directories are made.
144 conf =
launch(infiles,cyc,stid,moreopt,case_root,
145 init_dirs=
False,prelaunch=prelaunch,
147 syndatdir=conf.getdir(
'syndat')
# vitpattern is passed through strftime to build the per-year tcvitals filename.
148 vitpattern=conf.getstr(
'config',
'vitpattern',
'syndat_tcvitals.%Y')
149 vitfile=os.path.join(syndatdir,cyc.strftime(vitpattern))
150 multistorm=conf.getbool(
'config',
'run_multistorm',
False)
# raise_all=False: tolerate unparseable vitals lines instead of aborting.
152 rv.readfiles(vitfile, raise_all=
False)
153 rv.delete_invest_duplicates()
# Keep only vitals for this cycle and the requested basins.
155 rv.discard_except(
lambda v: v.YMDH==YMDH)
156 rv.discard_except(
lambda v: v.basin1
in basins)
# Drop East Pacific ('E' basin) storms west of 140W — presumably outside
# the supported domain; TODO confirm against HWRF domain documentation.
158 rv.discard_except(
lambda v: v.basin1!=
'E' or (v.basin1==
'E' and v.lon>=-140))
# hrd_multistorm_sorter (defined elsewhere) orders storms by priority;
# the first surviving vitals record supplies the priority storm id.
160 rv.sort_by_function(rv.hrd_multistorm_sorter)
162 sid = v.as_tcvitals().split()[1]
165 logger.info(
'No storms for cycle: '+cyc.strftime(
'%Y%m%d%H'))
171 """!Parsed arguments to scripts that launch the HWRF system.
173 This is the argument parser for the exhwrf_launch.py and
174 hwrf_driver.py scripts. It parses the storm ID and later
175 arguments (in args). Earlier arguments are parsed by the scripts
176 themselves. If something goes wrong, this function calls
177 sys.exit(1) or sys.exit(2).
179 The arguments depend on if PARMhwrf=None or not.
183 StormID CASE_ROOT /path/to/parm [options]
185 StormID CASE_ROOT [options]
188 * StormID --- three character storm identifier (ie.: 12L for Katrina)
189 * CASE_ROOT -- HISTORY or FORECAST
190 * /path/to/parm - path to the parm directory, which contains the
194 * section.variable=value --- set this value in this section, no matter what
195 * /path/to/file.conf --- read this conf file after the default conf files.
197 Later conf files override earlier ones. The conf files read in
199 * parm/hwrf_input.conf
201 * parm/hwrf_holdvars.conf
202 * parm/hwrf_basic.conf
205 @param args the script arguments, after script-specific ones are removed
206 @param logger a logging.Logger for log messages
207 @param usage a function called to provide a usage message
208 @param PARMhwrf the directory with *.conf files"""
209 if len(args)<2
or ( PARMhwrf
is None and len(args)<3):
215 if not re.match(
'^[0-9][0-9][ABCELPQSW]$',stid):
216 logger.error(
'%s: invalid storm id. Must be a three character '
217 'storm ID such as 90L or 13W'%(stid,))
220 logger.info(
'Running Storm ID is '+repr(stid))
223 case_root=args[1].upper()
224 if case_root==
'HISTORY':
226 elif case_root==
'FORECAST':
229 logger.error(
'%s: invalid case root. Must be HISTORY for '
230 'retrospective runs or FORECAST for real-time runs.'
233 logger.info(
'Case root is '+repr(case_root))
238 if not os.path.exists(parm):
239 logger.error(parm+
': parm directory does not exist')
241 elif not os.path.isdir(parm):
242 logger.error(parm+
': parm directory is not a directory')
244 logger.info(
'Scan %d optional arguments.'%(len(args)-3))
248 logger.info(
'Scan %d optional arguments.'%(len(args)-1))
250 parm=os.path.realpath(parm)
253 infiles=[ os.path.join(parm,
'hwrf_input.conf'),
254 os.path.join(parm,
'hwrf.conf'),
255 os.path.join(parm,
'hwrf_holdvars.conf'),
256 os.path.join(parm,
'hwrf_basic.conf'),
257 os.path.join(parm,
'system.conf')
262 moreopt=collections.defaultdict(dict)
263 for iarg
in xrange(len(args)):
264 logger.info(args[iarg])
266 (?P<section>[a-zA-Z][a-zA-Z0-9_]*)
268 =(?P<value>.*)$''',args[iarg])
270 logger.info(
'Set [%s] %s = %s'%(
271 m.group(
'section'),m.group(
'option'),
272 repr(m.group(
'value'))))
273 moreopt[m.group(
'section')][m.group(
'option')]=m.group(
'value')
274 elif os.path.exists(args[iarg]):
275 logger.info(
'%s: read this conf file'%(args[iarg],))
276 infiles.append(args[iarg])
279 logger.error(
'%s: invalid argument. Not an config option '
280 '(a.b=c) nor a conf file.'%(args[iarg],))
285 if not os.path.exists(file):
286 logger.error(file+
': conf file does not exist.')
288 elif not os.path.isfile(file):
289 logger.error(file+
': conf file is not a regular file.')
293 file+
': conf file is empty. Will continue anyway.')
294 logger.info(
'Conf input: '+repr(file))
295 return (case_root,parm,infiles,stid,moreopt)
298 """!Loads the HWRFLauncher created by the launch() function.
300 Creates an HWRFConfig object for an HWRF workflow that was
301 previously initialized by hwrf.launcher.launch. The only argument
302 is the name of the config file produced by the launch command.
304 @param filename The storm*.conf file created by launch()"""
311 run_multistorm=conf.getbool(
'config',
'run_multistorm',
False)
312 run_multistorm_00flag =
False
314 fakestormid=conf.getstr(
'config',
'fakestormid',
'nofakeid')
315 if fakestormid ==
'nofakeid':
316 msg =
"Looks like you are trying to run a multistorm but "\
317 "no fake storm id is defined. This will happen if there are "\
318 "no real storm ids specified for a multistorm run. "\
319 "Either provide a list of storms OR Set 'run_multistorm=no' "\
320 "in hwrf_basic.conf and check if you are setting the 'MULTISTORM' "\
321 "env var in either, the rocoto/runhwrf_wrapper or global.vars.ksh, "\
322 "and launcher_wrapper, if running the stand alone wrappers."
323 raise HWRFConfigInsane(msg)
324 this_stormid=conf.getstr(
'config',
'STID',
'nosid')
325 if fakestormid == this_stormid:
326 run_multistorm_00flag =
True
329 assert(cycle
is not None)
330 strcycle=cycle.strftime(
'%Y%m%d%H')
331 logger.info(
'Running cycle: '+cycle.strftime(
'%Y%m%d%H'))
333 WORKhwrf=conf.getdir(
'WORKhwrf')
335 tmpvit=os.path.join(WORKhwrf,
'tmpvit')
336 logger.info(tmpvit+
': read vitals for current cycle')
338 with open(tmpvit,
'rt')
as f:
341 logger.info(
'Current cycle vitals: '+syndat.as_tcvitals())
343 oldvit=os.path.join(WORKhwrf,
'oldvit')
344 logger.info(oldvit+
': read vitals for prior cycle')
345 with open(oldvit,
'rt')
as f:
347 oldsyndat=oldsyndat[0]
348 logger.info(
'Prior cycle vitals: '+oldsyndat.as_tcvitals())
350 conf.set_storm(syndat,oldsyndat)
352 if run_multistorm_00flag:
353 _load_multistorm(fakestormid,conf,logger)
358 def _load_multistorm(fakestormid,conf,logger):
359 """Do not call this. It is an internal implementation routine.
360 It is only used internally and is called during the fakestorm of
363 Adds the additional storms of a multistorm run to the HWRFConfig
# NOTE(review): partially-extracted fragment — the original line numbers
# jump (359→360→363→366, 377→381, 383→385, ...), so statements such as the
# closing of the docstring and the appends into syndat_multistorm /
# oldsyndat_multistorm are missing from this view.  Comments only added.
# Must only run when multistorm mode is enabled.
366 assert(conf.getbool(
'config',
'run_multistorm',
False))
367 multistorm_sids = conf.getstr(
'config',
'multistorm_sids').split()
368 logger.info(
'Multistorm - fakestorm run %s: Adding storm info '
369 'for storms: %s'%(fakestormid,multistorm_sids))
371 WORKhwrf4fake=conf.getdir(
'WORKhwrf')
# Accumulators for current- and prior-cycle StormInfo, one entry per real storm.
373 syndat_multistorm = []
374 oldsyndat_multistorm = []
376 for i,stormid
in enumerate(multistorm_sids):
# Each real storm's work directory is derived by substituting its storm id
# into the fake storm's WORKhwrf path — assumes the fake storm id appears
# exactly once in that path; TODO confirm.
377 WORKhwrf4real = WORKhwrf4fake.replace(fakestormid,stormid)
# Read this storm's current-cycle vitals from its tmpvit file.
381 tmpvit=os.path.join(WORKhwrf4real,
'tmpvit')
382 logger.info(tmpvit+
': Multistorm %s: read vitals for current cycle'%(stormid))
383 with open(tmpvit,
'rt')
as f:
385 logger.info(
'Multistorm %s: Current cycle vitals: %s'%(
386 stormid,str(syndat_multistorm[i].as_tcvitals())))
# Read this storm's prior-cycle vitals from its oldvit file.
388 oldvit=os.path.join(WORKhwrf4real,
'oldvit')
389 logger.info(oldvit+
': Multistorm %s: read vitals for prior cycle'%(stormid))
390 with open(oldvit,
'rt')
as f:
392 logger.info(
'Multistorm %s: Prior cycle vitals: %s'%(
393 stormid,str(oldsyndat_multistorm[i].as_tcvitals())))
# Hand the collected per-storm vitals to the configuration object.
400 conf.set_storm_multistorm(multistorm_sids,syndat_multistorm,oldsyndat_multistorm)
402 def launch(file_list,cycle,stid,moreopt,case_root,init_dirs=True,
403 prelaunch=
None, fakestorm=
False, fakestorm_conf=
None,
# NOTE(review): partially-extracted fragment — original line numbers are
# non-contiguous (403→405, 435→437, 498→507, 535→537, ...), so the end of
# the signature, the creation of `conf`/`logger`, directory initialization,
# and the function's return statement are all missing from this view.
# Comments only are added; no code token is altered.
405 """!Initializes the directory structure for a new HWRF workflow.
407 This function runs sanity checks on the HWRF installation and the
408 arguments to this function. If a cycle is supplied, it then calls
409 a prelaunch function, and then generates the configuration file
410 and initial directory structure.
412 You can run this function in a special mode that just reads the
413 conf file, without specifying a cycle, or making directories. To
414 do that, send cycle=None and init_dirs=False. That mode is used
415 by the script that prepares the rocoto XML file for a multi-cycle
418 @returns the full path to the conf file that is created as a
419 result. That conf file should be passed in to the load() function
420 at the beginning of every job.
422 @param file_list a list of conf files to read
423 @param cycle the cycle to run; anything accepted by to_datetime
424 @param stid the three character storm identifier for the storm to run.
425 For example, stid=11L is the eleventh storm of the season in the
426 Atlantic basin. Although this argument is optional, the single
427 storm HWRF workflow will fail if stid is not provided.
428 @param moreopt a dict of dicts with additional options to set. This
429 maps section name to option to value.
430 @param case_root HISTORY for retrospective mode, FORECAST for real-time
431 @param init_dirs True if the initial directories should be created,
432 @param prelaunch a function to call on the configuration before
433 writing it to disk. Takes as arguments: conf,logger,cycle
434 Note that the logger or cycle may be None. The conf is the
435 configuration object that will be written. """
# Validate the input conf file list (Python 2 `basestring` check).
437 for filename
in file_list:
438 if not isinstance(filename,basestring):
439 raise TypeError(
'First input to hwrf.config.for_initial_job '
440 'must be a list of strings.')
444 logger.info(
'FAKESTORM: ' +repr(fakestorm))
445 logger.info(
'FAKESTORM CONF: ' +repr(fakestorm_conf))
446 logger.info(
'GLOBAL STORM NUM: ' +repr(storm_num))
# Record the cycle on the configuration when one was supplied.
448 if cycle
is not None:
449 conf.cycle=to_datetime(cycle)
450 logger.info(
'Caller wants to launch a %s run of cycle %s storm %s.'
451 %(case_root,conf.cycle.strftime(
'%Y%m%d%H'),stid))
453 logger.info(
'Caller wants to launch a %s run of storm %s.'
# Mirror case_root into both [holdvars] and [config] sections.
455 conf.add_section(
'holdvars')
456 conf.set(
'holdvars',
'CASE_ROOT',case_root)
457 conf.set(
'config',
'case_root',case_root)
# HISTORY = retrospective; the other branch (presumably FORECAST) sets
# realtime mode — the `elif`/`else` header falls in the missing lines.
458 if case_root==
'HISTORY':
459 conf.set(
'config',
'fcsthist',
'hist')
460 conf.set(
'config',
'realtime',
'false')
463 conf.set(
'config',
'fcsthist',
'fcst')
464 conf.set(
'config',
'realtime',
'true')
# Parse every requested conf file in order (later files override earlier).
467 for filename
in file_list:
468 logger.info(
"%s: parse this file"%(filename,))
# In (presumably) FORECAST mode, redirect the default 'hwrfdata' catalog
# to the configured forecast catalog — the guarding condition is in the
# missing lines; TODO confirm.
472 input_catalog=conf.get(
'config',
'input_catalog',
'hwrfdata')
473 if input_catalog==
'hwrfdata':
474 fcst_catalog=conf.get(
'config',
'fcst_catalog')
475 conf.set(
'config',
'input_catalog',fcst_catalog)
476 jlogger.info(
"FORECAST mode, so changing input_catalog to %s"
477 %(repr(fcst_catalog),))
# Apply caller-supplied section.option=value overrides.
479 if moreopt
is not None:
480 for section,options
in moreopt.iteritems():
481 if not conf.has_section(section):
482 conf.add_section(section)
483 for option,value
in options.iteritems():
484 logger.info(
'Override: %s.%s=%s'
485 %(section,option,repr(value)))
486 conf.set(section,option,value)
487 conf.guess_default_values()
# cycling_interval: hours in config, converted to negative seconds so that
# to_datetime_rel() steps backward to the prior cycle.
488 cycling_interval=conf.getfloat(
'config',
'cycling_interval',6.0)
489 cycling_interval=-abs(cycling_interval*3600.0)
490 if cycle
is not None:
491 other_cycle=to_datetime_rel(cycling_interval,conf.cycle)
# Generate vitals: real storms read tcvitals+messages, the fake storm of a
# multistorm run reads synthetic fake vitals instead.
493 if stid
is not None and cycle
is not None and not fakestorm:
494 revit=conf.read_tcvitals_and_messages(other_cycle=other_cycle)
495 conf.gen_vitals(stid,cycling_interval,revit)
496 elif stid
is not None and cycle
is not None and fakestorm:
497 revit=conf.read_fake_tcvitals()
498 conf.gen_vitals(stid,cycling_interval,revit)
507 griblockdir=conf.getstr(
'regribber',
'griblockdir',
'')
# Expand key [dir] entries now so later interpolation cannot fail.
511 logger.info(
'Expand certain [dir] values to ensure availability '
512 'before vitals parsing.')
513 for var
in (
'WORKhwrf',
'HOMEhwrf',
'com' ):
514 expand=conf.getstr(
'dir',var)
515 logger.info(
'Replace [dir] %s with %s'%(var,expand))
516 conf.set(
'dir',var,expand)
# If a community (DTC) GSI checkout is present under sorc/GSI, point
# FIXgsi at its fix-file directory.
518 dtcgsi=os.path.join(conf.getdir(
'HOMEhwrf'),
'sorc/GSI')
519 if os.path.exists(dtcgsi):
520 if os.path.isdir(dtcgsi):
521 logger.info(
'%s: community GSI is checked out, use '
522 'community gsi fix files '%(dtcgsi,))
523 conf.set(
'dir',
'FIXgsi',os.path.join(dtcgsi,
'fix'))
# Decide and persist the outer domain center (domlat/domlon).
526 conf.decide_domain_center()
527 loc=conf.getdir(
'domlocfile')
528 logger.info(
'%s: Writing domain center.'%(loc,))
529 with open(loc,
'wt')
as f:
531 conf.getfloat(
'config',
'domlat'),
532 conf.getfloat(
'config',
'domlon')))
# Give the caller a final chance to mutate conf before it is written.
534 if prelaunch
is not None:
535 prelaunch(conf,logger,cycle)
# Write the master conf file and the PDY helper file.
537 confloc=conf.getloc(
'CONFhwrf')
538 logger.info(
'%s: write hwrf.conf here'%(confloc,))
539 with open(confloc,
'wt')
as f:
542 with open(os.path.join(conf.getdir(
'WORKhwrf'),
'PDY'),
'wt')
as f:
543 f.write(conf.strinterp(
544 'config',
'cyc={HH}\nPDY={YMD}\nYMDH={YMDH}\n'))
# Presumably fake-storm-only: write a per-storm conf into the fake storm's
# com directory — the guarding condition is in the missing lines.
547 sfile = os.path.join(fakestorm_conf.strinterp(
'dir',
'{com}'),
548 'storm%d.conf' %storm_num)
549 logger.info(
'%s: write STORM conf here'%(sfile,))
550 with open(sfile,
'wt')
as f:
556 """!A replacement for the hwrf.config.HWRFConfig used throughout
557 the HWRF system. You should never need to instantiate one of
558 these --- the launch() and load() functions do that for you. This
559 class is the underlying implementation of most of the
560 functionality described in launch() and load()"""
562 """!Creates a new HWRFLauncher
563 @param conf The configuration file."""
564 super(HWRFLauncher,self).
__init__(conf)
572 This is intended to return the one letter basin, numeric storm
573 ID and year for the specified storm number (1-10).
575 @bug The hwrf.launcher.HWRFLauncher.storm_for_stormnum() is
576 not implemented and should probably be removed."""
579 """!Decide the outermost domain's center.
581 If the domain center is not already set in the [config]
582 section domlat and domlon variables, decides the domain center
583 using the hwrf.storminfo.StormInfo.hwrf_domain_center routine.
584 @param logger the logging.Logger for log messages."""
585 if logger
is None: logger=self.
log()
588 cenla=self.
getfloat(
'config',
'domlat')
589 cenlo=self.
getfloat(
'config',
'domlon')
590 logger.info(
'Domain center is already set to lat=%g lon=%g'
593 (cenlo, cenla) = self.syndat.hwrf_domain_center(logger)
594 self.
set(
'config',
'domlat',cenla)
595 self.
set(
'config',
'domlon',cenlo)
596 logger.info(
'Decided on domain center lat=%g lon=%g'%(cenla,cenlo))
599 """!Decides the location of the vitals file.
601 Decides the location of the vitfile that should be read in by
602 read_precleaned_vitfile. Optionally, you can specify the
603 storm number (1-10) of the storm whose vitals should be read
604 in. Otherwise, a reasonable guess will be made.
605 @param storm_num the index of the storm from 1-10
606 @returns the vitals path"""
607 if storm_num
is not None:
608 storm_num=int(storm_num)
609 vitfile=os.path.join(self.
getdir(
'WORKhwrf'),
610 'storm%d.vitals'%(storm_num,))
612 stormlabel=self.
getstr(
'config',
'stormlabel',
'storm1')
613 vitfile=os.path.join(self.
getdir(
'WORKhwrf'),
614 '%s.vitals'%(stormlabel,))
622 """ Intended use is for the multistorm fake storm. Same as the
623 read_tcvitals_and_messages method except the vitals are
624 from fakestorm_vitals in hwrf_multistorm.conf. basd on the arguments."""
629 default_fakestorm_vitals =
'NHC 00L FAKE ' +\
630 self._cycle.strftime(
'%Y%m%d %H%M') +\
631 ' 250N 800W -99 -99 -999 -999 -099 -9 -99 -999 -999 -999 -999 M'
633 if fakestorm_vitals
is None:
634 fakestorm_vitals=self.
getstr(
'config',
'fakestorm_vitals',default_fakestorm_vitals)
636 if fakestorm_vitals == default_fakestorm_vitals:
637 logger.info(
'Using default fakestorm vitals: %s'%(default_fakestorm_vitals))
638 inputs.append(fakestorm_vitals)
640 revital.readvitals(inputs,raise_all=
False)
644 include_messages=
True,other_cycle=
None):
645 """!Reads in the tcvitals file and message files.
647 Reads in the tcvitals files for the current cycle and
648 optionally another cycle, which may be in the same file. Also
649 reads in message files if requested. Cleans the result up and
650 returns it as an hwrf.revital.Revital object.
652 @param vitdir optional: the directory in which to find the tcvitals.
653 Default: [dir] section syndat variable.
655 @param vitpattern optional: passed into strftime to generate the
656 name of the vitals file within vitdir. Default: [conf]
657 section vitpattern variable, or syndat_tcvitals.%Y if
660 @param include_messages optional flag: if True, attempts to find
661 the hurricane message files, and includes them in the
662 list of files to read in. Default: True.
664 @param other_cycle optional: another cycle whose vitals file
665 should also be parsed. This can be anything accepted by
666 to_datetime_rel(...,self.cycle). This is intended to
667 allow year-crossing cycling, such as a January 1, 00:00
668 UTC cycle that is a warm start off of a prior December
669 31, 18:00 UTC cycle. If the other_cycle's vitals file
670 is the same as the one from self.cycle, then the file is
673 @return an hwrf.revital.Revital with the vitals data"""
678 vitdir=self.
getdir(
'syndat')
679 if vitpattern
is None:
680 vitpattern=self.
getstr(
'config',
'vitpattern',
681 'syndat_tcvitals.%Y')
682 logger.info(
'VITDIR: %s' %(vitdir))
683 file1=os.path.join(vitdir,self._cycle.strftime(vitpattern))
685 if other_cycle
is not None:
686 other_cycle=to_datetime_rel(other_cycle,self.
_cycle)
687 file2=os.path.join(vitdir,other_cycle.strftime(vitpattern))
693 mdir=self.
getdir(
'mesagdir',
'')
694 if mdir
is None or mdir==
'':
695 if 'mesagdir' in ENV:
698 mdir=
'/com/hur/'+ENV[
'envir']+
'/inpdata'
700 mdir=
'/com/hur/prod/inpdata'
703 nstorms_filename=os.path.join(mdir,
'nstorms')
706 with open(nstorms_filename,
'rt')
as nstorms_file:
707 dat=nstorms_file.readline()
709 except (EnvironmentError,ValueError,TypeError)
as e:
710 logger.error(
'%s: error reading: %s. Will read all storms.'%(
711 nstorms_filename,str(e)),exc_info=
True)
712 for imessage
in xrange(nstorms):
713 file=os.path.join(mdir,
'message%d'%(imessage+1,))
714 if os.path.exists(file):
717 self.
log().info(
'read vitals from: '+
','.join(inputs))
719 revital.readfiles(inputs,raise_all=
False)
723 """!Sets the storm that is to be run.
725 Sets the syndat and oldsyndat member variables, and several
726 related options in the [config] section, to the storm in the
727 provided tcvitals or message file data.
729 * config.STID --- The three character storm id (ie.: 12L) of
731 * config.stnum --- the numeric part of config.STID
732 * config.basin1 --- the basin part of STID (ie.: the L in 12L)
733 * config.basin1lc --- the lower-case version of config.basin1
735 @param syndat the hwrf.storminfo.StormInfo for this cycle's vitals
736 @param oldsyndat the hwrf.storminfo.StormInfo for the prior cycle"""
738 if oldsyndat
is not None:
740 self.
set_options(
'config',STID=syndat.stormid3,stnum=syndat.stnum,
741 basin1=syndat.basin1,basin1lc=syndat.basin1lc)
742 self.__dict__[
'syndat']=syndat.copy()
743 if oldsyndat
is not None:
744 self.__dict__[
'oldsyndat']=oldsyndat.copy()
748 """This is meant to be an internal implementation function and
749 should not be called directly. This is meant to only be used internally
750 by the fakestorm of a multistorm run.
752 Adds the syndat_multstorm and oldsyndat_multistorm member
753 variables for the fake storm. They contain the StormInfo objects
754 for all the storm in a multistorm run from the provided tcvitals
755 or message file data.
757 It is ultimately used for access to each storm's lat/lon
758 information in a multistorm run. This is needed for the
759 swcorner calculation for all the "stormNouter" storms.
768 oldsyndat_fromcopy = []
772 for index
in range(len(multistorm_real_sids)):
774 if oldsyndat4multistorm[index]
is not None:
776 syndat_fromcopy.append(syndat4multistorm[index].copy())
778 if oldsyndat4multistorm[index]
is not None:
779 oldsyndat_fromcopy.append(oldsyndat4multistorm[index].copy())
781 oldsyndat_fromcopy.append(
None)
783 self.__dict__[
'syndat_multistorm']=syndat_fromcopy
784 self.__dict__[
'oldsyndat_multistorm']=oldsyndat_fromcopy
787 """!Sets the random seed for ensemble perturbations.
789 Automatically decides a random seed for the tcvitals
790 perturbation, based on the storm number, basin and cycle. The
791 number and basin used are before the invest renumbering
794 @param loud If loud=True (the default), then a message is sent
795 to the jlogfile via postmsg with the seed, and information
796 about the calculation that went into it."""
798 icycle=int(self.cycle.strftime(
'%Y%m%d%H'))
800 cbasin=str(si.basin1).upper()
802 seed=icycle ^ istnum ^ ibasin
805 'Automatic perturbation seed calculation: '
806 '%d %d%s => seed = %d^%d^ord("%s") = %d^%d^%d = %d'%(
807 icycle,istnum,cbasin,
808 icycle,istnum,cbasin,
809 icycle,istnum,ibasin,
813 def gen_vitals(self,STID,cycling_interval,revital,storm_num=None):
814 """!Generate tcvitals files
# NOTE(review): partially-extracted fragment — original line numbers are
# non-contiguous (822→824, 827→829, 841→844, 866→869, ...), so the
# assignments of `logger`, `prior`, `syndat`, `oldsyndat`, `nodatasyndat`,
# `vitbase` and several control-flow headers are missing from this view.
# Comments only are added; no code token is altered.
816 Given an hwrf.revital.Revital object, preferably from
817 read_precleaned_vitfile or read_tcvitals_and_messages,
818 searches for the specified storm's vitals. Creates the files
819 that are expected to exist in the WORKhwrf directory. The
820 filenames are based off of the vitbase variable, but with
821 various suffixes appended. This function should only be
822 called once per workflow, per storm.
824 @param STID the three character stormid (12L)
825 @param cycling_interval seconds between HWRF cycles (6*3600)
826 @param revital The hwrf.revital.Revital with tcvitals data
827 @param storm_num The storm index 1-10"""
# Numeric part of the storm id, parsed base-10 (e.g. '12' from '12L').
829 stnum=int(STID[0:2],10)
831 strcycle=self._cycle.strftime(
'%Y%m%d%H')
# Normalize cycling_interval to positive hours for prior-cycle math.
835 if cycling_interval<0:
836 cycling_interval=-cycling_interval
837 cycling_interval=cycling_interval/3600.0
839 strprior=prior.strftime(
'%Y%m%d%H')
840 logger.info(
'gen_vitals: cycle=%s interval=%s prior=%s STID=%s'%(
841 repr(self.
cycle),repr(cycling_interval),repr(prior),
# keep_condition: retain vitals whose current OR pre-renumbering storm id
# matches the requested STID.
844 def keep_condition(vit):
845 return vit.stormid3.upper()==STID
or \
846 (
'old_stormid3' in vit.__dict__
and
847 vit.old_stormid3.upper()==STID )
# Branch 1 (visible below): storm number >=50 (an invest) — keep vitals
# without renumbering.  Branch 2: renumber invests, then build both the
# renumbered and unrenumbered views.  The `if`/`else` headers are missing.
850 logger.info(
'%s: Not renumbering invests because %d>=50.'
852 unrenumbered=revital.copy()
853 unrenumbered.discard_except(keep_condition)
854 unrenumbered.clean_up_vitals()
855 renumbered=unrenumbered
857 logger.info(
'%s: Renumber and unrenumber invests.'%(STID,))
858 unrenumbered=revital.copy()
859 unrenumbered.renumber(unrenumber=
True)
860 unrenumbered.discard_except(keep_condition)
861 unrenumbered.clean_up_vitals()
862 renumbered=unrenumbered.copy()
863 renumbered.swap_numbers()
864 renumbered.clean_up_vitals()
865 unrenumbered.mirror_renumbered_vitals()
866 unrenumbered.clean_up_vitals()
# Locate this cycle's vitals for STID; error if none found.
869 for vit
in renumbered.each(STID):
875 'Error: cannot find %s cycle %s'%(STID,strcycle))
876 logger.info(
'syndat='+syndat.as_tcvitals())
# Ensemble wind-speed perturbation: only members 1..min(98,ensize) are
# perturbed; ENS=99 is the deterministic (unperturbed) member.
880 ens=self.
getint(
'config',
'ENS',99)
881 ensize=self.
getint(
'ensemble',
'ensize',20)
882 if ens>0
and ens<99
and ens<=ensize:
# Seed either computed automatically (presumably via tcautoseed) or taken
# verbatim from [ensemble] tcvitals_seed; TODO confirm the 'auto' branch.
883 seedmethod=self.
getstr(
'ensemble',
'tcvitals_seed')
884 if seedmethod==
'auto':
887 seed=self.
getint(
'ensemble',
'tcvitals_seed')
888 vmax_pert=self.
getint(
'ensemble',
'vmax_pert')
894 logger.info(
'ENS perturbations: %s sum %s'%(
895 (
', '.join([repr(s)
for s
in vperts]) ),
898 'ENS %d (of %d) wind perturbation %d m/s'
903 'ENS %d (of %d) wind perturbation disabled'%(ens,ensize))
906 'ENS %d (of %d) is not a perturbed ensemble member; '
907 'not perturbing wind.'%(ens,ensize))
# Search prior-cycle vitals (old ids included) for a candidate that has
# data on disk; fall back to a "no data" candidate otherwise.
912 for vit
in unrenumbered.each(STID,old=
True):
913 if vit.when!=prior:
continue
914 if oldsyndat
is not None and oldsyndat.stnum<50:
915 logger.info(
'%s %s: not checking these vitals for data on '
916 'disk since I found a non-invest number %s '
917 'already with data on disk'
918 %(str(vit.stormid3),str(vit.YMDH),
919 str(oldsyndat.stormid3)))
# {HISTCHECK}: conf-interpolated path whose existence marks prior-cycle
# data on disk for this candidate storm id.
922 'config',
'{HISTCHECK}',atime=prior,ftime=prior,
923 oldvit=vit.__dict__,vit=syndat.__dict__)
924 if os.path.exists(checkfile):
925 logger.info(
'%s: exists'%(checkfile,))
926 logger.info(
'%s %s: prior is %s %s and has data on disk'%
927 (STID,strcycle,vit.stormid3,strprior))
930 logger.info(
'%s: does not exist'%(checkfile,))
931 logger.info(
'%s %s: prior could be %s %s but there is '
933 (STID,strcycle,vit.stormid3,strprior))
934 if oldsyndat
is None:
935 if nodatasyndat
is not None and nodatasyndat.stnum<50:
936 logger.info(
'%s %s: not using as backup since I found a '
937 'non-invest number %s already'
938 %(str(vit.stormid3),str(vit.YMDH),
939 str(nodatasyndat.stormid3)))
# Warm start expected when prior-cycle data was found on disk.
943 self.
set(
'config',
'expect_cold_start',
'no')
944 if oldsyndat
is None:
945 logger.info(
'%s %s: no storm IDs for prior cycle have data '
946 'on disk.'%(STID,strcycle))
947 if nodatasyndat
is not None:
948 oldsyndat=nodatasyndat
949 logger.info(
'%s %s: will use %s %s as prior cycle storm.'
950 %(STID,strcycle,oldsyndat.stormid3,strprior))
951 logger.info(
'prior vitals: '+oldsyndat.as_tcvitals())
# Cold start: no prior vitals at all — extrapolate backward from the
# current vitals by one cycling interval.
953 logger.warning(
'No prior syndat available. This is a cold '
954 'start. I will extrapolate vitals.')
955 oldsyndat=syndat-cycling_interval
956 logger.warning(
'extrapolated vitals: %s'
957 %(oldsyndat.as_tcvitals()))
958 self.
set(
'config',
'expect_cold_start',
'yes')
960 logger.info(
'%s %s prior cycle on disk for %s %s'
961 %(STID,strcycle,oldsyndat.stormid3,strprior))
962 logger.info(
'prior cycle on disk: '+oldsyndat.as_tcvitals())
# Write the output vitals files, all based on `vitbase` (assigned in a
# missing line) plus suffixes: .allids, .renumberlog, .oldid, plus the
# bare vitbase, tmpvit and oldvit files.  Python 2 `print>>file` syntax.
967 vitbasedir=os.path.dirname(vitbase)
970 logger.info(
'Reformat vitals...')
971 filename=vitbase+
'.allids'
973 filename+
': write unrenumbered vitals with all storm IDs')
974 with open(filename,
'wt')
as vitalsout:
975 for vit
in unrenumbered.each(stormid=STID,old=
True):
976 print>>vitalsout, vit.as_tcvitals()
977 filename=vitbase+
'.renumberlog'
978 logger.info(filename+
': write renumberlog with my storm ID')
979 logger.info(vitbase+
': write renumbered vitals')
980 with open(filename,
'wt')
as renumberlog:
981 with open(vitbase,
'wt')
as vitalsout:
982 renumbered.print_vitals(vitalsout,renumberlog=renumberlog,
983 stormid=STID,format=
'tcvitals')
984 filename=vitbase+
'.oldid'
985 logger.info(filename+
': write vitals with original ID')
986 with open(filename,
'wt')
as vitalsout:
987 for vit
in renumbered.each(stormid=STID):
988 print>>vitalsout, vit.old().as_tcvitals()
# tmpvit/oldvit: the current- and prior-cycle vitals files that load()
# and _load_multistorm() read back in later jobs.
990 filename=os.path.join(self.
getdir(
'WORKhwrf'),
'tmpvit')
991 logger.info(filename+
': write current cycle vitals here')
992 with open(filename,
'wt')
as tmpvit:
993 print>>tmpvit, self.syndat.as_tcvitals()
995 filename=os.path.join(self.
getdir(
'WORKhwrf'),
'oldvit')
996 logger.info(filename+
': write prior cycle vitals here')
997 with open(filename,
'wt')
as tmpvit:
998 print>>tmpvit, self.oldsyndat.as_tcvitals()
1001 """!Runs a sanity check on the ensemble configurations.
1005 1. If the GEFS-based forecast ensemble is in use, a valid
1006 ensemble ID is chosen.
1007 2. If a valid ensemble ID is chosen, the GEFS-based forecast
1009 3. The user does not enable both the GEFS-based forecast
1010 ensemble and the GFS-based DA ensemble.
1011 4. If the GFS-based DA ensemble is in use, at least thirty
1012 members are chosen, and no more than eighty.
1013 @param enset a set of ensemble ids
1014 @param logger a logging.Logger for log messages"""
1016 has_gefs_members=
False
1017 has_deterministic=
False
1021 if iens>=0
and iens<=20:
1022 has_gefs_members=
True
1024 has_deterministic=
True
1026 raise HWRFConfigInsane(
1027 "Invalid ensemble ID %s: must be 00-20 or 99"
1030 if has_deterministic
and has_gefs_members:
1031 raise HWRFConfigInsane(
1032 "You cannot run the GFS-based deterministic HWRF (ENS=99) "
1033 "and GEFS-based hwrf (ENS=00 through 20) in the same "
1036 is_fcst_ens=self.
getbool(
'config',
'is_forecast_ensemble',
False)
1037 fcst_ens=has_gefs_members
1038 da_ens=self.
getbool(
'config',
'run_ensemble_da')
1040 if (fcst_ens
or is_fcst_ens)
and da_ens:
1041 raise HWRFConfigInsane(
1043 You cannot run both the GFS-based DA ensemble (ENS=99
1044 run_ensemble_da=yes) and GEFS-based forecast ensemble (ENS=00 through
1045 20, run_ensemble_da=no). Turn one of them off.
1047 To run the GEFS-based HWRF ensemble with no data assimilation, you
1048 must set the ensemble ID to one or more numbers from 00-20 and specify
1049 the hwrf_ensemble_$YYYY override file:
1051 ./run_hwrf.py 01-20 2015 03W FORECAST ../parm/hwrf_ensemble_2014.conf
1053 To run the deterministic HWRF with ensemble covariances from six hour
1054 forecasts of HWRF off of the GFS ENKF, do this:
1056 ./run_hwrf.py 2015 03W FORECAST config.run_ensemble_da=yes
1058 You cannot do both.""")
1060 if is_fcst_ens!=fcst_ens:
1061 raise HWRFConfigInsane(
1063 When running the GEFS-based HWRF ensemble, you must set the ensemble
1064 ID to one or more numbers from 00-20 and specify the
1065 hwrf_ensemble_$YYYY override file:
1067 ./run_hwrf.py 01-20 2015 03W FORECAST ../parm/hwrf_ensemble_2014.conf
1069 To run the deterministic HWRF, do neither:
1071 ./run_hwrf.py 2015 03W FORECAST
1075 ensda_size=self.
getint(
'hwrf_da_ens',
'ensda_size',0)
1077 raise HWRFConfigInsane(
1078 "You must use at least 30 members when running the GFS "
1079 "ENKF based HWRF DA ensemble. You only requested %d."
1082 raise HWRFConfigInsane(
1083 "You cannot use more than 80 members when running the GFS"
1084 " ENKF based HWRF DA ensemble. You requested %d."
1088 """!Runs a sanity check on the archiving settings.
1089 @param logger a logging.Logger for log messages"""
1090 if not self.
getbool(
'sanity',
'check_archive',
True):
return
1091 archive=self.
getloc(
'archive',
'NONE')
1092 if archive.lower()==
'none':
1093 if logger
is not None:
1094 logger.info(
'Archiving is disabled: archive=none')
1097 adir=os.path.dirname(archive[5:])
1099 if archive[0:5]==
'hpss:' or archive[0:5]==
'hpsz:':
1100 logger.info(
'Cannot hsi -P ls / so skipping archive check.')
1101 elif archive[0:5]==
'disk:':
1102 if os.path.exists(adir):
1103 if os.path.isdir(adir):
1104 logger.info(
'%s: disk archive directory exists and is a '
1105 'directory.'%(adir,))
1107 msg=
'%s: disk archive directory is not a '\
1108 'directory '%(adir,)
1110 raise HWRFArchiveInsane(msg)
1112 logger.info(
'%s: disk archive directory does not exist'
1116 msg=
'%s: Invalid archive method %s'%(archive,archive[0:4])
1118 raise HWRFArchiveInsane(msg)
1120 if not self.
getbool(
'archive',
'mkdir',
False):
1121 msg=
'%s: archive directory is missing and [archive] mkdir '\
1122 'is disabled. Archive job would fail. Set [config] '\
1123 'archive=none to disable archiving OR set [archive] '\
1124 'mkdir=yes to make archive directory or disable the '\
1125 'archive sanity check with [sanity] check_archive=no'\
1128 raise HWRFArchiveInsane(msg)
1131 """!Runs sanity checks related to config files.
1133 Sanity checks the provided *.conf files. For example, some
1134 config files are incompatible with others, and some must be
1135 loaded in a specific order.
1136 @param logger the logging.Logger for log messages"""
1138 if self.
getbool(
'prelaunch',
'hwrf_43lev_conf',
False)
and \
1139 self.
getbool(
'prelaunch',
'hwrf_3km_conf',
False)
and \
1140 self.
getstr(
'prelaunch',
'last_of_43lev_3km',
'OOO')==
'43lev':
1141 msg=
"When using 43lev and 3km configurations together, you "\
1142 "must load hwrf_43lev.conf BEFORE hwrf_3km.conf. "\
1143 "Otherwise, the model will use the wrong timestep."
1144 if logger
is not None: logger.error(msg)
1148 """!Runs sanity checks related to coupling. Should be runnable
1149 with or without a specified cycle.
1151 @param logger A logging.Logger for log messages"""
1153 run_ocean=self.
getbool(
'config',
'run_ocean',
True)
1154 run_wave=self.
getbool(
'config',
'run_wave',
False)
1155 atmos=self.
getstr(
'config',
'atmos_model',
'unspecified')
1156 ocean=self.
getstr(
'config',
'ocean_model',
'unspecified')
1157 wave=self.
getstr(
'config',
'wave_model',
'unspecified')
1160 msg=
'The atmos_model must be WRF not '+repr(atmos)
1162 if run_ocean
and ocean!=
'HYCOM' and ocean!=
'POM':
1163 msg=
'The ocean_model must be POM or HYCOM not '+repr(ocean)
1165 if run_wave
and wave!=
'WW3':
1166 msg=
'The wave_model must be WW3 not '+repr(wave)
1169 if run_ocean
or run_wave:
1170 dtstr=self.
getstr(
'wrf',
'dt')
1171 ntrack=self.
getint(
'namelist_outer',
'physics.ntrack')
1172 nphs=self.
getint(
'namelist_outer',
'physics.nphs')
1174 dtc_atmos=ntrack*nphs*dt/3
1175 dtc_cpl=self.
getint(
'wrfexe',
'dt_c')
1176 if dtc_atmos!=dtc_cpl:
1177 msg=
'Coupler timestep %s is not equal to atmospheric '\
1178 'coupling timestep %s. (ATM dtc = ntrack(namelist_'\
1179 'outer)*nphs(namelist_outer)*dt(wrf)/3 = %s*%s*%s/3 = '\
1180 '%s != %s coupler dtc)'
1181 msg=msg%(str(dtc_cpl),str(dtc_atmos),str(ntrack),str(nphs),
1182 str(dt),str(dtc_atmos),str(dtc_cpl))
1185 msg=
'Coupling configuration is incorrect. See earlier '\
1186 'error messages for details.'
1188 raise HWRFConfigInsane(
'Coupling configuration is incorrect.')
1191 """!Runs all sanity checks that are not dependent on the cycle.
1193 Runs any sanity checks that are possible without knowing
1194 the cycle that is to be run. This is intended to be used by
1195 the workflow automation system (rocoto, ecflow, etc.) to make
1196 sure everything is functional before starting any jobs.
1197 @param enset a set of ensemble ids
1198 @param logger the logging.Logger for log messages"""
1200 for dirvar
in (
'HOMEhwrf',
'EXEChwrf',
'EXhwrf',
'USHhwrf',
1201 'FIXhwrf',
'PARMhwrf',
'utilexec' ):
1202 logger.debug(
'%s: check this dir variable'%(dirvar,))
1203 thedir=self.
getdir(dirvar)
1208 checkme=os.path.join(self.
getdir(
'USHhwrf'),
'hwrf',
'launcher.py')
1209 myfile=os.path.realpath(__file__)
1210 if myfile[-4:]==
'.pyc': myfile=myfile[0:-1]
1212 raise HWRFScriptInsane(
1213 '%s: The ush/hwrf/launcher.py does not exist, which is '
1214 'impossible because it is running now. Check your paths '
1215 'and EXPT.'%(checkme,))
1216 if not os.path.samefile(checkme,myfile):
1217 raise HWRFScriptInsane(
1218 '%s: not the same as the launcher.py that is running now '
1219 '(%s) -- check your paths and EXPT.'%(checkme,myfile))
1226 if enset
is not None:
1230 """!Ensures the forecast length is valid.
1231 @param logger the logging.Logger for log messages"""
1232 iflen=self.
getint(
'config',
'forecast_length',126)
1234 raise HWRFConfigInsane(
"The forecast length must be at least "
1235 "12hrs (you specified %dhrs)"%iflen)
1237 raise HWRFConfigInsane(
"The forecast length must divisible by "
1238 "6hrs (you specified %dhrs)"%iflen)
1241 """!Runs a sanity check on the provided directory paths.
1243 Checks to make sure the specified directory exists and can be
1244 read and executed. If writable=True, also checks to see if it
1245 can be written. The dirvar is an explanation of what the
1246 directory relates to, for example HOMEhwrf.
1247 @param thedir a directory to check
1248 @param dirvar the variable that will be set to this directory (such as PARMhwrf, USHhwrf, etc.)
1249 @param writable Do we need to write to this directory?
1250 @param logger the logging.Logger for log messages"""
1251 if logger
is None: logger=self.
log(
'sanity.checker')
1252 logger.info(
'%s: check directory %s'%(dirvar,thedir))
1253 if not os.path.exists(thedir):
1255 %(dirvar,thedir),thedir)
1257 if not os.access(thedir,os.W_OK):
1259 %(dirvar,thedir),thedir)
1260 if not os.access(thedir,os.R_OK):
1262 %(dirvar,thedir),thedir)
1263 if not os.access(thedir,os.X_OK):
1265 %(dirvar,thedir),thedir)
1268 """!Checks to see if the sanity checks can be run. In essence,
1269 this is a sanity check of the sanity check routines.
1270 @param logger the logging.Logger for log messages"""
1272 raise HWRFConfigInsane(
1273 'The [sanity] section is missing from the HWRF conf files.')
1279 """!Sanity checks the fix file version.
1281 Gets the expected fix file version from [sanity] fix_version.
1282 Raises HWRFConfigInsane if there is an error while getting it.
1283 @param logger the logging.Logger for log messages"""
1285 fix_version=self.
getstr(
'sanity',
'fix_version',
'nope')
1286 if fix_version==
'nope':
1287 raise HWRFConfigInsane(
1288 'The [sanity] section fix_version is not set.')
1290 fix_version=int(fix_version)
1291 except (ValueError, TypeError)
as e:
1292 raise HWRFConfigInsane(
1293 'The [sanity] section fix_version is not a number.')
1294 if fix_version > 20991231:
1295 raise HWRFConfigInsane(
1296 'The [sanity] section fix_version has an implausible value '
1297 '%d (>20991231)'%fix_version)
1298 if fix_version < 20040131:
1299 raise HWRFConfigInsane(
1300 'The [sanity] section fix_version has an implausible value '
1301 '%d (<20140131)'%fix_version)
1305 """!Sanity checks the fix files.
1307 Checks to see if the fix files are available and match the
1308 expected fix file version.
1309 @param logger the logging.Logger for log messages"""
1311 if not self.
getbool(
'sanity',
'check_fix',
True):
1312 if logger
is not None:
1314 'Skipping fix file check: [sanity] check_fix=no')
1318 if logger
is not None:
1319 logger.info(
'Want fix file version %d'%fix_version)
1320 datestamp=os.path.join(self.
getdir(
'FIXhwrf'),
'hwrf_fix_datestamp')
1321 logger.info(
'check fix version: '+datestamp)
1323 if logger
is not None: logger.error(msg)
1324 raise HWRFFixInsane(msg)
1326 with open(datestamp,
'rt')
as f:
1330 if version>20991231:
1331 complain(
'%s: The fix file version datestamp %d is '
1332 'implausible (>201991231)'%(datestamp,version))
1333 elif version<20040131:
1334 complain(
'%s: The fix file version datestamp %d is '
1335 'implausible (<20140131)'%(datestamp,version))
1336 elif version<fix_version:
1337 complain(
'%s: The fix file version is too old. Expected '
1338 '%d, got %d'%(datestamp,fix_version,version))
1339 elif version!=fix_version:
1341 '%s: The fix file version (%d) does not match '
1342 'the expected version (%d). It is a newer '
1343 'version, so I will try to run.'%(
1344 datestamp,version,fix_version))
1345 if logger
is not None: logger.warning(msg)
1346 produtil.log.jlogger.warning(msg)
1348 logger.info(
'fix version %d matches'%version)
1350 except (KeyError,TypeError,EnvironmentError,ValueError)
as e:
1351 complain(
'%s: fix files failed a sanity check: %s'%(
1355 """!Sanity checks some of the executables.
1357 Checks to see if a few of the executables are available. This
1358 is not an exhaustive check: most executables are not checked.
1359 This check is just to see if the user forgot to install
1360 executables entirely.
1361 @param logger the logging.Logger for log messages"""
1363 if not self.
getbool(
'sanity',
'check_exec',
True):
1364 if logger
is not None:
1366 'Skipping executable check: [sanity] check_exec=no')
1374 msg=
'%s: %s: %s'%(exe,loc,why)
1375 if logger
is not None:
1376 if exe==
'gsi' and os.environ.get(
'PARAFLAG',
'YES')!=
'NO':
1378 '''GSI EXECUTABLE IS MISSING:
1380 If you are not NCO, and you are on Jet, Zeus or WCOSS, the latest
1381 developmental version of the HWRF GSI, maintained by Mingjing Tong,
1382 can be found at these locations:
1384 WCOSS: /hwrf/save/emc.hurpara/EMCGSI/hwrf_gsi
1385 Zeus: /scratch1/portfolios/NCEPDEV/hwrf/save/hurpara/EMCGSI/hwrf_gsi
1386 Jet: /mnt/pan2/projects/hwrfv3/hurrun/EMCGSI/hwrf_gsi
1388 Just link or copy the src/global_gsi executable to exec/hwrf_gsi
1389 in your installation directory:
1391 ln -s /path/to/GSI_HWRF/src/global_gsi %s
1393 If you are on another machine, you will need to check out and build
1394 GSI from either the EMC or DTC repositories, then build and install
1399 \\O\\ THE HWRF TEAM /O/
1402 logger.critical(msg)
1403 raise HWRFExecutableInsane(msg)
1405 checkme=[
'wrf',
'gettrk',
'post',
'real_nmm',
'mpiserial' ,
1406 'hwrf_geogrid',
'tar',
'hwrf_nhc_products',
1409 run_gsi=self.
getbool(
'config',
'run_gsi',
True)
1410 run_ocean=self.
getbool(
'config',
'run_ocean',
True)
1411 run_relocation=self.
getbool(
'config',
'run_relocation',
True)
1413 run_wave=self.
getbool(
'config',
'run_wave',
False)
1415 if run_relocation: checkme.append(
'hwrf_wrf_split')
1416 if run_gsi: checkme.append(
'gsi')
1417 if run_ocean: checkme.append(
'hwrf_ocean_fcst')
1418 if run_wave: checkme.insert(0,
'ww3_shel')
1426 complain(
'cannot find in $PATH')
1428 if not os.path.exists(loc): complain(
'executable does not exist')
1429 if os.path.getsize(loc)<=0: complain(
'executable is empty')
1430 if not os.path.isfile(loc): complain(
'executable is not a file')
1431 if not os.access(loc,os.X_OK): complain(
'cannot execute')
1434 """!Runs nearly all sanity checks.
1436 Runs simple sanity checks on the HWRF installation directory
1437 and configuration to make sure everything looks okay. May
1438 throw a wide variety of exceptions if sanity checks fail."""
1439 logger=self.
log(
'sanity.checker')
1440 for dirvar
in (
'WORKhwrf',
'com' ):
1441 logger.info(
'%s: check this dir variable'%(dirvar,))
1442 thedir=self.
getdir(dirvar)
1446 enset.add(self.
get(
'config',
'ENS',
'99'))
1450 CONFhwrf=self.
getdir(
'CONFhwrf')
1451 logger.info(
'Try to load configuration file %s'%(CONFhwrf,))
1454 logger.info(
'Compare new and old vitals')
1455 if 'syndat' in self.__dict__
and self.syndat.stormid3 != \
1456 redo.syndat.stormid3:
1458 "New directory has the wrong stormid: correct=%s conf=%s"
1459 %(self.syndat.stormid3,redo.syndat.stormid3))
1460 if self.
cycle!=redo.cycle:
1462 'New directory has the wrong cycle: correct=%s conf=%s'
1463 %(self.cycle.strftime(
'%Y%m%d%H'),
1464 redo.cycle.strftime(
'%Y%m%d%H')))
1466 case_root=redo.getstr(
'config',
'case_root').upper()
1467 input_catalog=redo.getstr(
'config',
'input_catalog')
1468 logger.info(
'Case root is %s and input catalog is %s'
1469 %(repr(case_root),repr(input_catalog)))
1471 if case_root==
'HISTORY':
1472 if not self.
getbool(
'sanity',
'check_input',
True):
1474 'Input check is disabled: [sanity] check_input=False. '
1475 'Skipping input checks.')
1476 elif self.
get(
'config',
'input_catalog')==
'hwrfdata':
1478 '[config] input_catalog=hwrfdata -- skipping input '
1479 'check: will fetch input instead.')
1481 in_item=self.
getstr(
'sanity',
'input_item',
'gfs')
1482 in_dataset=self.
getstr(
'sanity',
'input_dataset',
'gfs_sfcanl')
1483 logger.info(
'Retrospective mode. Check for %s %s file.'%(
1484 in_item,in_dataset))
1486 there=ic.locate(in_dataset,in_item,redo.cycle)
1488 raise HWRFInputInsane(
'Could not locate %s %s file.'
1489 %(in_dataset,in_item))
1490 if not os.path.exists(there):
1491 raise HWRFInputInsane(
1492 '%s %s file does not exist: %s'%(
1493 in_dataset,in_item,there))
1494 if not isnonempty(there):
1495 raise HWRFInputInsane(
'%s %s file is empty: %s'%(
1496 in_dataset,in_item,there))
1497 elif case_root==
'FORECAST':
1498 logger.info(
'Real-time mode. Will skip data checks.')
1500 raise HWRFVariableInsane(
1501 'config.case_root must be HISTORY or FORECAST not %s'
1502 %(repr(case_root),))
1508 """!Sanity checks the data assimilation.
1509 @param logger the logging.Logger for log messages"""
1510 run_gsi=self.
getbool(
'config',
'run_gsi',
False)
1511 run_ensda=self.
getbool(
'config',
'run_ensemble_da',
False)
1512 run_ensreloc=self.
getbool(
'config',
'run_ens_relocation',
False)
1515 logger.warning(
'You cannot run ENSDA without GSI. Disabling ENSDA.')
1519 logger.warning(
'You cannot run ENSDA relocation without ENSDA. '
1520 'Disabling ENSDA relocation.')
1522 self.
set(
'config',
'run_gsi',
'yes' if run_gsi
else 'no')
1523 self.
set(
'config',
'run_ensemble_da',
'yes' if run_ensda
else 'no')
1524 self.
set(
'config',
'run_ens_relocation',
'yes' if run_ensreloc
else 'no')
1527 """!Sanity checks the hwrf_expt module.
1529 Loads the hwrf_expt module, runs its init_module routine, and
1530 then runts its sanity_check, passing the specified logger.
1531 @param logger the logging.Logger for log messages"""
1533 if not self.
getbool(
'sanity',
'check_expt',
True):
1534 if logger
is not None:
1536 'Skipping hwrf_expt check: [sanity] check_expt=no')
1539 logger.info(
'Export [config] CONFhwrf to the environment '
1540 'variable $CONFhwrf.')
1541 os.environ[
'CONFhwrf'] = self.
getstr(
'config',
'CONFhwrf')
1542 logger.info(
'Attempt to load hwrf_expt module.')
1544 logger.info(
'Attempt to initialize hwrf_expt module.')
1546 logger.info(
"Run the hwrf_expt module's own sanity_check routine.")
1550 """!Tries to guess default values for many configuration settings.
1552 Tries to set default values for some mandatory conf options.
1553 The default values come from either other options or from
1554 environment variables. If no suitable default can be found,
1555 execution will continue, but later jobs may fail.
1557 Config options are as follows. If $VAR appears, that refers
1560 * config.cycle --- the cycle to run as a ten digit date (2014091418)
1561 Taken from cycle if present, otherwise $YMDH
1563 * config.storm_num --- the storm number as a priority 1 to 5.
1564 Taken from $storm_num or uses the default of 1.
1566 * config.stormlabel --- "storm" with the storm number appended
1567 (ie.: storm5 if storm_num=5).
1569 * dir.HOMEhwrf --- set to HOMEhwrf or $HOMEhwrf
1571 * dir.WORKhwrf --- set to WORKhwrf or $WORKhwrf
1573 * dir.syndat --- tcvitals directory. Default: $COMINARCH
1575 * config.input_catalog --- input catalog (conf section) name.
1576 Default: $INPUT_CATALOG or "hwrfdata"
1578 * config.PARAFLAG --- NO if you are NCEP Central Operations
1579 (NCO), and YES otherwise. This is used to turn on or off
1580 DBNet alerts and other NCO-specific options.
1582 In addition, the following directories are added to the [dir]
1585 * USHhwrf --- the location of ush scripts and the parent
1586 directory of the hwrf, pom and produtil packages
1587 * FIXhwrf --- the location of the HWRF fix directory
1588 * JOBhwrf --- the location of the HWRF jobs directory. This is
1589 not needed by normal users.
1590 * EXhwrf --- the location of the HWRF scripts directory
1591 * PARMhwrf --- the location of the HWRF parm/ directory
1592 * EXEChwrf --- the location of the HWRF exec/ directory
1593 * utilexec --- the location of the HWRF nwport/exec or
1594 /nwprod/exec directory
1596 If set, these variables will be copied to the [config] section:
1597 * EXPT --- optional: the experiment identifier, which must be
1598 alphanumeric, and can contain underscores. Default: HWRF
1599 * SUBEXPT --- optional: the subexperiment identifier, which
1600 must be alphanumeric, and can contain underscores.
1601 Default: set to value of EXPT """
1604 PARAFLAG=( ENV.get(
'PARAFLAG',
'YES')!=
'NO' )
1606 def set_default(section,option,default,env1=None,env2=None):
1608 if env1
is not None and env1
in ENV:
1609 self.
set(section,option,ENV[env1])
1610 elif env2
is not None and env2
in ENV:
1611 self.
set(section,option,ENV[env2])
1612 elif default
is not None:
1613 self.
set(section,option,str(default))
1616 'Cannot find suitable default for [%s] option %s'%(
1619 set_default(
'config',
'case_root',
'HISTORY',
'CASE_ROOT')
1620 set_default(
'config',
'EXPT',
'HWRF',
'EXPT')
1621 set_default(
'config',
'SUBEXPT',
'{EXPT}',
'SUBEXPT')
1622 set_default(
'dir',
'HOMEhwrf',
None,
'HOMEhwrf')
1623 set_default(
'dir',
'WORKhwrf',
None,
'WORKhwrf',
'DATA')
1624 set_default(
'config',
'datastore',
'{WORKhwrf}/hwrf_state.sqlite3')
1625 set_default(
'config',
'storm_num',
'1',
'storm_num')
1626 set_default(
'config',
'stormlabel',
'storm{storm_num}')
1627 set_default(
'config',
'input_catalog',
'hwrfdata',
'INPUT_CATALOG')
1628 set_default(
'dir',
'syndat',
None,
'COMINARCH')
1629 set_default(
'dir',
'com',
None,
'COMOUT')
1630 set_default(
'config',
'PARAFLAG',
'YES',
'PARAFLAG')
1634 self.
cycle=ENV[
'YMDH']
1638 NWPROD=
'NWPROD',ENV[
'NWPROD']
1639 elif 'envir' in ENV
and os.path.exists(
'/nw'+ENV[
'envir']):
1640 NWPROD=
'/nw'+ENV[
'envir']
1642 NWPROD=
'{HOMEhwrf}/nwport'
1644 def dirset(evar,deff,parent='{HOMEhwrf}'):
1646 self._conf.set(
'dir',evar,ENV[evar])
1647 elif not self._conf.has_option(
'dir',evar):
1648 self._conf.set(
'dir',evar,parent+
'/'+deff.lower())
1650 dirset(
'FIXhwrf',
'fix')
1651 dirset(
'USHhwrf',
'ush')
1652 dirset(
'EXhwrf',
'scripts')
1653 dirset(
'EXEChwrf',
'exec')
1654 dirset(
'JOBhwrf',
'jobs')
1655 dirset(
'PARMhwrf',
'parm')
1656 dirset(
'utilexec',
'util/exec',NWPROD)
1659 """!Creates the com/storm*.holdvars.txt file
1661 Creates the storm*.holdvars.txt file needed by the old
1662 ksh-based scripts. This is done for backward compatibility
1663 only. The two arguments (part1 and part2) are two files to
1664 pass through self.strinterp and then into the holdvars file.
1665 Part 1 is mandatory, but part2 is optional. It also fills in
1666 a few custom derived variables:
1668 * cap_run_gsi --- capitalized version of [config] section run_gsi
1669 * cap_run_relocation --- capitalized version of [config]
1670 section run_relocation
1671 * holdvars_model --- "COUPLED" if [config] section run_ocean is
1672 true, and "ATMOS" if it is false.
1673 @param part1 The first input file to read
1674 @param part2 The second input file to read or None to disable"""
1675 assert(isinstance(part1,basestring))
1678 gsi_flag=self.
getbool(
'config',
'run_gsi')
1679 self.
set(
'holdvars',
'cap_run_gsi',(
'YES' if gsi_flag
else 'NO'))
1681 reloc_flag=self.
getbool(
'config',
'run_relocation')
1682 self.
set(
'holdvars',
'cap_run_relocation',
1683 (
'YES' if reloc_flag
else 'NO'))
1685 ocean_flag=self.
getbool(
'config',
'run_ocean')
1686 self.
set(
'holdvars',
'holdvars_model',
1687 (
'COUPLED' if ocean_flag
else 'ATMOS'))
1695 io_pergrp_str=self.
getstr(
'runwrf',
'nio_tasks_per_group',
'0')
1696 io_groups=self.
getint(
'runwrf',
'nio_groups',0)
1697 io_pergrp_cs=io_pergrp_str.strip().strip(
',').strip().split(
',')
1698 io_pergrp_ss=io_pergrp_str.strip().strip(
',').strip().split()
1699 if len(io_pergrp_cs) > 1:
1700 io_pergrp=int(io_pergrp_cs[0])
1701 elif len(io_pergrp_ss) > 1:
1702 io_pergrp=int(io_pergrp_ss[0])
1704 io_pergrp=self.
getint(
'runwrf',
'nio_tasks_per_group',0)
1706 io_groups=self.
getint(
'runwrf',
'nio_groups',0)
1707 io_servers = (io_pergrp*io_groups)>0
1709 self.
set(
'holdvars',
'IO_SERVERS',
1710 (
'YES' if io_servers
else 'NO'))
1711 self.
set(
'holdvars',
'IOSRV_PERGRP',
'%d'%io_pergrp)
1712 self.
set(
'holdvars',
'IOSRV_GROUPS',
'%d'%io_groups)
1714 with open(self.
strinterp(
'dir',part1),
'rt')
as f:
1716 out.append(self.
strinterp(
'holdvars',line.rstrip()))
1717 if part2
is not None:
1718 with open(self.
strinterp(part2),
'rt')
as f:
1720 out.append(self.
strinterp(line.rstrip()))
1721 return '\n'.join(out) +
'\n'
This module provides a set of utility functions to do filesystem operations.
def getexe
query the "exe" section
def load(filename)
Loads the HWRFLauncher created by the launch() function.
Raised when a directory is unspecified, missing or invalid.
def set_storm(self, syndat, oldsyndat)
Sets the storm that is to be run.
def __init__
Creates a new HWRFLauncher.
def sanity_check_expt(self, logger)
Sanity checks the hwrf_expt module.
def read_tcvitals_and_messages
Reads in the tcvitals file and message files.
def to_fraction
Converts an object or two to a fraction.
def to_datetime_rel(d, rel)
Converts objects to a datetime relative to another datetime.
Raised when configuration files were specified in the wrong order.
def touch
Open the file for append and set mtime and atime.
def sanity_check_directory
Runs a sanity check on the provided directory paths.
def init_module
Initializes the HWRF object structure.
def strinterp(self, sec, string, kwargs)
perform string expansion
def getfloat
get a float value
def parse_tcvitals
Reads data from a tcvitals file.
Defines the Revital class which manipulates tcvitals files.
def sanity_check(logger)
Runs a sanity check on this module's contents.
def sanity_check_ensemble
Runs a sanity check on the ensemble configurations.
Defines StormInfo and related functions for interacting with vitals ATCF data.
A shell-like syntax for running serial, MPI and OpenMP programs.
def postmsg(message)
Sends the message to the jlogfile logging stream at level INFO.
def sanity_check_archive
Runs a sanity check on the archiving settings.
A replacement for the hwrf.config.HWRFConfig used throughout the HWRF system.
def log
returns a logging.Logger object
def get
get the value of an option from a section
def sanity_check(self)
Runs nearly all sanity checks.
def isnonempty(filename)
Returns True if the filename refers to an existent file that is non-empty, and False otherwise.
def getloc
search the config, exe and dir sections in that order
def getstr
get a string value
Raised when the configuration had a different cycle than expected.
def to_datetime(d)
Converts the argument to a datetime.
def parse_launch_args
Parsed arguments to scripts that launch the HWRF system.
a class that contains configuration information
def launch
Initializes the directory structure for a new HWRF workflow.
def makedirs
Make a directory tree, working around filesystem bugs.
Time manipulation and other numerical routines.
def guess_default_values(self)
Tries to guess default values for many configuration settings.
def randint_zeromean
Generates "count" numbers uniformly distributed between -imax and imax, inclusive, with a mean of zero.
Raised when the configuration had a different storm than expected.
def set(self, section, key, value)
set a config option
def sanity_check_fix_files
Sanity checks the fix files.
def getint
get an integer value
def sanity_check_da(self, logger)
Sanity checks the data assimilation.
parses UNIX conf files and makes the result readily available
def storm_for_stormnum(self)
Not implemented.
def decide_domain_center
Decide the outermost domain's center.
def tcautoseed
Sets the random seed for ensemble perturbations.
def timeless_sanity_check
Runs all sanity checks that are not dependent on the cycle.
def sanity_check_coupling
Runs sanity checks related to coupling.
def sanity_check_executables
Sanity checks some of the executables.
def getbool
get a bool value
Exceptions raised by the hwrf package.
def sanity_check_config_files
Runs sanity checks related to config files.
This should be raised when the user requests a specific storm or cycle of a storm and no such vitals exist.
def has_section(self, sec)
does this section exist?
def make_holdvars
Creates the com/storm*.holdvars.txt file.
def getdir
query the "dir" section
def has_option(self, sec, opt)
is this option set?
def sanity_get_fix_version
Sanity checks the fix file version.
def multistorm_parse_args
def set_storm_multistorm(self, multistorm_real_sids, syndat4multistorm, oldsyndat4multistorm)
def find_exe
Searches the $PATH or a specified iterable of directory names to find an executable file with the given name.
def set_options(self, section, kwargs)
set values of several options in a section
def sanity_check_sanity_check
Checks to see if the sanity checks can be run.
def sanity_check_forecast_length
Ensures the forecast length is valid.
def choose_vitbase
Decides the location of the vitals file.
cycle
the analysis cycle, a datetime.datetime object
Storm vitals information from ATCF, B-deck, tcvitals or message files.
def gen_vitals
Generate tcvitals files.
This class reads one or more tcvitals files and rewrites them as requested.
def timestrinterp(self, sec, string, ftime, atime=None, kwargs)
performs string expansion, including time variables