HWRF  trunk@4391
launcher.py
1 """!Creates the initial HWRF directory structure, loads information into each job.
2 
3 This module is used to create the initial HWRF conf file in the
4 first HWRF job via the hwrf.launcher.launch(). The hwrf.launcher.load()
5 then reloads that configuration. The launch() function does more than
6 just create the conf file though. It parses the tcvitals, creates
7 several initial files and directories and runs a sanity check on the
8 whole setup.
9 
10 The HWRFLauncher class is used in place of an hwrf.config.HWRFConfig
11 throughout the HWRF system. It can be used as a drop-in replacement
12 for an hwrf.config.HWRFConfig, but has additional features needed to
13 support sanity checks, initial creation of the HWRF system and
14 tcvitals generation."""
15 
16 ##@var __all__
17 # All symbols exported by "from hwrf.launcher import *"
18 __all__=['load','launch','HWRFLauncher','parse_launch_args','multistorm_parse_args']
19 
import os, re, sys, collections, random

import produtil.fileop
import produtil.log
import hwrf.numerics
import hwrf.revital
import hwrf.storminfo

from random import Random
from produtil.fileop import isnonempty
from produtil.run import run, exe
from produtil.log import jlogger
from hwrf.numerics import to_datetime_rel, to_datetime
from hwrf.config import HWRFConfig
from hwrf.exceptions import HWRFDirInsane,HWRFStormInsane,HWRFCycleInsane, \
    HWRFVariableInsane,HWRFInputInsane,HWRFScriptInsane,HWRFExecutableInsane,\
    HWRFFixInsane,HWRFArchiveInsane,HWRFConfigInsane
33 
def multistorm_parse_args(msids, args, logger, usage, PARMhwrf=None, wrapper=False):
    """!This is the multistorm argument parser. It is really just a wrapper around
    parse_launch_args().

    The last element of the returned list is the launch args for the fake storm.

    From the original arguments, returns a new list of launch args for all
    the storms in a multistorm run. The SID and optional config.startfile
    from the original sys.argv[1:] list are replaced with a storm id and a
    config.startfile (if present) from the MULTISTORM_SIDS.
    The following multistorm conf options are also added to each storm:
    config.fakestormid=, config.multistorm_sids=, config.multistorm_priority_sid=.
    General structure of the generated per-storm argument lists:
    [[storm1, arg1, ..argN], ..[stormN, arg1, ..argN], [storm00, arg1, ..argN]]

    @param msids list of real storm ids for this multistorm run; may be
        mutated in place (the fake storm id is removed if present)
    @param args a copy of the initial command line args, excluding sys.argv[0]
    @param logger a logging.Logger for log messages
    @param usage a function called to provide a usage message
    @param PARMhwrf the directory with *.conf files
    @param wrapper unused in this function; presumably set when called
        from the stand-alone wrappers -- TODO confirm against callers
    @returns (case_root,parm,infiles,stids,fake_stid,multistorm_priority_sid,moreopts)
    """

    # See if the optional config.startfile argument is present and get its index.
    # startfile_idx is a list of indexes in the args list that have
    # a 'config.startfile' substring. There should only be one or none.
    # If there are none, then startfile_idx = [], an empty list.
    startfile_idx = [args.index(arg) for arg in args if 'config.startfile' in arg]

    if len(startfile_idx) > 1:
        logger.error('Exiting, More than 1 config.startfile= parameter in the argument list.')
        sys.exit(2)

    # MULTISTORM Requirement-The fakestorm will be defined as "00L".
    fake_stid = '00L'

    assert(msids is not None)

    # Best guess at priority storm id: the first real storm id, else the
    # fake storm itself when only the fake storm is being run.
    if fake_stid != msids[0]:
        multistorm_priority_sid = msids[0]
    elif len(msids) > 1:
        # BUG FIX: this branch previously tested the undefined name
        # "multistorm_sids", raising NameError whenever the fake storm
        # id was listed first.
        multistorm_priority_sid = msids[1]
    else:
        # Else, running multistorm with no storm, only the fake storm.
        multistorm_priority_sid = msids[0]

    if fake_stid in msids:
        msids.remove(fake_stid)

    multistorm_all_sids = list(msids)
    multistorm_all_sids.append(fake_stid)

    args.append('config.fakestormid=' + fake_stid)
    args.append('config.multistorm_priority_sid=' + multistorm_priority_sid)
    args.append('config.multistorm_sids=' + ' '.join(msids))

    logger.info('Setting up hwrf to run as a multi storm with sids: %s' %(msids))
    logger.info('HWRF multistorm: The priority sid is: %s'%(multistorm_priority_sid))
    logger.info('HWRF multistorm: The multistorm fake storm id is: %s' %(fake_stid))

    # Setup arguments for each storm, as if this script was called
    # individually for each storm.  Update the storm id and startfile
    # arguments for each storm.
    # [[storm1, arg1, ..argN], ..[stormN, arg1, ..argN], [storm00, arg1, ..argN]]
    multistorms = []
    stids = []
    moreopts = []
    # Used to build the start files for a multistorm when called from the wrappers.
    # ie. if "00L." passed in, it is replaced in the startfile name in the loop below
    # for each storm.
    sid_passedin = args[0]
    for i, stormid in enumerate(multistorm_all_sids):
        multistorms.append(args[:])
        multistorms[i][0] = stormid
        if startfile_idx:
            if sid_passedin in multistorms[i][startfile_idx[0]]:
                multistorms[i][startfile_idx[0]]= \
                    args[startfile_idx[0]].replace(sid_passedin,stormid)
            else:
                multistorms[i][startfile_idx[0]]= args[startfile_idx[0]] + str(stormid)

    for storm_args in multistorms:
        (case_root,parm,infiles,stid,moreopt) = \
            parse_launch_args(storm_args,logger,usage,PARMhwrf)

        stids.append(stid)
        moreopts.append(moreopt)
        # Every storm of a multistorm also reads these two conf files:
        for confbn in [ 'hwrf_3km.conf', 'hwrf_multistorm.conf' ]:
            confy= os.path.join(parm, confbn)
            if not os.path.exists(confy):
                logger.error(confy+': conf file does not exist.')
                sys.exit(2)
            elif not os.path.isfile(confy):
                logger.error(confy+': conf file is not a regular file.')
                sys.exit(2)
            elif not isnonempty(confy):
                # empty conf is unusual but not fatal
                logger.warning(
                    confy+': conf file is empty. Will continue anyway.')
            logger.info('Conf input: '+repr(confy))
            infiles.append(confy)

    return (case_root,parm,infiles,stids,fake_stid,multistorm_priority_sid,moreopts)
135 
def multistorm_priority(args, basins, logger, usage, PARMhwrf=None, prelaunch=None):
    """!Returns the list of storm ids, in priority order, for a
    multistorm run of the given cycle.

    Launches a fake-storm configuration (without creating directories),
    reads the tcvitals for the cycle, filters them to the requested
    basins, sorts them with the HRD multistorm sorter and returns the
    resulting storm ids.  If no storms are found, a no_storms.txt file
    is created in the com directory.

    @param args command line args; args[0] is the cycle (YYYYMMDDHH),
        the rest are passed to parse_launch_args()
    @param basins iterable of one-letter basin identifiers to keep
    @param logger a logging.Logger for log messages
    @param usage a function called to provide a usage message
    @param PARMhwrf the directory with *.conf files
    @param prelaunch passed through to launch()
    @returns the list of storm ids, possibly empty"""

    storms = list()
    strcycle=args[0]
    # BUG FIX: this called hwrf.numerics.to_datetime() but the
    # hwrf.numerics module itself was never imported; use the
    # directly-imported to_datetime() instead.
    cyc=to_datetime(strcycle)
    YMDH=cyc.strftime('%Y%m%d%H')
    (case_root,parm,infiles,stid,moreopt) = \
        parse_launch_args(args[1:],logger,usage,PARMhwrf)
    # Launch in read-only mode (no dirs) just to get a configuration
    # object for the fake storm:
    conf = launch(infiles,cyc,stid,moreopt,case_root,
                  init_dirs=False,prelaunch=prelaunch,
                  fakestorm=True)
    syndatdir=conf.getdir('syndat')
    vitpattern=conf.getstr('config','vitpattern','syndat_tcvitals.%Y')
    vitfile=os.path.join(syndatdir,cyc.strftime(vitpattern))
    multistorm=conf.getbool('config','run_multistorm',False) #ADDED BY THIAGO TO DETERMINE IF "run_multistorm=true".
    rv=hwrf.revital.Revital(logger=logger)
    rv.readfiles(vitfile, raise_all=False)
    rv.delete_invest_duplicates()
    rv.clean_up_vitals()
    # Keep only this cycle's vitals within the requested basins:
    rv.discard_except(lambda v: v.YMDH==YMDH)
    rv.discard_except(lambda v: v.basin1 in basins)
    if multistorm:
        rv.discard_except(lambda v: v.basin1!='E' or (v.basin1=='E' and v.lon>=-140)) #ADDED BY THIAGO: HRD's new rule for East-pac storms only.
    rv.clean_up_vitals()
    rv.sort_by_function(rv.hrd_multistorm_sorter)
    for v in rv:
        sid = v.as_tcvitals().split()[1]
        storms.append(sid)
    if len(storms) == 0:
        logger.info('No storms for cycle: '+cyc.strftime('%Y%m%d%H'))
        produtil.fileop.touch(os.path.join(conf.getdir('com'),
                                           'no_storms.txt'))
    return(storms)
169 
def parse_launch_args(args,logger,usage,PARMhwrf=None):
    """!Parsed arguments to scripts that launch the HWRF system.

    This is the argument parser for the exhwrf_launch.py and
    hwrf_driver.py scripts. It parses the storm ID and later
    arguments (in args). Earlier arguments are parsed by the scripts
    themselves. If something goes wrong, this function calls
    sys.exit(1) or sys.exit(2).

    The arguments depend on if PARMhwrf=None or not.

    @code{.py}
    If PARMhwrf is None:
      StormID CASE_ROOT /path/to/parm [options]
    Otherwise:
      StormID CASE_ROOT [options]
    @endcode

    * StormID --- three character storm identifier (ie.: 12L for Katrina)
    * CASE_ROOT -- HISTORY or FORECAST
    * /path/to/parm - path to the parm directory, which contains the
      default conf files.

    Options:
    * section.variable=value --- set this value in this section, no matter what
    * /path/to/file.conf --- read this conf file after the default conf files.

    Later conf files override earlier ones.  The conf files read in
    are:
    * parm/hwrf_input.conf
    * parm/hwrf.conf
    * parm/hwrf_holdvars.conf
    * parm/hwrf_basic.conf
    * parm/system.conf

    @param args the script arguments, after script-specific ones are removed
    @param logger a logging.Logger for log messages
    @param usage a function called to provide a usage message
    @param PARMhwrf the directory with *.conf files"""
    if len(args)<2 or ( PARMhwrf is None and len(args)<3):
        usage(logger=logger)
        sys.exit(2)

    # Get the storm ID:
    stid=args[0].upper()
    if not re.match('^[0-9][0-9][ABCELPQSW]$',stid):
        logger.error('%s: invalid storm id. Must be a three character '
                     'storm ID such as 90L or 13W'%(stid,))
        sys.exit(2)

    logger.info('Running Storm ID is '+repr(stid))

    # Get the case root (real-time vs. retrospective).  Any other value
    # is a fatal error.  (The old real_time flag computed here was
    # never used, so it has been removed.)
    case_root=args[1].upper()
    if case_root not in ('HISTORY','FORECAST'):
        logger.error('%s: invalid case root. Must be HISTORY for '
                     'retrospective runs or FORECAST for real-time runs.'
                     %(case_root,))
        sys.exit(2)
    logger.info('Case root is '+repr(case_root))

    # Find the parm directory
    if PARMhwrf is None:
        parm=args[2]
        if not os.path.exists(parm):
            logger.error(parm+': parm directory does not exist')
            sys.exit(2)
        elif not os.path.isdir(parm):
            logger.error(parm+': parm directory is not a directory')
            sys.exit(2)
        logger.info('Scan %d optional arguments.'%(len(args)-3))
        args=args[3:]
    else:
        parm=PARMhwrf
        # BUG FIX: the count previously said len(args)-1 although the
        # remaining optional arguments are args[2:].
        logger.info('Scan %d optional arguments.'%(len(args)-2))
        args=args[2:]
    parm=os.path.realpath(parm)

    # Standard conf files:
    infiles=[ os.path.join(parm,'hwrf_input.conf'),
              os.path.join(parm,'hwrf.conf'),
              os.path.join(parm,'hwrf_holdvars.conf'),
              os.path.join(parm,'hwrf_basic.conf'),
              os.path.join(parm,'system.conf')
              ]

    # Now look for any option and conf file arguments:
    bad=False
    moreopt=collections.defaultdict(dict)
    for arg in args:
        logger.info(arg)
        # section.option=value  (verbose-mode regex; raw string so the
        # escaped dot is not a string-literal escape)
        m=re.match(r'''(?x)
          (?P<section>[a-zA-Z][a-zA-Z0-9_]*)
           \.(?P<option>[^=]+)
           =(?P<value>.*)$''',arg)
        if m:
            logger.info('Set [%s] %s = %s'%(
                    m.group('section'),m.group('option'),
                    repr(m.group('value'))))
            moreopt[m.group('section')][m.group('option')]=m.group('value')
        elif os.path.exists(arg):
            logger.info('%s: read this conf file'%(arg,))
            infiles.append(arg)
        else:
            bad=True
            logger.error('%s: invalid argument. Not an config option '
                         '(a.b=c) nor a conf file.'%(arg,))
    if bad:
        sys.exit(2)

    # Validate every conf file; emptiness is only a warning.
    for conffile in infiles:
        if not os.path.exists(conffile):
            logger.error(conffile+': conf file does not exist.')
            sys.exit(2)
        elif not os.path.isfile(conffile):
            logger.error(conffile+': conf file is not a regular file.')
            sys.exit(2)
        elif not isnonempty(conffile):
            logger.warning(
                conffile+': conf file is empty. Will continue anyway.')
        logger.info('Conf input: '+repr(conffile))
    return (case_root,parm,infiles,stid,moreopt)
296 
def load(filename):
    """!Loads the HWRFLauncher created by the launch() function.

    Creates an HWRFConfig object for an HWRF workflow that was
    previously initialized by hwrf.launcher.launch. The only argument
    is the name of the config file produced by the launch command.

    @param filename The storm*.conf file created by launch()"""
    conf=HWRFLauncher()
    conf.read(filename)
    logger=conf.log()

    # Multistorm - jtf
    # A multistorm run needs extra handling when this job belongs to
    # the fake storm (storm id == fakestormid).
    fakestormid=None
    load_fakestorm_extras=False
    if conf.getbool('config','run_multistorm',False):
        fakestormid=conf.getstr('config','fakestormid','nofakeid')
        if fakestormid == 'nofakeid':
            msg = "Looks like you are trying to run a multistorm but "\
                  "no fake storm id is defined. This will happen if there are "\
                  "no real storm ids specified for a multistorm run. "\
                  "Either provide a list of storms OR Set 'run_multistorm=no' "\
                  "in hwrf_basic.conf and check if you are setting the 'MULTISTORM' "\
                  "env var in either, the rocoto/runhwrf_wrapper or global.vars.ksh, "\
                  "and launcher_wrapper, if running the stand alone wrappers."
            raise HWRFConfigInsane(msg)
        if fakestormid == conf.getstr('config','STID','nosid'):
            load_fakestorm_extras=True

    cycle=conf.cycle
    assert(cycle is not None)
    logger.info('Running cycle: '+cycle.strftime('%Y%m%d%H'))

    WORKhwrf=conf.getdir('WORKhwrf')

    # Vitals for the current cycle; parse_tcvitals returns a list and
    # the single storm of interest is element [0].
    tmpvit=os.path.join(WORKhwrf,'tmpvit')
    logger.info(tmpvit+': read vitals for current cycle')
    with open(tmpvit,'rt') as vitf:
        syndat=hwrf.storminfo.parse_tcvitals(vitf,logger,raise_all=True)[0]
    logger.info('Current cycle vitals: '+syndat.as_tcvitals())

    # Vitals for the prior cycle:
    oldvit=os.path.join(WORKhwrf,'oldvit')
    logger.info(oldvit+': read vitals for prior cycle')
    with open(oldvit,'rt') as vitf:
        oldsyndat=hwrf.storminfo.parse_tcvitals(vitf,logger,raise_all=True)[0]
    logger.info('Prior cycle vitals: '+oldsyndat.as_tcvitals())

    conf.set_storm(syndat,oldsyndat)

    if load_fakestorm_extras:
        _load_multistorm(fakestormid,conf,logger)

    return conf
356 
# Multistorm - jtf
def _load_multistorm(fakestormid,conf,logger):
    """Do not call this. It is an internal implementation routine.
    It is only used internally and is called during the fakestorm of
    a multistorm run.

    Adds the additional storms of a multistorm run to the HWRFConfig
    object.
    """
    assert(conf.getbool('config','run_multistorm',False))
    multistorm_sids = conf.getstr('config','multistorm_sids').split()
    logger.info('Multistorm - fakestorm run %s: Adding storm info '
                'for storms: %s'%(fakestormid,multistorm_sids))

    WORKhwrf4fake=conf.getdir('WORKhwrf')

    syndat_multistorm=[]
    oldsyndat_multistorm=[]

    for stormid in multistorm_sids:
        # Each real storm's work area is the fake storm's work area
        # with the storm id substituted:
        WORKhwrf4real = WORKhwrf4fake.replace(fakestormid,stormid)

        # parse_tcvitals returns a one-element list; element [0] is the
        # StormInfo object for this storm.
        tmpvit=os.path.join(WORKhwrf4real,'tmpvit')
        logger.info(tmpvit+': Multistorm %s: read vitals for current cycle'%(stormid))
        with open(tmpvit,'rt') as vitf:
            curvit=hwrf.storminfo.parse_tcvitals(vitf,logger,raise_all=True)[0]
        syndat_multistorm.append(curvit)
        logger.info('Multistorm %s: Current cycle vitals: %s'%(
            stormid,str(curvit.as_tcvitals())))

        oldvit=os.path.join(WORKhwrf4real,'oldvit')
        logger.info(oldvit+': Multistorm %s: read vitals for prior cycle'%(stormid))
        with open(oldvit,'rt') as vitf:
            priorvit=hwrf.storminfo.parse_tcvitals(vitf,logger,raise_all=True)[0]
        oldsyndat_multistorm.append(priorvit)
        logger.info('Multistorm %s: Prior cycle vitals: %s'%(
            stormid,str(priorvit.as_tcvitals())))

    # TODO: CRITICAL, go back and consider sorting or better using a dictionary.
    # Though you can determine the stormid from the StormInfo object,
    # there is no guarantee that oldsyndat and syndat are in sync 1:1 in the
    # lists.  Consider that rationale throughout.

    conf.set_storm_multistorm(multistorm_sids,syndat_multistorm,oldsyndat_multistorm)
401 
def launch(file_list,cycle,stid,moreopt,case_root,init_dirs=True,
           prelaunch=None, fakestorm=False, fakestorm_conf=None,
           storm_num=None):
    """!Initializes the directory structure for a new HWRF workflow.

    This function runs sanity checks on the HWRF installation and the
    arguments to this function.  If a cycle is supplied, it then calls
    a prelaunch function, and then generates the configuration file
    and initial directory structure.

    You can run this function in a special mode that just reads the
    conf file, without specifying a cycle, or making directories.  To
    do that, send cycle=None and init_dirs=False.  That mode is used
    by the script that prepares the rocoto XML file for a multi-cycle
    workflow.

    @returns the full path to the conf file that is created as a
    result.  That conf file should be passed in to the load() function
    at the beginning of every job.

    @param file_list a list of conf files to read
    @param cycle the cycle to run; anything accepted by to_datetime
    @param stid the three character storm identifier for the storm to run.
      For example, stid=11L is the eleventh storm of the season in the
      Atlantic basin.  Although this argument is optional, the single
      storm HWRF workflow will fail if stid is not provided.
    @param moreopt a dict of dicts with additional options to set.  This
      maps section name to option to value.
    @param case_root HISTORY for retrospective mode, FORECAST for real-time
    @param init_dirs True if the initial directories should be created,
    @param prelaunch a function to call on the configuration before
      writing it to disk.  Takes as arguments: conf,logger,cycle
      Note that the logger or cycle may be None.  The conf is the
      configuration object that will be written.
    @param fakestorm if True, vitals come from read_fake_tcvitals()
      instead of the real tcvitals/message files (multistorm fake
      storm mode) -- NOTE(review): inferred from the branch below;
      confirm with multistorm callers.
    @param fakestorm_conf presumably the fake storm's configuration,
      used only to decide where to write a per-storm conf copy --
      TODO confirm.
    @param storm_num used only to name that per-storm conf copy
      (storm%d.conf) -- TODO confirm."""
    # TODO: add fakestorm description and use <jtf>
    # Sanity check: every input must be a conf filename string.
    for filename in file_list:
        if not isinstance(filename,basestring):
            raise TypeError('First input to hwrf.config.for_initial_job '
                            'must be a list of strings.')
    conf=HWRFLauncher()
    logger=conf.log()

    logger.info('FAKESTORM: ' +repr(fakestorm))
    logger.info('FAKESTORM CONF: ' +repr(fakestorm_conf))
    logger.info('GLOBAL STORM NUM: ' +repr(storm_num))

    # cycle may be None in the special "read-only" mode described above.
    if cycle is not None:
        conf.cycle=to_datetime(cycle)
        logger.info('Caller wants to launch a %s run of cycle %s storm %s.'
                    %(case_root,conf.cycle.strftime('%Y%m%d%H'),stid))
    else:
        logger.info('Caller wants to launch a %s run of storm %s.'
                    %(case_root,stid))
    # Record the case root both as a holdvar and as a config option:
    conf.add_section('holdvars')
    conf.set('holdvars','CASE_ROOT',case_root)
    conf.set('config','case_root',case_root)
    if case_root=='HISTORY':
        conf.set('config','fcsthist','hist')
        conf.set('config','realtime','false')
        hist=True
    else:
        conf.set('config','fcsthist','fcst')
        conf.set('config','realtime','true')
        hist=False

    # Read every conf file in order; later files override earlier ones.
    for filename in file_list:
        logger.info("%s: parse this file"%(filename,))
        conf.read(filename)

    # In FORECAST (real-time) mode, switch the default input catalog
    # to the one configured for forecasts:
    if not hist:
        input_catalog=conf.get('config','input_catalog','hwrfdata')
        if input_catalog=='hwrfdata':
            fcst_catalog=conf.get('config','fcst_catalog')
            conf.set('config','input_catalog',fcst_catalog)
            jlogger.info("FORECAST mode, so changing input_catalog to %s"
                         %(repr(fcst_catalog),))

    # Command-line overrides take precedence over all conf files:
    if moreopt is not None:
        for section,options in moreopt.iteritems():
            if not conf.has_section(section):
                conf.add_section(section)
            for option,value in options.iteritems():
                logger.info('Override: %s.%s=%s'
                            %(section,option,repr(value)))
                conf.set(section,option,value)
    conf.guess_default_values()
    # cycling_interval: hours in the conf file, converted to negative
    # seconds here so it can be used as an offset to the prior cycle.
    cycling_interval=conf.getfloat('config','cycling_interval',6.0)
    cycling_interval=-abs(cycling_interval*3600.0)
    if cycle is not None:
        other_cycle=to_datetime_rel(cycling_interval,conf.cycle)

    # Generate vitals: real storms read the tcvitals and message files,
    # the multistorm fake storm reads synthetic vitals instead.
    if stid is not None and cycle is not None and not fakestorm:
        revit=conf.read_tcvitals_and_messages(other_cycle=other_cycle)
        conf.gen_vitals(stid,cycling_interval,revit)
    elif stid is not None and cycle is not None and fakestorm:
        revit=conf.read_fake_tcvitals()
        conf.gen_vitals(stid,cycling_interval,revit)

    # rocoto does not initialize the dirs, it returns here.
    if not init_dirs:
        return conf

    # Create the initial directory structure:
    produtil.fileop.makedirs(conf.getdir('com'),logger=logger)
    produtil.fileop.makedirs(conf.getdir('WORKhwrf'),logger=logger)
    produtil.fileop.makedirs(conf.getdir('lockdir'),logger=logger)
    griblockdir=conf.getstr('regribber','griblockdir','')
    if griblockdir:
        produtil.fileop.makedirs(griblockdir,logger=logger)

    logger.info('Expand certain [dir] values to ensure availability '
                'before vitals parsing.')
    # Freeze these interpolated [dir] values into literal strings:
    for var in ( 'WORKhwrf', 'HOMEhwrf', 'com' ):
        expand=conf.getstr('dir',var)
        logger.info('Replace [dir] %s with %s'%(var,expand))
        conf.set('dir',var,expand)

    # If the community (DTC) GSI is checked out under sorc/GSI, point
    # FIXgsi at its fix files:
    dtcgsi=os.path.join(conf.getdir('HOMEhwrf'),'sorc/GSI')
    if os.path.exists(dtcgsi):
        if os.path.isdir(dtcgsi):
            logger.info('%s: community GSI is checked out, use '
                        'community gsi fix files '%(dtcgsi,))
            conf.set('dir','FIXgsi',os.path.join(dtcgsi,'fix'))

    # Decide and record the outermost domain center:
    if stid is not None:
        conf.decide_domain_center()
        loc=conf.getdir('domlocfile')
        logger.info('%s: Writing domain center.'%(loc,))
        with open(loc,'wt') as f:
            f.write("%g\n%g\n"%(
                    conf.getfloat('config','domlat'),
                    conf.getfloat('config','domlon')))

    # Give the caller one last chance to modify the conf before it is
    # written to disk:
    if prelaunch is not None:
        prelaunch(conf,logger,cycle)

    # Write the final configuration file:
    confloc=conf.getloc('CONFhwrf')
    logger.info('%s: write hwrf.conf here'%(confloc,))
    with open(confloc,'wt') as f:
        conf.write(f)

    # Write the PDY file consumed by downstream shell scripts:
    with open(os.path.join(conf.getdir('WORKhwrf'),'PDY'),'wt') as f:
        f.write(conf.strinterp(
                'config','cyc={HH}\nPDY={YMD}\nYMDH={YMDH}\n'))

    # Multistorm: also drop a per-storm conf copy into the fake
    # storm's com directory:
    if fakestorm_conf:
        sfile = os.path.join(fakestorm_conf.strinterp('dir','{com}'),
                             'storm%d.conf' %storm_num)
        logger.info('%s: write STORM conf here'%(sfile,))
        with open(sfile,'wt') as f:
            conf.write(f)

    return conf
554 
556  """!A replacement for the hwrf.config.HWRFConfig used throughout
557  the HWRF system. You should never need to instantiate one of
558  these --- the launch() and load() functions do that for you. This
559  class is the underlying implementation of most of the
560  functionality described in launch() and load()"""
561  def __init__(self,conf=None):
562  """!Creates a new HWRFLauncher
563  @param conf The configuration file."""
564  super(HWRFLauncher,self).__init__(conf)
565  self._cycle=None
566  ##@var _cycle
567  # The cycle for this HWRF forecast.
568 
570  """!Not implemented.
571 
572  This is intended to return the one letter basin, numeric storm
573  ID and year for the specified storm number (1-10).
574 
575  @bug The hwrf.launcher.HWRFLauncher.storm_for_stormnum() is
576  not implemented and should probably be removed."""
577  pass;
578  def decide_domain_center(self,logger=None):
579  """!Decide the outermost domain's center.
580 
581  If the domain center is not already set in the [config]
582  section domlat and domlon variables, decides the domain center
583  using the hwrf.storminfo.StormInfo.hwrf_domain_center routine.
584  @param logger the logging.Logger for log messages."""
585  if logger is None: logger=self.log()
586  if self.has_option('config','domlat') and \
587  self.has_option('config','domlon'):
588  cenla=self.getfloat('config','domlat')
589  cenlo=self.getfloat('config','domlon')
590  logger.info('Domain center is already set to lat=%g lon=%g'
591  %(cenla,cenlo))
592  return
593  (cenlo, cenla) = self.syndat.hwrf_domain_center(logger)
594  self.set('config','domlat',cenla)
595  self.set('config','domlon',cenlo)
596  logger.info('Decided on domain center lat=%g lon=%g'%(cenla,cenlo))
597 
598  def choose_vitbase(self,storm_num=None):
599  """!Decides the location of the vitals file.
600 
601  Decides the location of the vitfile that should be read in by
602  read_precleaned_vitfile. Optionally, you can specify the
603  storm number (1-10) of the storm whose vitals should be read
604  in. Otherwise, a reasonable guess will be made.
605  @param storm_num the index of the storm from 1-10
606  @returns the vitals path"""
607  if storm_num is not None:
608  storm_num=int(storm_num)
609  vitfile=os.path.join(self.getdir('WORKhwrf'),
610  'storm%d.vitals'%(storm_num,))
611  else:
612  stormlabel=self.getstr('config','stormlabel','storm1')
613  vitfile=os.path.join(self.getdir('WORKhwrf'),
614  '%s.vitals'%(stormlabel,))
615  return vitfile
616 
617  # This was created for the hwrf multistorm basin scale implementation.
618  # Needed so hwrf could be run with no storms and also to
619  # more easily setup the fake storm directories and other config
620  # parameters dependent on having a vitals dictionary.
621  def read_fake_tcvitals(self, fakestorm_vitals=None):
622  """ Intended use is for the multistorm fake storm. Same as the
623  read_tcvitals_and_messages method except the vitals are
624  from fakestorm_vitals in hwrf_multistorm.conf. basd on the arguments."""
625 
626  logger=self.log()
627  inputs=list()
628 
629  default_fakestorm_vitals = 'NHC 00L FAKE ' +\
630  self._cycle.strftime('%Y%m%d %H%M') +\
631  ' 250N 800W -99 -99 -999 -999 -099 -9 -99 -999 -999 -999 -999 M'
632 
633  if fakestorm_vitals is None:
634  fakestorm_vitals=self.getstr('config','fakestorm_vitals',default_fakestorm_vitals)
635 
636  if fakestorm_vitals == default_fakestorm_vitals:
637  logger.info('Using default fakestorm vitals: %s'%(default_fakestorm_vitals))
638  inputs.append(fakestorm_vitals)
639  revital=hwrf.revital.Revital(logger=logger)
640  revital.readvitals(inputs,raise_all=False)
641  return revital
642 
    def read_tcvitals_and_messages(self,vitdir=None,vitpattern=None,
                                   include_messages=True,other_cycle=None):
        """!Reads in the tcvitals file and message files.

        Reads in the tcvitals files for the current cycle and
        optionally another cycle, which may be in the same file.  Also
        reads in message files if requested.  Cleans the result up and
        returns it as an hwrf.revital.Revital object.

        @param vitdir optional: the directory in which to find the tcvitals.
          Default: [dir] section syndat variable.

        @param vitpattern optional: passed into strftime to generate the
          name of the vitals file within vitdir. Default: [conf]
          section vitpattern variable, or syndat_tcvitals.%Y if
          missing.

        @param include_messages optional flag: if True, attempts to find
          the hurricane message files, and includes them in the
          list of files to read in.  Default: True.

        @param other_cycle optional: another cycle whose vitals file
          should also be parsed.  This can be anything accepted by
          to_datetime_rel(...,self.cycle).  This is intended to
          allow year-crossing cycling, such as a January 1, 00:00
          UTC cycle that is a warm start off of a prior December
          31, 18:00 UTC cycle.  If the other_cycle's vitals file
          is the same as the one from self.cycle, then the file is
          only read once.

        @return an hwrf.revital.Revital with the vitals data"""
        ENV=os.environ
        logger=self.log()
        inputs=list()
        if vitdir is None:
            vitdir=self.getdir('syndat')
        if vitpattern is None:
            vitpattern=self.getstr('config','vitpattern',
                                   'syndat_tcvitals.%Y')
        logger.info('VITDIR: %s' %(vitdir))
        # Current cycle's vitals file:
        file1=os.path.join(vitdir,self._cycle.strftime(vitpattern))
        inputs.append(file1)
        if other_cycle is not None:
            other_cycle=to_datetime_rel(other_cycle,self._cycle)
            file2=os.path.join(vitdir,other_cycle.strftime(vitpattern))
            # Only add the other cycle's file if it differs (it is the
            # same file unless the year changed):
            if file2!=file1:
                inputs.append(file2)

        if include_messages:
            # Try to guess the location of the message files:
            mdir=self.getdir('mesagdir','')
            if mdir is None or mdir=='':
                if 'mesagdir' in ENV:
                    mdir=ENV['mesagdir']
                elif 'envir' in ENV:
                    mdir='/com/hur/'+ENV['envir']+'/inpdata'
                else:
                    mdir='/com/hur/prod/inpdata'

            # Add the messages to the input files.  The nstorms file
            # says how many message files exist; on any read error we
            # fall back to scanning for all 7 possible messages.
            nstorms_filename=os.path.join(mdir,'nstorms')
            nstorms=7
            try:
                with open(nstorms_filename,'rt') as nstorms_file:
                    dat=nstorms_file.readline()
                    nstorms=int(dat)
            except (EnvironmentError,ValueError,TypeError) as e:
                logger.error('%s: error reading: %s. Will read all storms.'%(
                        nstorms_filename,str(e)),exc_info=True)
            # Message files are named message1..messageN; only include
            # the ones that actually exist:
            for imessage in xrange(nstorms):
                file=os.path.join(mdir,'message%d'%(imessage+1,))
                if os.path.exists(file):
                    inputs.append(file)

        self.log().info('read vitals from: '+','.join(inputs))
        revital=hwrf.revital.Revital(logger=logger)
        revital.readfiles(inputs,raise_all=False)
        return revital
721 
722  def set_storm(self,syndat,oldsyndat):
723  """!Sets the storm that is to be run.
724 
725  Sets the syndat and oldsyndat member variables, and several
726  related options in the [config] section, to the storm in the
727  provided tcvitals or message file data.
728 
729  * config.STID --- The three character storm id (ie.: 12L) of
730  the storm to run.
731  * config.stnum --- the numeric part of config.STID
732  * config.basin1 --- the basin part of STID (ie.: the L in 12L)
733  * config.basin1lc --- the lower-case version of config.basin1
734 
735  @param syndat the hwrf.storminfo.StormInfo for this cycle's vitals
736  @param oldsyndat the hwrf.storminfo.StormInfo for the prior cycle"""
737  assert(isinstance(syndat,hwrf.storminfo.StormInfo))
738  if oldsyndat is not None:
739  assert(isinstance(oldsyndat,hwrf.storminfo.StormInfo))
740  self.set_options('config',STID=syndat.stormid3,stnum=syndat.stnum,
741  basin1=syndat.basin1,basin1lc=syndat.basin1lc)
742  self.__dict__['syndat']=syndat.copy()
743  if oldsyndat is not None:
744  self.__dict__['oldsyndat']=oldsyndat.copy()
745 
746  # Multitorm - jtf
747  def set_storm_multistorm(self,multistorm_real_sids,syndat4multistorm,oldsyndat4multistorm):
748  """This is meant to be an internal implementation function and
749  should not be called directly. This is meant to only be used internally
750  by the fakestorm of a multistorm run.
751 
752  Adds the syndat_multstorm and oldsyndat_multistorm member
753  variables for the fake storm. They contain the StormInfo objects
754  for all the storm in a multistorm run from the provided tcvitals
755  or message file data.
756 
757  It is ultimately used for access to each storm's lat/lon
758  information in a multistorm run. This is needed for the
759  swcorner calculation for all the "stormNouter" storms.
760  """
761  # TODO: Rethink, Is this necessary, why are we doing these .copy() <jtf>
762  # Just add them to the dictionary ?
763  # Didn't have time to consider this, I just mirrored the behavior
764  # of def set_storm, and treated the logic as a black box.
765  # Not sure why we are creating a copy of the StormInfo object, just to
766  # assign it to a dictionary key.
767  syndat_fromcopy = []
768  oldsyndat_fromcopy = []
769  # TODO: Better, rather then a list make it a dictionary with sid as the key <jtf>
770  # TODO: CRITICAL, consider is syndat and old syndat always in sync 1:1, make it a dictionary <jtf>
771  # TODO: CRITICAL, think thru the case where oldsyndat is None <jtf>
772  for index in range(len(multistorm_real_sids)):
773  assert(isinstance(syndat4multistorm[index],hwrf.storminfo.StormInfo))
774  if oldsyndat4multistorm[index] is not None:
775  assert(isinstance(oldsyndat4multistorm[index],hwrf.storminfo.StormInfo))
776  syndat_fromcopy.append(syndat4multistorm[index].copy())
777 
778  if oldsyndat4multistorm[index] is not None:
779  oldsyndat_fromcopy.append(oldsyndat4multistorm[index].copy())
780  else:
781  oldsyndat_fromcopy.append(None)
782 
783  self.__dict__['syndat_multistorm']=syndat_fromcopy
784  self.__dict__['oldsyndat_multistorm']=oldsyndat_fromcopy
785 
786  def tcautoseed(self,loud=True):
787  """!Sets the random seed for ensemble perturbations.
788 
789  Automatically decides a random seed for the tcvitals
790  perturbation, based on the storm number, basin and cycle. The
791  number and basin used are before the invest renumbering
792  (self.syndat.old()).
793 
794  @param loud If loud=True (the default), then a message is sent
795  to the jlogfile via postmsg with the seed, and information
796  about the calculation that went into it."""
797  si=self.syndat.old() # storminfo before renumbering
798  icycle=int(self.cycle.strftime('%Y%m%d%H'))
799  istnum=int(si.stnum)
800  cbasin=str(si.basin1).upper()
801  ibasin=ord(cbasin)
802  seed=icycle ^ istnum ^ ibasin # ^ is bitwise exclusive or (XOR)
803  if loud:
805  'Automatic perturbation seed calculation: '
806  '%d %d%s => seed = %d^%d^ord("%s") = %d^%d^%d = %d'%(
807  icycle,istnum,cbasin,
808  icycle,istnum,cbasin,
809  icycle,istnum,ibasin,
810  seed))
811  return seed
812 
    def gen_vitals(self,STID,cycling_interval,revital,storm_num=None):
        """!Generate tcvitals files

        Given an hwrf.revital.Revital object, preferably from
        read_precleaned_vitfile or read_tcvitals_and_messages,
        searches for the specified storm's vitals. Creates the files
        that are expected to exist in the WORKhwrf directory. The
        filenames are based off of the vitbase variable, but with
        various suffixes appended. This function should only be
        called once per workflow, per storm.

        @param STID the three character stormid (12L)
        @param cycling_interval seconds between HWRF cycles (6*3600)
        @param revital The hwrf.revital.Revital with tcvitals data
        @param storm_num The storm index 1-10"""
        logger=self.log()
        # First two characters of the storm ID are the storm number.
        stnum=int(STID[0:2],10)
        STID=STID.upper()
        strcycle=self._cycle.strftime('%Y%m%d%H')
        syndat=None       # this cycle's vitals, once found
        oldsyndat=None    # prior cycle's vitals, once found

        # Normalize the cycling interval to positive hours; "prior" is
        # the previous cycle's analysis time.
        if cycling_interval<0:
            cycling_interval=-cycling_interval
        cycling_interval=cycling_interval/3600.0
        prior=hwrf.numerics.to_datetime_rel(-cycling_interval*3600.,self._cycle)
        strprior=prior.strftime('%Y%m%d%H')
        logger.info('gen_vitals: cycle=%s interval=%s prior=%s STID=%s'%(
                repr(self.cycle),repr(cycling_interval),repr(prior),
                repr(STID)))

        # Keep a vitals entry if either its current stormid3, or its
        # pre-renumbering stormid3, matches the requested STID.
        def keep_condition(vit):
            return vit.stormid3.upper()==STID or \
                ( 'old_stormid3' in vit.__dict__ and
                  vit.old_stormid3.upper()==STID )

        if stnum>=50:
            # Storm numbers >=50 are invests and are not renumbered.
            logger.info('%s: Not renumbering invests because %d>=50.'
                        %(STID,stnum))
            unrenumbered=revital.copy()
            unrenumbered.discard_except(keep_condition)
            unrenumbered.clean_up_vitals()
            renumbered=unrenumbered
        else:
            # Keep two parallel views of the vitals: one with original
            # (invest) numbering, one with the renumbered IDs.
            logger.info('%s: Renumber and unrenumber invests.'%(STID,))
            unrenumbered=revital.copy()
            unrenumbered.renumber(unrenumber=True)
            unrenumbered.discard_except(keep_condition)
            unrenumbered.clean_up_vitals()
            renumbered=unrenumbered.copy()
            renumbered.swap_numbers()
            renumbered.clean_up_vitals()
            unrenumbered.mirror_renumbered_vitals()
            unrenumbered.clean_up_vitals()

        # Find the current cycle's vitals:
        for vit in renumbered.each(STID):
            if vit.when==self._cycle:
                syndat=vit

        if syndat is None:
            # NOTE(review): the call that consumed this message (an
            # exception raise or logger call) appears to have been lost
            # from this listing; the orphaned expression below is not
            # valid on its own -- restore from upstream trunk.
            'Error: cannot find %s cycle %s'%(STID,strcycle))
        logger.info('syndat='+syndat.as_tcvitals())
        self.set_storm(syndat,None)

        # Perturb the current cycle's vitals if requested.
        ens=self.getint('config','ENS',99)
        ensize=self.getint('ensemble','ensize',20)
        # ENS 01..min(98,ensize) are perturbed members; 99 is the
        # deterministic (unperturbed) run.
        if ens>0 and ens<99 and ens<=ensize:
            seedmethod=self.getstr('ensemble','tcvitals_seed')
            if seedmethod=='auto':
                seed=self.tcautoseed()
            else:
                seed=self.getint('ensemble','tcvitals_seed')
            vmax_pert=self.getint('ensemble','vmax_pert')
            if vmax_pert>0:
                rand=Random()
                rand.seed(seed)
                # Zero-mean integer wind perturbations across the whole
                # ensemble; this member takes the (ens-1)th one.
                vperts=hwrf.numerics.randint_zeromean(ensize,vmax_pert,rand)
                vpert=vperts[ens-1]
                logger.info('ENS perturbations: %s sum %s'%(
                    ( ', '.join([repr(s) for s in vperts]) ),
                    repr(sum(vperts)) ))
                # NOTE(review): a jlogger/postmsg call line appears to
                # have been lost before this message in this listing.
                'ENS %d (of %d) wind perturbation %d m/s'
                %(ens,ensize,vpert))
                syndat.wmax+=vpert
            else:
                # NOTE(review): a jlogger/postmsg call line appears to
                # have been lost before this message in this listing.
                'ENS %d (of %d) wind perturbation disabled'%(ens,ensize))
        else:
            # NOTE(review): a jlogger/postmsg call line appears to have
            # been lost before this message in this listing.
            'ENS %d (of %d) is not a perturbed ensemble member; '
            'not perturbing wind.'%(ens,ensize))

        # Find the prior cycle's vitals. First pass: look for a cycle
        # whose data that actually exists on disk.
        nodatasyndat=None   # fallback: prior vitals with no data on disk
        for vit in unrenumbered.each(STID,old=True):
            if vit.when!=prior: continue # wrong cycle
            if oldsyndat is not None and oldsyndat.stnum<50:
                logger.info('%s %s: not checking these vitals for data on '
                            'disk since I found a non-invest number %s '
                            'already with data on disk'
                            %(str(vit.stormid3),str(vit.YMDH),
                              str(oldsyndat.stormid3)))
            else:
                # HISTCHECK names a file whose presence indicates the
                # prior cycle ran for this storm.
                checkfile=self.timestrinterp(
                    'config','{HISTCHECK}',atime=prior,ftime=prior,
                    oldvit=vit.__dict__,vit=syndat.__dict__)
                if os.path.exists(checkfile):
                    logger.info('%s: exists'%(checkfile,))
                    logger.info('%s %s: prior is %s %s and has data on disk'%
                                (STID,strcycle,vit.stormid3,strprior))
                    oldsyndat=vit
                else:
                    logger.info('%s: does not exist'%(checkfile,))
                    logger.info('%s %s: prior could be %s %s but there is '
                                'no data on disk'%
                                (STID,strcycle,vit.stormid3,strprior))
                    if oldsyndat is None:
                        if nodatasyndat is not None and nodatasyndat.stnum<50:
                            logger.info('%s %s: not using as backup since I found a '
                                        'non-invest number %s already'
                                        %(str(vit.stormid3),str(vit.YMDH),
                                          str(nodatasyndat.stormid3)))
                        else:
                            nodatasyndat=vit

        # Decide whether this is a cold start, preferring (1) a prior
        # cycle with data on disk, (2) one without data, (3) vitals
        # extrapolated backwards from the current cycle.
        self.set('config','expect_cold_start','no')
        if oldsyndat is None:
            logger.info('%s %s: no storm IDs for prior cycle have data '
                        'on disk.'%(STID,strcycle))
            if nodatasyndat is not None:
                oldsyndat=nodatasyndat
                logger.info('%s %s: will use %s %s as prior cycle storm.'
                            %(STID,strcycle,oldsyndat.stormid3,strprior))
                logger.info('prior vitals: '+oldsyndat.as_tcvitals())
            else:
                logger.warning('No prior syndat available. This is a cold '
                               'start. I will extrapolate vitals.')
                oldsyndat=syndat-cycling_interval # extrapolate vitals
                logger.warning('extrapolated vitals: %s'
                               %(oldsyndat.as_tcvitals()))
                self.set('config','expect_cold_start','yes')
        else:
            logger.info('%s %s prior cycle on disk for %s %s'
                        %(STID,strcycle,oldsyndat.stormid3,strprior))
            logger.info('prior cycle on disk: '+oldsyndat.as_tcvitals())

        # Record both cycles' vitals and write out the working files.
        self.set_storm(syndat,oldsyndat)
        vitbase=self.choose_vitbase(storm_num)

        vitbasedir=os.path.dirname(vitbase)
        produtil.fileop.makedirs(vitbasedir,logger=logger)

        logger.info('Reformat vitals...')
        filename=vitbase+'.allids'
        logger.info(
            filename+': write unrenumbered vitals with all storm IDs')
        with open(filename,'wt') as vitalsout:
            for vit in unrenumbered.each(stormid=STID,old=True):
                print>>vitalsout, vit.as_tcvitals()
        filename=vitbase+'.renumberlog'
        logger.info(filename+': write renumberlog with my storm ID')
        logger.info(vitbase+': write renumbered vitals')
        with open(filename,'wt') as renumberlog:
            with open(vitbase,'wt') as vitalsout:
                renumbered.print_vitals(vitalsout,renumberlog=renumberlog,
                                        stormid=STID,format='tcvitals')
        filename=vitbase+'.oldid'
        logger.info(filename+': write vitals with original ID')
        with open(filename,'wt') as vitalsout:
            for vit in renumbered.each(stormid=STID):
                print>>vitalsout, vit.old().as_tcvitals()

        filename=os.path.join(self.getdir('WORKhwrf'),'tmpvit')
        logger.info(filename+': write current cycle vitals here')
        with open(filename,'wt') as tmpvit:
            print>>tmpvit, self.syndat.as_tcvitals()

        filename=os.path.join(self.getdir('WORKhwrf'),'oldvit')
        logger.info(filename+': write prior cycle vitals here')
        with open(filename,'wt') as tmpvit:
            print>>tmpvit, self.oldsyndat.as_tcvitals()
999 
1000  def sanity_check_ensemble(self,enset,logger=None):
1001  """!Runs a sanity check on the ensemble configurations.
1002 
1003  Checks that:
1004 
1005  1. If the GEFS-based forecast ensemble is in use, a valid
1006  ensemble ID is chosen.
1007  2. If a valid ensemble ID is chosen, the GEFS-based forecast
1008  ensemble is in use.
1009  3. The user does not enable both the GEFS-based forecast
1010  ensemble and the GFS-based DA ensemble.
1011  4. If the GFS-based DA ensemble is in use, at least thirty
1012  members are chosen, and no more than eighty.
1013  @param enset a set of ensemble ids
1014  @param logger a logging.Logger for log messages"""
1015 
1016  has_gefs_members=False
1017  has_deterministic=False
1018  has_invalid=False
1019  for ens in enset:
1020  iens=int(ens,10)
1021  if iens>=0 and iens<=20:
1022  has_gefs_members=True
1023  elif iens==99:
1024  has_deterministic=True
1025  else:
1026  raise HWRFConfigInsane(
1027  "Invalid ensemble ID %s: must be 00-20 or 99"
1028  %(repr(ens),))
1029 
1030  if has_deterministic and has_gefs_members:
1031  raise HWRFConfigInsane(
1032  "You cannot run the GFS-based deterministic HWRF (ENS=99) "
1033  "and GEFS-based hwrf (ENS=00 through 20) in the same "
1034  "workflow.")
1035 
1036  is_fcst_ens=self.getbool('config','is_forecast_ensemble',False)
1037  fcst_ens=has_gefs_members
1038  da_ens=self.getbool('config','run_ensemble_da')
1039 
1040  if (fcst_ens or is_fcst_ens) and da_ens:
1041  raise HWRFConfigInsane(
1042  """
1043 You cannot run both the GFS-based DA ensemble (ENS=99
1044 run_ensemble_da=yes) and GEFS-based forecast ensemble (ENS=00 through
1045 20, run_ensemble_da=no). Turn one of them off.
1046 
1047 To run the GEFS-based HWRF ensemble with no data assimilation, you
1048 must set the ensemble ID to one or more numbers from 00-20 and specify
1049 the hwrf_ensemble_$YYYY override file:
1050 
1051  ./run_hwrf.py 01-20 2015 03W FORECAST ../parm/hwrf_ensemble_2014.conf
1052 
1053 To run the deterministic HWRF with ensemble covariances from six hour
1054 forecasts of HWRF off of the GFS ENKF, do this:
1055 
1056  ./run_hwrf.py 2015 03W FORECAST config.run_ensemble_da=yes
1057 
1058 You cannot do both.""")
1059 
1060  if is_fcst_ens!=fcst_ens:
1061  raise HWRFConfigInsane(
1062  """
1063 When running the GEFS-based HWRF ensemble, you must set the ensemble
1064 ID to one or more numbers from 00-20 and specify the
1065 hwrf_ensemble_$YYYY override file:
1066 
1067  ./run_hwrf.py 01-20 2015 03W FORECAST ../parm/hwrf_ensemble_2014.conf
1068 
1069 To run the deterministic HWRF, do neither:
1070 
1071  ./run_hwrf.py 2015 03W FORECAST
1072 """)
1073 
1074  if da_ens:
1075  ensda_size=self.getint('hwrf_da_ens','ensda_size',0)
1076  if(ensda_size<30):
1077  raise HWRFConfigInsane(
1078  "You must use at least 30 members when running the GFS "
1079  "ENKF based HWRF DA ensemble. You only requested %d."
1080  %ensda_size)
1081  if(ensda_size>80):
1082  raise HWRFConfigInsane(
1083  "You cannot use more than 80 members when running the GFS"
1084  " ENKF based HWRF DA ensemble. You requested %d."
1085  %ensda_size)
1086 
1087  def sanity_check_archive(self,logger=None):
1088  """!Runs a sanity check on the archiving settings.
1089  @param logger a logging.Logger for log messages"""
1090  if not self.getbool('sanity','check_archive',True): return
1091  archive=self.getloc('archive','NONE')
1092  if archive.lower()=='none':
1093  if logger is not None:
1094  logger.info('Archiving is disabled: archive=none')
1095  return
1096 
1097  adir=os.path.dirname(archive[5:])
1098  missing=False
1099  if archive[0:5]=='hpss:' or archive[0:5]=='hpsz:':
1100  logger.info('Cannot hsi -P ls / so skipping archive check.')
1101  elif archive[0:5]=='disk:':
1102  if os.path.exists(adir):
1103  if os.path.isdir(adir):
1104  logger.info('%s: disk archive directory exists and is a '
1105  'directory.'%(adir,))
1106  else:
1107  msg='%s: disk archive directory is not a '\
1108  'directory '%(adir,)
1109  logger.warning(msg)
1110  raise HWRFArchiveInsane(msg)
1111  else:
1112  logger.info('%s: disk archive directory does not exist'
1113  %(adir,))
1114  missing=True
1115  else:
1116  msg='%s: Invalid archive method %s'%(archive,archive[0:4])
1117  logger.error(msg)
1118  raise HWRFArchiveInsane(msg)
1119  if missing:
1120  if not self.getbool('archive','mkdir',False):
1121  msg='%s: archive directory is missing and [archive] mkdir '\
1122  'is disabled. Archive job would fail. Set [config] '\
1123  'archive=none to disable archiving OR set [archive] '\
1124  'mkdir=yes to make archive directory or disable the '\
1125  'archive sanity check with [sanity] check_archive=no'\
1126  %(archive,)
1127  logger.warning(msg)
1128  raise HWRFArchiveInsane(msg)
1129 
1130  def sanity_check_config_files(self,logger=None):
1131  """!Runs sanity checks related to config files.
1132 
1133  Sanity checks the provided *.conf files. For example, some
1134  config files are incompatible with others, and some must be
1135  loaded in a specific order.
1136  @param logger the logging.Logger for log messages"""
1137 
1138  if self.getbool('prelaunch','hwrf_43lev_conf',False) and \
1139  self.getbool('prelaunch','hwrf_3km_conf',False) and \
1140  self.getstr('prelaunch','last_of_43lev_3km','OOO')=='43lev':
1141  msg="When using 43lev and 3km configurations together, you "\
1142  "must load hwrf_43lev.conf BEFORE hwrf_3km.conf. "\
1143  "Otherwise, the model will use the wrong timestep."
1144  if logger is not None: logger.error(msg)
1146 
1147  def sanity_check_coupling(self,logger=None):
1148  """!Runs sanity checks related to coupling. Should be runnable
1149  with or without a specified cycle.
1150 
1151  @param logger A logging.Logger for log messages"""
1152  msg=None
1153  run_ocean=self.getbool('config','run_ocean',True)
1154  run_wave=self.getbool('config','run_wave',False)
1155  atmos=self.getstr('config','atmos_model','unspecified')
1156  ocean=self.getstr('config','ocean_model','unspecified')
1157  wave=self.getstr('config','wave_model','unspecified')
1158 
1159  if atmos!='WRF':
1160  msg='The atmos_model must be WRF not '+repr(atmos)
1161  logger.error(msg)
1162  if run_ocean and ocean!='HYCOM' and ocean!='POM':
1163  msg='The ocean_model must be POM or HYCOM not '+repr(ocean)
1164  logger.error(msg)
1165  if run_wave and wave!='WW3':
1166  msg='The wave_model must be WW3 not '+repr(wave)
1167  logger.error(msg)
1168 
1169  if run_ocean or run_wave:
1170  dtstr=self.getstr('wrf','dt')
1171  ntrack=self.getint('namelist_outer','physics.ntrack')
1172  nphs=self.getint('namelist_outer','physics.nphs')
1173  dt=hwrf.numerics.to_fraction(dtstr)
1174  dtc_atmos=ntrack*nphs*dt/3 # should be a Fraction
1175  dtc_cpl=self.getint('wrfexe','dt_c')
1176  if dtc_atmos!=dtc_cpl:
1177  msg='Coupler timestep %s is not equal to atmospheric '\
1178  'coupling timestep %s. (ATM dtc = ntrack(namelist_'\
1179  'outer)*nphs(namelist_outer)*dt(wrf)/3 = %s*%s*%s/3 = '\
1180  '%s != %s coupler dtc)'
1181  msg=msg%(str(dtc_cpl),str(dtc_atmos),str(ntrack),str(nphs),
1182  str(dt),str(dtc_atmos),str(dtc_cpl))
1183  logger.error(msg)
1184  if msg is not None:
1185  msg='Coupling configuration is incorrect. See earlier '\
1186  'error messages for details.'
1187  logger.error(msg)
1188  raise HWRFConfigInsane('Coupling configuration is incorrect.')
1189 
1190  def timeless_sanity_check(self,enset=None,logger=None):
1191  """!Runs all sanity checks that are not dependent on the cycle.
1192 
1193  Runs any sanity checks that are possible without knowing
1194  the cycle that is to be run. This is intended to be used by
1195  the workflow automation system (rocoto, ecflow, etc.) to make
1196  sure everything is functional before starting any jobs.
1197  @param enset a set of ensemble ids
1198  @param logger the logging.Logger for log messages"""
1199 
1200  for dirvar in ( 'HOMEhwrf', 'EXEChwrf', 'EXhwrf', 'USHhwrf',
1201  'FIXhwrf', 'PARMhwrf', 'utilexec' ):
1202  logger.debug('%s: check this dir variable'%(dirvar,))
1203  thedir=self.getdir(dirvar)
1204  self.sanity_check_directory(thedir,dirvar,False,logger)
1205 
1206  # Make sure the hwrf.launcher exists, and is the same as this
1207  # one.
1208  checkme=os.path.join(self.getdir('USHhwrf'),'hwrf','launcher.py')
1209  myfile=os.path.realpath(__file__)
1210  if myfile[-4:]=='.pyc': myfile=myfile[0:-1]
1211  if not produtil.fileop.isnonempty(checkme):
1212  raise HWRFScriptInsane(
1213  '%s: The ush/hwrf/launcher.py does not exist, which is '
1214  'impossible because it is running now. Check your paths '
1215  'and EXPT.'%(checkme,))
1216  if not os.path.samefile(checkme,myfile):
1217  raise HWRFScriptInsane(
1218  '%s: not the same as the launcher.py that is running now '
1219  '(%s) -- check your paths and EXPT.'%(checkme,myfile))
1220  self.sanity_check_forecast_length(logger)
1221  self.sanity_check_executables(logger)
1222  self.sanity_check_fix_files(logger)
1223  self.sanity_check_config_files(logger)
1224  self.sanity_check_coupling(logger)
1225  self.sanity_check_da(logger)
1226  if enset is not None:
1227  self.sanity_check_ensemble(enset,logger)
1228 
1229  def sanity_check_forecast_length(self,logger=None):
1230  """!Ensures the forecast length is valid.
1231  @param logger the logging.Logger for log messages"""
1232  iflen=self.getint('config','forecast_length',126)
1233  if iflen<12:
1234  raise HWRFConfigInsane("The forecast length must be at least "
1235  "12hrs (you specified %dhrs)"%iflen)
1236  if iflen%6 != 0:
1237  raise HWRFConfigInsane("The forecast length must divisible by "
1238  "6hrs (you specified %dhrs)"%iflen)
1239 
1240  def sanity_check_directory(self,thedir,dirvar,writable=True,logger=None):
1241  """!Runs a sanity check on the provided directory paths.
1242 
1243  Checks to make sure the specified directory exists and can be
1244  read and executed. If writable=True, also checks to see if it
1245  can be written. The dirvar is an explanation of what the
1246  directory relates to, for example HOMEhwrf.
1247  @param thedir a directory to check
1248  @param dirvar the variable that will be set to this directory (such as PARMhwrf, USHhwrf, etc.)
1249  @param writable Do we need to write to this directory?
1250  @param logger the logging.Logger for log messages"""
1251  if logger is None: logger=self.log('sanity.checker')
1252  logger.info('%s: check directory %s'%(dirvar,thedir))
1253  if not os.path.exists(thedir):
1254  raise HWRFDirInsane('%s: directory does not exist: %s'
1255  %(dirvar,thedir),thedir)
1256  if writable:
1257  if not os.access(thedir,os.W_OK):
1258  raise HWRFDirInsane('%s: cannot write directory: %s'
1259  %(dirvar,thedir),thedir)
1260  if not os.access(thedir,os.R_OK):
1261  raise HWRFDirInsane('%s: cannot read directory: %s'
1262  %(dirvar,thedir),thedir)
1263  if not os.access(thedir,os.X_OK):
1264  raise HWRFDirInsane('%s: cannot execute directory: %s'
1265  %(dirvar,thedir),thedir)
1266 
1267  def sanity_check_sanity_check(self,logger=None):
1268  """!Checks to see if the sanity checks can be run. In essence,
1269  this is a sanity check of the sanity check routines.
1270  @param logger the logging.Logger for log messages"""
1271  if not self.has_section('sanity'):
1272  raise HWRFConfigInsane(
1273  'The [sanity] section is missing from the HWRF conf files.')
1274  # Checking the fix_version is a further check of the [sanity]
1275  # section:
1276  self.sanity_get_fix_version(logger)
1277 
1278  def sanity_get_fix_version(self,logger=None):
1279  """!Sanity checks the fix file version.
1280 
1281  Gets the expected fix file version from [sanity] fix_version.
1282  Raises HWRFConfigInsane if there is an error while getting it.
1283  @param logger the logging.Logger for log messages"""
1284 
1285  fix_version=self.getstr('sanity','fix_version','nope')
1286  if fix_version=='nope':
1287  raise HWRFConfigInsane(
1288  'The [sanity] section fix_version is not set.')
1289  try:
1290  fix_version=int(fix_version)
1291  except (ValueError, TypeError) as e:
1292  raise HWRFConfigInsane(
1293  'The [sanity] section fix_version is not a number.')
1294  if fix_version > 20991231:
1295  raise HWRFConfigInsane(
1296  'The [sanity] section fix_version has an implausible value '
1297  '%d (>20991231)'%fix_version)
1298  if fix_version < 20040131:
1299  raise HWRFConfigInsane(
1300  'The [sanity] section fix_version has an implausible value '
1301  '%d (<20140131)'%fix_version)
1302  return fix_version
1303 
1304  def sanity_check_fix_files(self,logger=None):
1305  """!Sanity checks the fix files.
1306 
1307  Checks to see if the fix files are available and match the
1308  expected fix file version.
1309  @param logger the logging.Logger for log messages"""
1310 
1311  if not self.getbool('sanity','check_fix',True):
1312  if logger is not None:
1313  logger.info(
1314  'Skipping fix file check: [sanity] check_fix=no')
1315  return
1316 
1317  fix_version=self.sanity_get_fix_version(logger)
1318  if logger is not None:
1319  logger.info('Want fix file version %d'%fix_version)
1320  datestamp=os.path.join(self.getdir('FIXhwrf'),'hwrf_fix_datestamp')
1321  logger.info('check fix version: '+datestamp)
1322  def complain(msg):
1323  if logger is not None: logger.error(msg)
1324  raise HWRFFixInsane(msg)
1325  try:
1326  with open(datestamp,'rt') as f:
1327  line=f.readline()
1328  line=line.rstrip()
1329  version=int(line)
1330  if version>20991231:
1331  complain('%s: The fix file version datestamp %d is '
1332  'implausible (>201991231)'%(datestamp,version))
1333  elif version<20040131:
1334  complain('%s: The fix file version datestamp %d is '
1335  'implausible (<20140131)'%(datestamp,version))
1336  elif version<fix_version:
1337  complain('%s: The fix file version is too old. Expected '
1338  '%d, got %d'%(datestamp,fix_version,version))
1339  elif version!=fix_version:
1340  msg=(
1341  '%s: The fix file version (%d) does not match '
1342  'the expected version (%d). It is a newer '
1343  'version, so I will try to run.'%(
1344  datestamp,version,fix_version))
1345  if logger is not None: logger.warning(msg)
1346  produtil.log.jlogger.warning(msg)
1347  else:
1348  logger.info('fix version %d matches'%version)
1349 
1350  except (KeyError,TypeError,EnvironmentError,ValueError) as e:
1351  complain('%s: fix files failed a sanity check: %s'%(
1352  datestamp,str(e)))
1353 
1354  def sanity_check_executables(self,logger=None):
1355  """!Sanity checks some of the executables.
1356 
1357  Checks to see if a few of the executables are available. This
1358  is not an exhaustive check: most executables are not checked.
1359  This check is just to see if the user forgot to install
1360  executables entirely.
1361  @param logger the logging.Logger for log messages"""
1362 
1363  if not self.getbool('sanity','check_exec',True):
1364  if logger is not None:
1365  logger.info(
1366  'Skipping executable check: [sanity] check_exec=no')
1367  return
1368 
1369  loc=None
1370  exe=None
1371 
1372  def complain(why):
1373  # msg="wrf: /path/to/wrf.exe: executable is empty"
1374  msg='%s: %s: %s'%(exe,loc,why)
1375  if logger is not None:
1376  if exe=='gsi' and os.environ.get('PARAFLAG','YES')!='NO':
1377  logger.critical(
1378  '''GSI EXECUTABLE IS MISSING:
1379 
1380 If you are not NCO, and you are on Jet, Zeus or WCOSS, the latest
1381 developmental version of the HWRF GSI, maintained by Mingjing Tong,
1382 can be found at these locations:
1383 
1384  WCOSS: /hwrf/save/emc.hurpara/EMCGSI/hwrf_gsi
1385  Zeus: /scratch1/portfolios/NCEPDEV/hwrf/save/hurpara/EMCGSI/hwrf_gsi
1386  Jet: /mnt/pan2/projects/hwrfv3/hurrun/EMCGSI/hwrf_gsi
1387 
1388 Just link or copy the src/global_gsi executable to exec/hwrf_gsi
1389 in your installation directory:
1390 
1391  ln -s /path/to/GSI_HWRF/src/global_gsi %s
1392 
1393 If you are on another machine, you will need to check out and build
1394 GSI from either the EMC or DTC repositories, then build and install
1395 it.
1396 
1397 Sincerely,
1398  / \\
1399  \\O\\ THE HWRF TEAM /O/
1400  / \\ '''%(loc,))
1401  else:
1402  logger.critical(msg)
1403  raise HWRFExecutableInsane(msg)
1404 
1405  checkme=[ 'wrf', 'gettrk', 'post', 'real_nmm', 'mpiserial' ,
1406  'hwrf_geogrid', 'tar', 'hwrf_nhc_products',
1407  'cnvgrib' ]
1408 
1409  run_gsi=self.getbool('config','run_gsi',True)
1410  run_ocean=self.getbool('config','run_ocean',True)
1411  run_relocation=self.getbool('config','run_relocation',True)
1412 
1413  run_wave=self.getbool('config','run_wave',False)
1414 
1415  if run_relocation: checkme.append('hwrf_wrf_split')
1416  if run_gsi: checkme.append('gsi')
1417  if run_ocean: checkme.append('hwrf_ocean_fcst')
1418  if run_wave: checkme.insert(0,'ww3_shel')
1419 
1420  for exe in checkme:
1421  loc=self.getexe(exe)
1422  if loc.find('/')<0:
1423  # No path, so we need to search $PATH
1424  path=produtil.fileop.find_exe(loc,raise_missing=False)
1425  if path is None:
1426  complain('cannot find in $PATH')
1427  loc=path
1428  if not os.path.exists(loc): complain('executable does not exist')
1429  if os.path.getsize(loc)<=0: complain('executable is empty')
1430  if not os.path.isfile(loc): complain('executable is not a file')
1431  if not os.access(loc,os.X_OK): complain('cannot execute')
1432 
    def sanity_check(self):
        """!Runs nearly all sanity checks.

        Runs simple sanity checks on the HWRF installation directory
        and configuration to make sure everything looks okay. May
        throw a wide variety of exceptions if sanity checks fail."""
        logger=self.log('sanity.checker')
        # These two directories must be writable, unlike the install
        # directories checked in timeless_sanity_check():
        for dirvar in ( 'WORKhwrf', 'com' ):
            logger.info('%s: check this dir variable'%(dirvar,))
            thedir=self.getdir(dirvar)
            self.sanity_check_directory(thedir,dirvar,True,logger)

        enset=set()
        enset.add(self.get('config','ENS','99'))

        # Run all cycle-independent checks first:
        self.timeless_sanity_check(enset,logger)

        # Reload the conf file that launch() wrote, and verify it agrees
        # with this in-memory configuration:
        CONFhwrf=self.getdir('CONFhwrf')
        logger.info('Try to load configuration file %s'%(CONFhwrf,))
        redo=load(CONFhwrf)

        logger.info('Compare new and old vitals')
        if 'syndat' in self.__dict__ and self.syndat.stormid3 != \
                redo.syndat.stormid3:
            raise HWRFStormInsane(
                "New directory has the wrong stormid: correct=%s conf=%s"
                %(self.syndat.stormid3,redo.syndat.stormid3))
        if self.cycle!=redo.cycle:
            raise HWRFCycleInsane(
                'New directory has the wrong cycle: correct=%s conf=%s'
                %(self.cycle.strftime('%Y%m%d%H'),
                  redo.cycle.strftime('%Y%m%d%H')))

        case_root=redo.getstr('config','case_root').upper()
        input_catalog=redo.getstr('config','input_catalog')
        logger.info('Case root is %s and input catalog is %s'
                    %(repr(case_root),repr(input_catalog)))

        if case_root=='HISTORY':
            # Retrospective runs read pre-staged input; verify that one
            # representative input file is present and non-empty, unless
            # the input check is disabled or data will be fetched.
            if not self.getbool('sanity','check_input',True):
                logger.info(
                    'Input check is disabled: [sanity] check_input=False. '
                    'Skipping input checks.')
            elif self.get('config','input_catalog')=='hwrfdata':
                logger.info(
                    '[config] input_catalog=hwrfdata -- skipping input '
                    'check: will fetch input instead.')
            else:
                in_item=self.getstr('sanity','input_item','gfs')
                in_dataset=self.getstr('sanity','input_dataset','gfs_sfcanl')
                logger.info('Retrospective mode. Check for %s %s file.'%(
                        in_item,in_dataset))
                ic=hwrf.input.DataCatalog(redo,input_catalog,redo.cycle)
                there=ic.locate(in_dataset,in_item,redo.cycle)
                if there is None:
                    raise HWRFInputInsane('Could not locate %s %s file.'
                                          %(in_dataset,in_item))
                if not os.path.exists(there):
                    raise HWRFInputInsane(
                        '%s %s file does not exist: %s'%(
                            in_dataset,in_item,there))
                if not isnonempty(there):
                    raise HWRFInputInsane('%s %s file is empty: %s'%(
                            in_dataset,in_item,there))
        elif case_root=='FORECAST':
            logger.info('Real-time mode. Will skip data checks.')
        else:
            raise HWRFVariableInsane(
                'config.case_root must be HISTORY or FORECAST not %s'
                %(repr(case_root),))

        self.sanity_check_archive(logger)
        self.sanity_check_expt(logger)
1506 
1507  def sanity_check_da(self,logger):
1508  """!Sanity checks the data assimilation.
1509  @param logger the logging.Logger for log messages"""
1510  run_gsi=self.getbool('config','run_gsi',False)
1511  run_ensda=self.getbool('config','run_ensemble_da',False)
1512  run_ensreloc=self.getbool('config','run_ens_relocation',False)
1513  if not run_gsi:
1514  if run_ensda:
1515  logger.warning('You cannot run ENSDA without GSI. Disabling ENSDA.')
1516  run_ensda=False
1517  if not run_ensda:
1518  if run_ensreloc:
1519  logger.warning('You cannot run ENSDA relocation without ENSDA. '
1520  'Disabling ENSDA relocation.')
1521  run_ensreloc=False
1522  self.set('config','run_gsi','yes' if run_gsi else 'no')
1523  self.set('config','run_ensemble_da','yes' if run_ensda else 'no')
1524  self.set('config','run_ens_relocation','yes' if run_ensreloc else 'no')
1525 
1526  def sanity_check_expt(self,logger):
1527  """!Sanity checks the hwrf_expt module.
1528 
1529  Loads the hwrf_expt module, runs its init_module routine, and
1530  then runts its sanity_check, passing the specified logger.
1531  @param logger the logging.Logger for log messages"""
1532 
1533  if not self.getbool('sanity','check_expt',True):
1534  if logger is not None:
1535  logger.info(
1536  'Skipping hwrf_expt check: [sanity] check_expt=no')
1537  return
1538 
1539  logger.info('Export [config] CONFhwrf to the environment '
1540  'variable $CONFhwrf.')
1541  os.environ['CONFhwrf'] = self.getstr('config','CONFhwrf')
1542  logger.info('Attempt to load hwrf_expt module.')
1543  import hwrf_expt
1544  logger.info('Attempt to initialize hwrf_expt module.')
1546  logger.info("Run the hwrf_expt module's own sanity_check routine.")
1547  hwrf_expt.sanity_check(logger)
1548 
1550  """!Tries to guess default values for many configuration settings.
1551 
1552  Tries to set default values for some mandatory conf options.
1553  The default values come from either other options or from
1554  environment variables. If no suitable default can be found,
1555  execution will continue, but later jobs may fail.
1556 
1557  Config options are as follows. If $VAR appears, that refers
1558  to ENV["VAR"]:
1559 
1560  * config.cycle --- the cycle to run as a ten digit date (2014091418)
1561  Taken from cycle if present, otherwise $YMDH
1562 
1563  * config.storm_num --- the storm number as a priority 1 to 5.
1564  Taken from $storm_num or uses the default of 1.
1565 
1566  * config.stormlabel --- "storm" with the storm number appended
1567  (ie.: storm5 if storm_num=5).
1568 
1569  * dir.HOMEhwrf --- set to HOMEhwrf or $HOMEhwrf
1570 
1571  * dir.WORKhwrf --- set to WORKhwrf or $WORKhwrf
1572 
1573  * dir.syndat --- tcvitals directory. Default: $COMINARCH
1574 
1575  * config.input_catalog --- input catalog (conf section) name.
1576  Default: $INPUT_CATALOG or "hwrfdata"
1577 
1578  * config.PARAFLAG --- NO if you are NCEP Central Operations
1579  (NCO), and YES otherwise. This is used to turn on or off
1580  DBNet alerts and other NCO-specific options.
1581 
1582  In addition, the following directories are added to the [dir]
1583  section:
1584 
1585  * USHhwrf --- the location of ush scripts and the parent
1586  directory of the hwrf, pom and produtil packages
1587  * FIXhwrf --- the location of the HWRF fix directory
1588  * JOBhwrf --- the location of the HWRF jobs directory. This is
1589  not needed by normal users.
1590  * EXhwrf --- the location of the HWRF scripts directory
1591  * PARMhwrf --- the location of the HWRF parm/ directory
1592  * EXEChwrf --- the location of the HWRF exec/ directory
1593  * utilexec --- the location of the HWRF nwport/exec or
1594  /nwprod/exec directory
1595 
1596  If set, these variables will be copied to the [config] section:
1597  * EXPT --- optional: the experiment identifier, which must be
1598  alphanumeric, and can contain underscores. Default: HWRF
1599  * SUBEXPT --- optional: the subexperiment identifier, which
1600  must be alphanumeric, and can contain underscores.
1601  Default: set to value of EXPT """
1602  ENV=os.environ
1603  logger=self.log()
1604  PARAFLAG=( ENV.get('PARAFLAG','YES')!='NO' )
1605 
1606  def set_default(section,option,default,env1=None,env2=None):
1607  if not self.has_option(section,option):
1608  if env1 is not None and env1 in ENV:
1609  self.set(section,option,ENV[env1])
1610  elif env2 is not None and env2 in ENV:
1611  self.set(section,option,ENV[env2])
1612  elif default is not None:
1613  self.set(section,option,str(default))
1614  else:
1615  logger.error(
1616  'Cannot find suitable default for [%s] option %s'%(
1617  section,option))
1618 
1619  set_default('config','case_root','HISTORY','CASE_ROOT')
1620  set_default('config','EXPT','HWRF','EXPT')
1621  set_default('config','SUBEXPT','{EXPT}','SUBEXPT')
1622  set_default('dir','HOMEhwrf',None,'HOMEhwrf')
1623  set_default('dir','WORKhwrf',None,'WORKhwrf','DATA')
1624  set_default('config','datastore','{WORKhwrf}/hwrf_state.sqlite3')
1625  set_default('config','storm_num','1','storm_num')
1626  set_default('config','stormlabel','storm{storm_num}')
1627  set_default('config','input_catalog','hwrfdata','INPUT_CATALOG')
1628  set_default('dir','syndat',None,'COMINARCH')
1629  set_default('dir','com',None,'COMOUT')
1630  set_default('config','PARAFLAG','YES','PARAFLAG')
1631 
1632  if not self.has_option('config','cycle'):
1633  if 'YMDH' in ENV:
1634  self.cycle=ENV['YMDH']
1635  #cycle=self.cycle
1636 
1637  if 'NWPROD' in ENV:
1638  NWPROD='NWPROD',ENV['NWPROD']
1639  elif 'envir' in ENV and os.path.exists('/nw'+ENV['envir']):
1640  NWPROD='/nw'+ENV['envir']
1641  else:
1642  NWPROD='{HOMEhwrf}/nwport'
1643 
1644  def dirset(evar,deff,parent='{HOMEhwrf}'):
1645  if evar in ENV:
1646  self._conf.set('dir',evar,ENV[evar])
1647  elif not self._conf.has_option('dir',evar):
1648  self._conf.set('dir',evar,parent+'/'+deff.lower())
1649 
1650  dirset('FIXhwrf','fix')
1651  dirset('USHhwrf','ush')
1652  dirset('EXhwrf','scripts')
1653  dirset('EXEChwrf','exec')
1654  dirset('JOBhwrf','jobs')
1655  dirset('PARMhwrf','parm')
1656  dirset('utilexec','util/exec',NWPROD)
1657 
def make_holdvars(self,part1='{PARMhwrf}/hwrf_holdvars.txt',part2=None):
    """!Creates the com/storm*.holdvars.txt file

    Creates the storm*.holdvars.txt file needed by the old
    ksh-based scripts.  This is done for backward compatibility
    only.  The two arguments (part1 and part2) are two files to
    pass through self.strinterp and then into the holdvars file.
    Part 1 is mandatory, but part2 is optional.  It also fills in
    a few custom derived variables:

    * cap_run_gsi --- capitalized version of [config] section run_gsi
    * cap_run_relocation --- capitalized version of [config]
        section run_relocation
    * holdvars_model --- "COUPLED" if [config] section run_ocean is
        true, and "ATMOS" if it is false.
    @param part1 The first input file to read
    @param part2 The second input file to read or None to disable
    @returns the full contents of the holdvars file as one string"""
    # basestring exists only in Python 2; fall back to str so this
    # validation also works under Python 3.
    try:
        stringtype=basestring
    except NameError:
        stringtype=str
    assert(isinstance(part1,stringtype))
    out=list()
    gsi_flag=self.getbool('config','run_gsi')
    self.set('holdvars','cap_run_gsi',('YES' if gsi_flag else 'NO'))

    reloc_flag=self.getbool('config','run_relocation')
    self.set('holdvars','cap_run_relocation',
             ('YES' if reloc_flag else 'NO'))

    ocean_flag=self.getbool('config','run_ocean')
    self.set('holdvars','holdvars_model',
             ('COUPLED' if ocean_flag else 'ATMOS'))

    # supports the new wrf executable, where:
    # nio_tasks_per_group may (or may not) be a list of values,
    # one for each domain ie. io_pergrp='4,4,2,4,2 ...'
    # TODO: TEMP storm1.holdvars.txt using first element of list.
    # NOTE: storm1.holdvars.txt currently can not handle if
    # io_pergrp is a list, so using the first element for now.
    io_pergrp_str=self.getstr('runwrf','nio_tasks_per_group','0')
    io_groups=self.getint('runwrf','nio_groups',0)
    io_pergrp_cs=io_pergrp_str.strip().strip(',').strip().split(',')
    io_pergrp_ss=io_pergrp_str.strip().strip(',').strip().split()
    if len(io_pergrp_cs) > 1:
        io_pergrp=int(io_pergrp_cs[0])
    elif len(io_pergrp_ss) > 1:
        io_pergrp=int(io_pergrp_ss[0])
    else:
        io_pergrp=self.getint('runwrf','nio_tasks_per_group',0)

    # I/O servers are enabled only when both the group count and the
    # per-group task count are positive.  (A redundant second read of
    # nio_groups was removed here.)
    io_servers = (io_pergrp*io_groups)>0

    self.set('holdvars','IO_SERVERS',
             ('YES' if io_servers else 'NO'))
    self.set('holdvars','IOSRV_PERGRP','%d'%io_pergrp)
    self.set('holdvars','IOSRV_GROUPS','%d'%io_groups)

    with open(self.strinterp('dir',part1),'rt') as f:
        for line in f:
            out.append(self.strinterp('holdvars',line.rstrip()))
    if part2 is not None:
        # BUGFIX: strinterp requires the section name as its first
        # argument (as in the part1 loop above); the original passed
        # only one argument here, so supplying part2 always raised
        # TypeError.
        with open(self.strinterp('dir',part2),'rt') as f:
            for line in f:
                out.append(self.strinterp('holdvars',line.rstrip()))
    return '\n'.join(out) + '\n'
This module provides a set of utility functions to do filesystem operations.
Definition: fileop.py:1
def getexe
query the "exe" section
Definition: config.py:707
def load(filename)
Loads the HWRFLauncher created by the launch() function.
Definition: launcher.py:297
Raised when a directory is unspecified, missing or invalid.
Definition: exceptions.py:77
def set_storm(self, syndat, oldsyndat)
Sets the storm that is to be run.
Definition: launcher.py:722
def __init__
Creates a new HWRFLauncher.
Definition: launcher.py:561
def sanity_check_expt(self, logger)
Sanity checks the hwrf_expt module.
Definition: launcher.py:1526
def read_tcvitals_and_messages
Reads in the tcvitals file and message files.
Definition: launcher.py:644
def to_fraction
Converts an object or two to a fraction.
Definition: numerics.py:269
def to_datetime_rel(d, rel)
Converts objects to a datetime relative to another datetime.
Definition: numerics.py:319
Raised when configuration files were specified in the wrong order.
Definition: exceptions.py:94
def touch
Open the file for append and set mtime and atime.
Definition: fileop.py:164
def sanity_check_directory
Runs a sanity check on the provided directory paths.
Definition: launcher.py:1240
def init_module
Initializes the HWRF object structure.
Definition: hwrf_expt.py:384
def strinterp(self, sec, string, kwargs)
perform string expansion
Definition: config.py:807
def getfloat
get a float value
Definition: config.py:990
def parse_tcvitals
Reads data from a tcvitals file.
Definition: storminfo.py:302
Defines the Revital class which manipulates tcvitals files.
Definition: revital.py:1
def sanity_check(logger)
Runs a sanity check on this module's contents.
Definition: hwrf_expt.py:83
def sanity_check_ensemble
Runs a sanity check on the ensemble configurations.
Definition: launcher.py:1000
Defines StormInfo and related functions for interacting with vitals ATCF data.
Definition: storminfo.py:1
A shell-like syntax for running serial, MPI and OpenMP programs.
Definition: run.py:1
def postmsg(message)
Sends the message to the jlogfile logging stream at level INFO.
Definition: log.py:46
def sanity_check_archive
Runs a sanity check on the archiving settings.
Definition: launcher.py:1087
A replacement for the hwrf.config.HWRFConfig used throughout the HWRF system.
Definition: launcher.py:555
def log
returns a logging.Logger object
Definition: config.py:550
def get
get the value of an option from a section
Definition: config.py:1020
def sanity_check(self)
Runs nearly all sanity checks.
Definition: launcher.py:1433
def isnonempty(filename)
Returns True if the filename refers to an existent file that is non-empty, and False otherwise...
Definition: fileop.py:333
def getloc
search the config, exe and dir sections in that order
Definition: config.py:686
def getstr
get a string value
Definition: config.py:1005
Raised when the configuration had a different cycle than expected.
Definition: exceptions.py:98
def to_datetime(d)
Converts the argument to a datetime.
Definition: numerics.py:346
def parse_launch_args
Parses arguments to scripts that launch the HWRF system.
Definition: launcher.py:170
a class that contains configuration information
Definition: config.py:396
def launch
Initializes the directory structure for a new HWRF workflow.
Definition: launcher.py:404
Obtains input data needed by various subclasses of hwrf.hwrftask.HWRFTask.
Definition: input.py:1
def makedirs
Make a directory tree, working around filesystem bugs.
Definition: fileop.py:224
Time manipulation and other numerical routines.
Definition: numerics.py:1
def guess_default_values(self)
Tries to guess default values for many configuration settings.
Definition: launcher.py:1549
def randint_zeromean
Generates "count" numbers uniformly distributed between -imax and imax, inclusive, with a mean of zero.
Definition: numerics.py:146
Raised when the configuration had a different storm than expected.
Definition: exceptions.py:96
def set(self, section, key, value)
set a config option
Definition: config.py:518
def sanity_check_fix_files
Sanity checks the fix files.
Definition: launcher.py:1304
def getint
get an integer value
Definition: config.py:975
def sanity_check_da(self, logger)
Sanity checks the data assimilation.
Definition: launcher.py:1507
parses UNIX conf files and makes the result readily available
Definition: config.py:1
def storm_for_stormnum(self)
Not implemented.
Definition: launcher.py:569
def decide_domain_center
Decide the outermost domain's center.
Definition: launcher.py:578
def tcautoseed
Sets the random seed for ensemble perturbations.
Definition: launcher.py:786
Configures logging.
Definition: log.py:1
def timeless_sanity_check
Runs all sanity checks that are not dependent on the cycle.
Definition: launcher.py:1190
def sanity_check_coupling
Runs sanity checks related to coupling.
Definition: launcher.py:1147
def sanity_check_executables
Sanity checks some of the executables.
Definition: launcher.py:1354
def getbool
get a bool value
Definition: config.py:1070
Provides the location of a file in an archive, on disk or on a remote server via sftp or ftp...
Definition: input.py:109
Exceptions raised by the hwrf package.
Definition: exceptions.py:1
def sanity_check_config_files
Runs sanity checks related to config files.
Definition: launcher.py:1130
This should be raised when the user requests a specific storm or cycle of a storm and no such vitals ...
Definition: storminfo.py:99
def has_section(self, sec)
does this section exist?
Definition: config.py:653
def make_holdvars
Creates the com/storm*.holdvars.txt file.
Definition: launcher.py:1658
def getdir
query the "dir" section
Definition: config.py:672
def has_option(self, sec, opt)
is this option set?
Definition: config.py:662
def sanity_get_fix_version
Sanity checks the fix file version.
Definition: launcher.py:1278
def multistorm_parse_args
Definition: launcher.py:34
def set_storm_multistorm(self, multistorm_real_sids, syndat4multistorm, oldsyndat4multistorm)
Definition: launcher.py:747
def find_exe
Searches the $PATH or a specified iterable of directory names to find an executable file with the giv...
Definition: fileop.py:573
def set_options(self, section, kwargs)
set values of several options in a section
Definition: config.py:478
def sanity_check_sanity_check
Checks to see if the sanity checks can be run.
Definition: launcher.py:1267
def sanity_check_forecast_length
Ensures the forecast length is valid.
Definition: launcher.py:1229
def choose_vitbase
Decides the location of the vitals file.
Definition: launcher.py:598
cycle
the analysis cycle, a datetime.datetime object
Definition: config.py:607
Storm vitals information from ATCF, B-deck, tcvitals or message files.
Definition: storminfo.py:411
def gen_vitals
Generate tcvitals files.
Definition: launcher.py:813
This class reads one or more tcvitals files and rewrites them as requested.
Definition: revital.py:38
def timestrinterp(self, sec, string, ftime, atime=None, kwargs)
performs string expansion, including time variables
Definition: config.py:826