HWRF  trunk@4391
relocate.py
1 """!This module implements Python classes that run the 2014 HWRF Relocation.
2 
3 The HWRF relocation is divided into four parts:
4 
5 * Stage1 --- remove the prior cycle's vortex
6 
7 * Stage2 --- remove the parent vortex
8 
9 * Stage3 --- Relocate and paste the fields together
10 
11 * Merge --- merge the relocation output and GSI. This is only used when
12 GSI is enabled.
13 
14 In addition, due to the requirement of splitting the relocation into
15 three stages, there are "relocation info" files for passing
16 information between the three. The RelocationInfo object provides a
17 means to read and write these files."""
18 
19 ##@var __all__
20 # List of symbols to export via "from hwrf.relocate import *"
21 __all__=['Stage1', 'Stage2', 'Stage3', 'Merge', 'RelocationInfo',
22  'WARM', 'COLD']
23 
24 import os, shutil
25 import glob
26 import time
27 import math
28 import re
29 import itertools
30 import string
31 import urlparse
32 import datetime
33 import pdb
34 import subprocess
35 import logging
36 
37 import hwrf.namelist
38 import hwrf.exceptions
39 
40 import produtil.datastore
41 import produtil.run
42 import produtil.locking
43 import produtil.fileop
44 
45 from ConfigParser import ConfigParser, SafeConfigParser, RawConfigParser
46 from hwrf.hwrftask import HWRFTask
47 from produtil.run import checkrun, run,exe, bigexe, alias,runsync
48 from hwrf.numerics import partial_ordering, TimeArray, to_timedelta, \
49  within_dt_epsilon, to_datetime_rel, \
50  to_datetime, to_fraction
51 from produtil.datastore import FileProduct, COMPLETED, FAILED, RUNNING
52 from produtil.cd import TempDir, NamedDir
53 from produtil.fileop import deliver_file, make_symlink, isnonempty, \
54  remove_file
55 from hwrf.exceptions import RelocationError, RelocateOutputMissing, \
56  StormRadiusError
57 
58 ##@var WARM
59 # A constant used by the RelocationInfo to represent warm starts.
60 WARM=object()
61 
62 ##@var COLD
63 # A constant used by the RelocationInfo to represent cold starts.
64 COLD=object()
65 
class RelocationInfo(object):
    """!Passes information about relocation status between relocation stages.

    This class is used to pass information between the relocate
    stages.  There are several public member variables that ARE meant
    to be written depending on logic within the relocation Stage1,
    Stage2 and Stage3 classes.  One then writes out the relocation
    status information to an intermediate file at the end of each
    relocation stage using self.write_info.  It is read in by a later
    step of the relocation, or by the merge, via
    RelocationInfo(filename,...)

    * iflag_cold = 1 or 0 used by several of the fortran relocation
      programs

    * warm_cold_flag = the constants WARM, COLD or None, depending on
      whether this is a warm start, a cold start, or an unknown state.

    * cold_ok = True only if the relocation intentionally vetoed a
      warm start, such as for a weak storm

    * initopt = 0, full vortex initialization; 1, relocation only

    * ensda_relocate = True, relocation for ensemble member"""
    def __init__(self,filename=None):
        """!Creates a new RelocationInfo object, optionally reading its
        values from the specified *.info file.
        @param filename the *.info file to read, or None to start from
          the default values"""
        self.iflag_cold=0
        self.warm_cold_flag=None
        self.cold_ok=False
        self.initopt=0
        self.ensda_relocate=False
        self.ensda_relocate_continue=False
        if filename is not None:
            self.read_info(filename)
        self.from_file=filename
        return

    ##@var from_file
    # The file that was read in, or None if no file was read.

    ##@var initopt
    # Initialization flag variable for the relocation:
    # 0 = full vortex initialization, 1 = relocation only.

    ##@var iflag_cold
    # An int 0 or 1 used by several of the Fortran relocation programs
    # to trigger based on warm or cold starts.

    ##@var warm_cold_flag
    # The constants WARM, COLD or None to indicate warm vs. cold starts.
    # * WARM --- this is a warm start (prior cycle vortex is in use)
    # * COLD --- this is a cold start (no prior cycle vortex in use)
    # * None --- it is not known whether this is a cold or warm start.

    ##@var cold_ok
    # Set to True if the relocation intentionally vetoes warm starting.
    # This is done, for example, if the storm is weak or shallow.
    def __str__(self):
        """!A Pythonic string representation of this object."""
        return 'RelocationInfo(iflag_cold=%s,warm_cold_flag=%s,cold_ok=%s,'\
            'initopt=%s,ensda_relocate=%s,ensda_relocate_continue=%s)'\
            %(repr(self.iflag_cold),self.warm_cold_str(),repr(self.cold_ok),
              repr(self.initopt),repr(self.ensda_relocate),
              repr(self.ensda_relocate_continue))
    def make_warm_cold(self,value,logger=None):
        """!Returns the module-level constants COLD or WARM, or None,
        for the specified string value.

        @param value A string that is "WARM", "COLD" or "NONE" and is
        case-insensitive.  If it has any other value, None is returned
        and a message is logged at ERROR level (if a logger is given).
        @param logger a logging.Logger for log messages."""
        v=str(value).upper()
        if v=='COLD': return COLD
        elif v=='WARM': return WARM
        elif v=='NONE': return None
        else:
            if logger is not None:
                logger.error(
                    'Invalid value %s (string %s) for warm_cold_flag.  '
                    'Assuming None' %(repr(value),repr(v)))
            return None
    def warm_cold_str(self):
        """!This is the opposite of the make_warm_cold routine: it
        returns "COLD", "WARM", or "None" for the constants COLD, WARM
        or None based on self.warm_cold_flag"""
        if self.warm_cold_flag is COLD: return 'COLD'
        elif self.warm_cold_flag is WARM: return 'WARM'
        else: return 'None'
    def read_info(self,filename,logger=None):
        """!Reads the relocation information into this object from the
        specified filename.  This is called automatically by the
        constructor.  Error messages are logged to the given logger,
        if one is provided.
        @param filename the name of the file to read
        @param logger a logging.Logger for log messages"""
        if not isinstance(filename,basestring):
            raise TypeError(
                'RelocationInfo.read_info expects a string for its '
                'filename parameter.  You provided a %s %s.'
                %(type(filename).__name__,repr(filename)))
        with open(filename,'rt') as f:
            # Defaults cover any option missing from the file:
            scp=SafeConfigParser(defaults={
                    'iflag_cold':'0', 'warm_cold_flag':'None',
                    'cold_ok':'False', 'initopt':'0',
                    'ensda_relocate':'False',
                    'ensda_relocate_continue':'True'})
            scp.readfp(f)
            self.iflag_cold=scp.getint('info','iflag_cold')
            # BUG FIX: ConfigParser.get()'s third positional argument is
            # the "raw" interpolation flag, not a default value; passing
            # 'None' there silently disabled interpolation.  The default
            # comes from the defaults= dict above instead.
            self.warm_cold_flag=self.make_warm_cold(
                scp.get('info','warm_cold_flag'))
            self.cold_ok=scp.getboolean('info','cold_ok')
            self.initopt=scp.getint('info','initopt')
            self.ensda_relocate=scp.getboolean('info','ensda_relocate')
            self.ensda_relocate_continue=scp.getboolean(
                'info','ensda_relocate_continue')

    def write_info(self,filename,logger=None):
        """!Writes this object's relocation information to the
        specified *.info file.  Logs status to the specified logger,
        if one is present.
        @param filename the file to write
        @param logger a logging.Logger for log messages"""
        if logger is not None:
            logger.info('Generating in-memory ConfigParser object for '
                        'RelocationInfo')
        filename=str(filename)
        thedir=os.path.dirname(filename)
        produtil.fileop.makedirs(thedir,logger=logger)
        c=RawConfigParser()
        c.add_section('info')
        c.set('info','iflag_cold',str(self.iflag_cold))
        c.set('info','warm_cold_flag',self.warm_cold_str())
        c.set('info','cold_ok',str(self.cold_ok))
        c.set('info','initopt',str(self.initopt))
        c.set('info','ensda_relocate',str(self.ensda_relocate))
        c.set('info','ensda_relocate_continue',str(self.ensda_relocate_continue))

        if logger is not None:
            logger.info('Writing RelocationInfo to file: %s'%(filename,))
        with open(filename,'wt') as f:
            c.write(f)
206 
class RelocationTask(HWRFTask):
    """!This is a HWRF task that forms the base class for all vortex
    relocation tasks, including the Merge.  It exists solely to reduce
    code complexity."""
    def __init__(self,dstore,conf,section,sim,domains,taskname=None,
                 modin='GDAS1',wrfanl=None,wrfghost=None,wrfinput=None,
                 parentTrack=None,trackName='track0',ghost_domains=None,
                 dest_dir=None,gsi_d02=None,gsi_d03=None,
                 gsi_d01=None,cycling_interval=-6*3600,info=None,
                 fgat_times=None,centrack=None,ensda=None,**kwargs):
        """!RelocationTask constructor.

        @param dstore the produtil.datastore.Datastore for database storage
        @param conf the hwrf.config.HWRFConfig for configuration info
        @param section the configuration section to use
        @param sim the hwrf.wrf.WRFSimulation describing the simulation being relocated
        @param domains the hwrf.wrf.WRFDomains being relocated
        @param taskname the taskname in the database
        @param modin input model: "GFS" or "GDAS1".
        @param wrfanl the wrfanl input source, which should have a get_wrfanl() function
          that accepts an hwrf.wrf.WRFDomain and returns an hwrf.datastore.Product
        @param wrfghost the ghost file input source, which should have a get_ghost() function
          that accepts an hwrf.wrf.WRFDomain and returns an hwrf.datastore.Product
        @param wrfinput the wrfinput datasource, which must have a get_wrfinput() function
          that returns an hwrf.datastore.Product for the wrfinput file
        @param parentTrack the parent track file input source, which must have a products()
          function that takes the track product name and returns the produtil.datastore.Product
        @param trackName the parent track name to pass to parentTrack.products()
        @param ghost_domains the list of ghost domains for relocation input
        @param dest_dir output directory for the relocation
        @param gsi_d02 the hwrf.gsi.FGATGSI that will run the GSI for the intermediate domain.
        @param gsi_d03 the hwrf.gsi.FGATGSI that will run the GSI for the innermost domain.
        @param gsi_d01 the hwrf.gsi.GSIBase that will run GSI for the outermost domain.
        @param cycling_interval negative number of seconds between cycles (-6*3600)
        @param info the RelocationInfo object to use
        @param fgat_times the list of FGAT times, datetime.datetime objects
        @param centrack the product for the center FGAT time track
        @param ensda the ensemble DA input source, which must have get_wrfinput()
          and get_wrfanl() functions, or None to disable ensemble relocation inputs
        @param kwargs passed to hwrf.hwrftask.HWRFTask.__init__"""
        assert(not isinstance(domains,basestring))
        self.__rinfo=None
        if taskname is None:
            taskname=section
        self.info = RelocationInfo() if (info is None) else info
        # Default the delivery location to intercom/<taskname>:
        if 'location' not in kwargs:
            kwargs['location']=os.path.join(conf.getdir('intercom'),taskname)

        self.fgat_times=fgat_times

        # Normalize cycling_interval: negate, then negate again if the
        # result came out positive, so the stored value is never positive.
        cycling_interval=-cycling_interval
        if cycling_interval>0:
            cycling_interval=-cycling_interval

        self.cycling_interval=to_timedelta(cycling_interval)

        super(RelocationTask,self).__init__(dstore,conf,section,taskname,
                                            **kwargs)
        self._ensda=ensda

        logger=self.log()

        # Validate FGAT times: each must be at or after the parent
        # model's cycle (conf.cycle shifted by cycling_interval).
        if fgat_times is not None:
            parent_cycle=to_datetime_rel(cycling_interval,self.conf.cycle)
            for t in fgat_times:
                t=to_fraction(t-parent_cycle)
                t=int(float(round(t)))
                if t<0:
                    raise ValueError(
                        'In RelocationTask.__init__, all fgat_times must '
                        'be 0 or greater (times relative to the parent '
                        'model). You gave: %s.'%(repr(t),))

        self.sim = sim
        logger.debug('domains: '+repr(domains))
        # Map the given domain list to this simulation's domain objects:
        self.domains=[sim[domain] for domain in domains]
        self.dt_epsilon=to_timedelta(300) # five minutes
        self._gsi_d01 = gsi_d01
        self._gsi_d02 = gsi_d02
        self._gsi_d03 = gsi_d03

        # Products for the inner (d02) and innermost (d03) wrfanl files:
        if wrfanl is not None:
            self._wrfanl_d02 = wrfanl.get_wrfanl(self.domains[1])
            self._wrfanl_d03 = wrfanl.get_wrfanl(self.domains[2])
        else:
            self._wrfanl_d02=None
            self._wrfanl_d03=None

        # Ensemble DA inputs: the MOAD uses a wrfinput product and the
        # nest uses a wrfanl product, both at the analysis time.
        if self._ensda is not None:
            for domain in self.domains:
                if domain.is_moad():
                    self._ensda_wrfinput_d01 = self._ensda.get_wrfinput(
                        domain,atime=self.conf.cycle)
                else:
                    self._ensda_wrfinput_d02 = self._ensda.get_wrfanl(
                        domain,atime=self.conf.cycle)
                    assert(self._ensda_wrfinput_d02 is not None)
        else:
            self._ensda_wrfinput_d01=None
            self._ensda_wrfinput_d02=None

        self._wrfanl = wrfanl

        if wrfinput is not None:
            self._wrfinput = wrfinput.get_wrfinput()
        else:
            self._wrfinput=None

        # NOTE(review): ghost_domains and the _ghost_d0* products are
        # only set when wrfghost is provided; callers that pass
        # wrfghost=None must not use them -- confirm against subclasses.
        if wrfghost is not None:
            logger.debug('ghost domains: '+repr(ghost_domains))
            logger.debug('ghost domains iterated: '
                         +repr([ d for d in wrfghost.wrf() ]))
            self.ghost_domains = [wrfghost.wrf()[domain]
                                  for domain in ghost_domains]
            self._ghost_d02 = wrfghost.get_ghost(ghost_domains[1])
            assert(self._ghost_d02 is not None)
            self._ghost_d03 = wrfghost.get_ghost(ghost_domains[2])
            assert(self._ghost_d03 is not None)
        self._wrfghost = wrfghost
        self._parentTrack = parentTrack
        self._trackName = trackName
        self._centrack = centrack

        # Default work area: WORKhwrf/<modin>.<simulation start time>
        if dest_dir is None:
            logger.debug('self._wrfanl='+repr(self._wrfanl))
            logger.debug('self._wrfanl.wrf()='+repr(self._wrfanl.wrf()))
            logger.debug(
                'self._wrfanl.wrf().simstart()='+
                repr(self._wrfanl.wrf().simstart()))
            logger.debug(
                'self._wrfanl.wrf().simstart().strftime(%%Y%%m%%d%%H)='
                +repr(self._wrfanl.wrf().simstart().strftime('%Y%m%d%H')))
            dest_dir=os.path.join(
                self.getdir('WORKhwrf'),modin.lower()+'.'+
                self._wrfanl.wrf().simstart().strftime('%Y%m%d%H'))
        self.dest_dir=dest_dir
        logger.debug('self.dest_dir='+self.dest_dir)

        # fhr used as input arguments: hours from the parent analysis
        # (six hours before the cycle) to this simulation's start.
        dt = self.sim.simstart() - self.conf.cycle
        self._fhr = 6 + dt.days*24 + dt.seconds //3600

        # Set the DA mode input
        self._modin=modin

        # Figure out if we are a cold start or cycled run: a warm start
        # requires the previous cycle's COM directory to exist.
        oldcom=self.getdir('oldcom')
        oldsid=self.getdir('oldsid')
        logger.info('oldcom directory: %s'%oldcom)
        if os.path.isdir(oldcom):
            self._warm = True
            self._prev_cycle_dir = oldcom
            self._prev_cycle_sid = oldsid
        else:
            self._warm = False
            self._prev_cycle_dir = ''
            self._prev_cycle_sid = ''

        # Figure out the vortex origin model/status.
        if self.modin == 'GFS':
            self._vortex = 'GFS'
        elif self.modin == 'ENKF':
            self._vortex = 'ENKF'
        else:
            # GDAS input: GDAS when GSI runs on an inner domain,
            # otherwise HDAS.
            if self._gsi_d02 is not None or self._gsi_d03 is not None:
                self._vortex = 'GDAS'
            else:
                self._vortex = 'HDAS'
374 
375  ##@var modin
376  # The input model: GFS or GDAS1
377 
378  def get_wrfinput(self,domain=None):
379  """!Returns the wrfinput output Product for the specified
380  domain, or None if no such domain is known.
381  @returns None
382  @param domain the domain of interest
383  @note This is a abstract function that should be replaced in subclasses."""
384  return None
385 
386  ##@var info
387  # A RelocationInfo object to trade relocation information with other stages of the relocation.
388 
389  ##@var fgat_times
390  # The list of FGAT hours.
391 
392  ##@var cycling_interval
393  # The positive datetime.timedelta time between cycles.
394 
395  ##@var sim
396  # The hwrf.wrf.WRFSimulation describing the WRF simulation
397 
398  ##@var domains
399  # The list of domains from sim that match the domains with the same name provided to
400  # the constructor.
401 
402  ##@var ghost_domains
403  # The list of ghost domains passed to the constructor.
404 
405  ##@var dt_epsilon
406  # An epsilon value for time equality comparisons
407 
408  ##@var dest_dir
409  # Delivery directory for outputs.
410 
411  def get_wrfanl(self,domain):
412  """!Returns the wrfanl output Product for this Task for the
413  specified domain or None if no such product exists
414  @param domain the domain of interest
415  @returns None
416  @note This is a abstract function that should be replaced in subclasses."""
417  return None
418 
419  def get_ghost(self,domain):
420  """!Returns the wrfghost output Product for this Task for the
421  specified domain
422  @param domain the domain of interest
423  @note This is a abstract function that should be replaced in subclasses."""
424  return None
425 
426  def wrfinput_at_time(self,atime,domain):
427  """!Returns the wrfinput output file for the specified time and
428  domain, or returns None if no such file exists.
429  @param atime the time of interest
430  @param domain the domain of interest"""
431  if atime is not None and \
432  not within_dt_epsilon(atime,self.sim.simstart(),
433  self.dt_epsilon):
434  self.log().info(
435  'wrfinput_at_time: atime=%s is not near my time %s'
436  %(atime.strftime('%Y%m%d%H'),domain.strftime('%Y%m%d%H')))
437  return None
438  return self.get_wrfinput(domain)
439 
440  def wrfanl_at_time(self,atime,domain):
441  """!Returns the wrfanl output file for the specified time and
442  domain, or None if no such file exists.
443  @param atime the time of interest
444  @param domain the domain of interest"""
445  if atime is not None and \
446  not within_dt_epsilon(atime,self.sim.simstart(),
447  self.dt_epsilon):
448  self.log().info(
449  'wrfanl_at_time: atime=%s is not near my time %s'
450  %(atime.strftime('%Y%m%d%H'),domain.strftime('%Y%m%d%H')))
451  return None
452 
453  def copy_fixed(self):
454  """!Copies the fixed files to the local directory."""
455  logger = self.log()
456  tbl=self.confstr('tbl','')
457  if tbl=='':
458  tbl=self.confstrinterp('{FIXhwrf}/hwrf_eta_micro_lookup.dat')
459  assert(tbl)
460  assert(isnonempty(tbl))
461  make_symlink(tbl,"eta_micro_lookup.dat",
462  force=True, logger=logger)
463 
464  def delete_temp(self):
465  """!Deletes all temporary files created by the relocation jobs."""
466  logger=self.log()
467  dd=self.dest_dir
468  if dd is None:
469  logger.warning('Internal error: dest_dir is None. Cannot '
470  'delete temporary files.')
471  elif dd=='':
472  logger.warning('Internal error: dest_dir is the empty '
473  'string. Cannot delete temporary files.')
474  else:
475  logger.warning(str(dd)+': deleting this directory tree.')
476  if not os.path.exists(dd):
477  logger.warning(
478  str(dd)+': does not exist; nothing to delete.')
479  return
480  def rmerr(function,path,exc_info):
481  logger.info('%s: exception while deleting file: %s %s'
482  %(str(path),str(function),str(exc_info)))
483  try:
484  shutil.rmtree(dd,onerror=rmerr)
485  except EnvironmentError as ee:
486  logger.warning(
487  str(dd)+': unable to delete this directory tree; '
488  'continuing anyway: '+str(e),exc_info=True)
489  except Exception as e:
490  logger.warning(
491  str(dd)+': unhandled exception deleting this '
492  'directory tree: '+str(e),exc_info=True)
493  raise
494 
495  def products(self):
496  """!Iterates over all products generated by this task.
497  @note This is a abstract function that should be replaced in subclasses."""
498  if False: yield 'hello' # to ensure this is an interator
499 
    def deliver_products(self,missing=None,logger=None,keep=False,
                         frominfo=None,**kwargs):
        """!Delivers products to intercom via Product.deliver.

        Any keyword arguments are passed on to Product.deliver.  By
        default, keep=False, which means the local copy of the file
        may no longer exist.  If frominfo is specified, it will be
        ignored: each product is delivered from the basename of its
        own location in the current working directory.
        @param keep If True, then the file may be moved to the destination
          instead of copying.
        @param frominfo Ignored.
        @param kwargs Passed to produtil.datastore.FileProduct.deliver()
        @param logger a logging.Logger for log messages
        @param missing a function called if the file to deliver does not exist.
          It is passed the product and the basename of the file."""
        if logger is None: logger=self.log()
        logger.warning('Delivering products for %s'%(self.taskname,))
        # Make sure the delivery area exists before delivering:
        produtil.fileop.makedirs(self.location,logger=logger)
        for p in self.products():
            loc=p.location
            # Deliver from the local copy in the current directory:
            bloc=os.path.basename(loc)
            if os.path.exists(bloc):
                logger.warning('%s: deliver product from ./%s'%(p.did,bloc))
                p.deliver(frominfo=bloc,keep=keep,logger=logger,**kwargs)
            else:
                logger.warning('%s: ./%s does not exist. Cannot deliver.'
                               %(p.did,bloc))
                if missing is not None:
                    missing(p,bloc)
528 
529  def write_vitals(self,filename='tcvitals.as',logger=None):
530  """!Writes the tcvitals (from self.storminfo) to the specified
531  file.
532  @param filename the file to write
533  @param logger a logging.Logger for log messages"""
534  if logger is None: logger=self.log()
535  logger.info('Writing tcvitals to %s'%(repr(filename),))
536  with open(filename,'wt') as f:
537  f.write(self.storminfo.as_tcvitals()+"\n")
538  assert(os.path.exists(filename))
539 
540  def make_ghost_namelist(self,filename,logger=None):
541  """!Writes the ghost namelist to namelist_ghost.input. Note
542  that this overwrites, and then deletes, namelist.input and
543  fort.12. It will also create the domain.center and
544  storm.center files and fill them with correct locations.
545  @param filename the file to write
546  @param logger a logging.Logger for log messages"""
547  if logger is None: logger=self.log()
548  if hasattr(self._wrfghost,'make_ghost_namelist'):
549  self._wrfghost.make_ghost_namelist(filename,logger=logger)
550  else:
551  self._wrfghost.make_namelist('namelist_ghost.input')
552  produtil.fileop.remove_file('namelist.input',logger=logger)
553  produtil.fileop.remove_file('fort.12',logger=logger)
554 
555  def make_analysis_namelist(self,filename,logger=None):
556  """!Writes the analysis namelist to namelist_analysis.input.
557  Note that this overwrites, and then deletes, namelist.input
558  and fort.12. It will also create the domain.center and
559  storm.center files and fill them with correct locations.
560  @param filename the file to write
561  @param logger a logging.Logger for log messages"""
562  if logger is None: logger=self.log()
563  if hasattr(self._wrfanl,'make_analysis_namelist'):
564  self._wrfanl.make_analysis_namelist(filename,logger=logger)
565  else:
566  self._wrfanl.make_namelist('namelist_analysis.input')
567  produtil.fileop.remove_file('namelist.input',logger=logger)
568  produtil.fileop.remove_file('fort.12',logger=logger)
569 
570  @property
571  def parent_track(self):
572  """!The Product object for the parent track file."""
573  assert(self._parentTrack is not None)
574  assert(self._trackName is not None)
575  assert(self._trackName)
576  return self._parentTrack.product(self._trackName)
577 
    @property
    def modin(self):
        """!The DA mode (input model) given to the constructor,
        such as "GFS", "GDAS1" or "ENKF"."""
        return self._modin

    @property
    def vortex(self):
        """!The vortex origin status: "GFS", "ENKF", "GDAS" or "HDAS",
        derived in the constructor from modin and the GSI objects."""
        return self._vortex

    @property
    def warm(self):
        """!The status of the current cycle.
        True if it is a cycled run (the prior cycle's COM directory
        exists).
        False if it is a cold start."""
        return self._warm

    @property
    def prev_cycle_dir(self):
        """!The previous cycle's COM directory, or the empty string
        on a cold start."""
        return self._prev_cycle_dir
    @property
    def prev_cycle_sid(self):
        """!The previous cycle's storm ID (the "oldsid" directory
        setting), or the empty string on a cold start."""
        return self._prev_cycle_sid

    @property
    def storm_intensity(self):
        """!The storm intensity (maximum wind, storminfo.wmax)."""
        return self.storminfo.wmax

    @property
    def storm_basin(self):
        """!The storm basin (storminfo.pubbasin2)."""
        return self.storminfo.pubbasin2

    @property
    def center_lat(self):
        """!The domain center latitude ([config] domlat option)."""
        return self.conf.getfloat('config','domlat')

    @property
    def center_lon(self):
        """!The domain center longitude ([config] domlon option)."""
        return self.conf.getfloat('config','domlon')

    @property
    def storm_id(self):
        """!The storm ID (storminfo.stormid3)."""
        return self.storminfo.stormid3

    @property
    def fhr(self):
        """!The forecast hour: hours from the parent analysis time
        (six hours before the cycle) to this simulation's start."""
        return self._fhr
632 
633  @property
634  def rinfo(self):
635  """!The RelocationInfo."""
636  if self.info is not None and self.info.from_file is not None:
637  return self.info
638  if self.__rinfo is not None:
639  return self.__rinfo
640  tn=type(self).__name__.lower()
641  stage='stage3'
642  if tn.find('stage1')>0: stage='stage1'
643  if tn.find('stage2')>0: stage='stage2'
644  filename=os.path.join(self.outdir,stage+'.info')
645  rinfo=RelocationInfo(filename)
646  assert(rinfo.from_file is not None)
647  rinfo.read_info(filename,self.log("info"))
648  self.__rinfo=rinfo
649  return self.__rinfo
650 
    def _make_plist_and_names(self):
        """!Internal function to generate input product lists and names.

        This is an internal implementation function that should not be
        called directly.  It returns a three-element tuple containing
        a list of products, and a dict mapping from product to the
        local filename, and a dict mapping from product to the copy
        method.  This is used to implement copy_inputs, to copy input
        files to the local directory from remote tasks' Products."""
        def copier(p,name,logger,*args):
            # Copy the product's file to the local name:
            deliver_file(p.location,name,logger=logger,keep=True)
        def linker(p,name,logger,*args):
            # Symlink the product's file to the local name.
            # NOTE(review): linker is unused here (every action below is
            # copier); presumably kept for subclasses that override the
            # action map -- confirm before removing.
            make_symlink(p.location,name,force=True,logger=logger)
        names=dict()
        names[self._wrfinput]='wrfinput_d01'
        names[self._wrfanl_d02]='wrfinput_d02'
        names[self._wrfanl_d03]='wrfinput_d03'
        names[self._ghost_d02]='wrfghost_d02'
        names[self._ghost_d03]='wrfghost_d03'
        plist=[ k for k in names.iterkeys() ]
        # Every input is copied (not linked):
        actions=dict( (n,copier) for n in names.iterkeys() )
        return ( plist, names, actions )
673 
    def copy_inputs(self):
        """!Copies, or makes, one or more input files.

        Copies every product from _make_plist_and_names() to its local
        filename, then writes the tcvitals to tcvitals.as.
        @raise hwrf.exceptions.RelocationInputError if any input
          product is unavailable."""
        logger=self.log()
        (plist,names,action)=self._make_plist_and_names()
        def namer(p,logger,*args): return names[p]
        def actor(p,name,logger,*args): action[p](p,name,logger,*args)
        for p in plist:
            logger.info("Need product %s at location=%s, available=%s"%(
                    p.did,repr(p.location),p.available))
        # Loop over all provided products and copy them.  Note that we
        # do not wait for them (maxtime=2) we just use
        # wait_for_products to do the looping for us:
        if len(plist)!=produtil.datastore.wait_for_products(plist,logger,namer,actor,maxtime=2):
            raise hwrf.exceptions.RelocationInputError("Some inputs to the relocation are missing. The previous init job must have failed.")
        #deliver_file('wrfghost_d02', 'wrfghost_d02_orig', keep=True)
        #deliver_file('wrfghost_d03', 'wrfghost_d03_orig', keep=True)

        # Lastly, write the tcvitals:
        self.write_vitals('tcvitals.as')
693 
    def set_ensda(self,ensda):
        """!Sets the ensemble DA input source.
        @param ensda an object with get_wrfinput() and get_wrfanl()
          functions, or None to disable ensemble relocation inputs"""
        self._ensda=ensda
696 
697  def get_centrack(self):
698  """!Returns the Product for the center FGAT time track file if
699  available, or otherwise the parent track file Product."""
700  if self._centrack is None:
701  return self.parent_track
702  return self._centrack
703 
704  def set_centrack(self,centrack):
705  """!Sets the Product for the center FGAT time track file.
706  @param centrack the center FGAT track product"""
707  if (centrack is None):
709  "You must specify central track file")
710  self._centrack=centrack
711 
712  def del_centrack(self):
713  """!Unsets the center FGAT time track file so that
714  get_centrack() will return the parent track file instead."""
715  self._centrack=None
716 
    ##@property centrack
    # The track file Product for the center FGAT hour.  Reading falls
    # back to the parent track Product when no center track is set.
    centrack=property(get_centrack,set_centrack,None,
                      """The track file for the center FGAT hour.""")
721 
722  def create_atcf(self,case):
723  """!Gets the parent vortex track file, either from a specified
724  directory or from the tracker, run by a previous
725  hwrf.init.HWRFInit object's tracker member.
726  @param case 1 or 2: 1 for creating the atcfunix file, 2 for
727  the atcfunix_cen file."""
728  logger=self.log()
729  if case == 1:
730  tp=self.parent_track
731  else:
732  tp=self.centrack
733  logger.info('case %d:tp %s'%(int(case),repr(tp)))
734 
735  produtil.datastore.wait_for_products(tp,logger,maxtime=2,
736  renamer=lambda p,l: 'gfs-anl.atcfunix',
737  action=lambda p,n,l: deliver_file(p.location,n,logger=l))
738 
739  start = self.sim.simstart().strftime("%Y%m%d%H")
740  gfs_atcf = self.confstr('gfs_atcf','')
741 
742  if tp.available and tp.location:
743  ta_atcf=tp.location
744  logger.warning('Using parent vortex from provided tracker data.')
745  logger.warning('%s (%s): parent vortex location'
746  %(tp.did, ta_atcf))
747  if case == 1:
748  deliver_file(ta_atcf, 'atcfunix',logger=logger)
749  else:
750  deliver_file(ta_atcf, 'atcfunix_cen',logger=logger)
751  elif gfs_atcf is not None and gfs_atcf!='':
752  logger.warning(
753  "Using parent vortex from parent model's own track file")
754  basin = self.conf.syndat.pubbasin2
755  fields = self.confstr('track_name','AVNO|PRE1|PRD1')
756  patn = '(?=%s)(?=%s)(?=%s)' %(start, basin, fields)
757  logger.warning("%s: track file"%( gfs_atcf, ))
758  with open(gfs_atcf, 'r') as ifile:
759  logger.info("Parsing track for: start=%s basin=%s fields=%s"
760  %(repr(start),repr(basin),repr(fields)))
761  with open('atcfunix', 'w') as ofile:
762  for line in file:
763  if re.search(patn, line):
764  logger.info('Keep : %s\n'%(repr(line),))
765  ofile.write(line)
766  else:
767  logger.info('Discard: %s\n'%(repr(line),))
768  logger.info('Done parsing track file.')
769  else:
770  # Send this to .error so it will go to the NCEP-wide jlogfile:
771  logger.error(
772  'Could not find a track file for parent vortex location.')
773  logger.error(
774  'Will proceed assuming parent vortex is at tcvitals.')
775  with open('atcfunix', 'a') as o: pass
776 
777  def run_ext(self, cmd, echo=None, inputs=None, incopies=None,
778  outputs=None, opt_outputs=None):
779  """!Helper function for running Fortran programs that need
780  fort.* files for inputs and outputs.
781 
782  Run an external command linking in fort.X files for input and
783  output. If self.redirect=True, redirect logs to a separate
784  file.
785 
786  @param cmd The command to execute. This function will use
787  "self.getexe()" on the command to find the external program to
788  execute.
789  @param echo If a list is passed in as the echo variable, then
790  the contents will be sent to the stdin of the program as a
791  string.
792 
793  @param inputs Input dictionary for files to link. See below.
794 
795  @param incopies Input dictionary for files to copy. See below.
796 
797  @param outputs Output dictionary for files to link. See below.
798  If the output is not present, a message is logged at ERROR
799  level.
800 
801  @param opt_outputs Optional outputs dictionary for files to
802  link. See below. If the outputs are not present, it is not
803  considered an error.
804 
805  The dictionary arguments should consist of a fortran file
806  number and the source file.
807 
808  @code
809  inputs = {11:tcvitals, 12:wrfout_d01}
810  @endcode
811 
812  would produce symbolic links:
813  @code{.unformatted}
814  fort.11 -> tcvitals
815  fort.12 -> wrfout_d01
816  @endcode
817 
818  input files can also be copied using incopies:
819  @code
820  incopies = {11:tcvitals, 12:wrfout_d01}
821  @endcode
822 
823  would create files instead of links.
824 
825  The outputs and opt_outputs (optional outputs) should be of the
826  dictionary as the inputs. As in:
827  outputs = {56:new_data_4x, 85:storm_radius}
828  this would mean the "fort.56" file would be renamed to "new_data_4x"
829  and the "fort.85" renamed to "storm_radius".
830 
831  If opt_outputs is given then the fortran file is tested to see if it
832  exists and only if it does is it renamed to the output filename.
833 
834  A log file will be created consisting of the stdout and stderr of the
835  command run. It will be named consisting of the taskname and command.
836  For example, if this is relocation stage 1 and the command is
837  hwrf_pert_ct then the log file is "rel_stage_1_hwrf_pert_ct.log" """
838 
839  cmdname=str(cmd)
840  logger = self.log()
841  prog = self.getexe(cmdname)
842  logf = '%s/logs/%s_%s.log' %(self.dest_dir,
843  self.__class__.__name__, cmdname)
844 
845  # Build up the command
846  if echo:
847  echostr=""
848  for s in echo:
849  if isinstance(s,float): echostr+="%g "%(s,)
850  elif isinstance(s,int): echostr+="%d "%(s,)
851  else: echostr+="%s "%(str(s),)
852  logger.info(
853  'Converted %s to %s for stdin input to fortran command.'
854  %(repr(echo),repr(echostr)))
855  echostr+="\n"
856  cmd = produtil.run.openmp(produtil.run.bigexe(prog)) << echostr
857  else:
859 
860  # If redirection is requested, do so:
861  if self.redirect: cmd = cmd >= logf
862 
863  # Clean up all the fortran inputs and outputs
864  empty={}
865  if inputs is None: inputs=empty
866  if outputs is None: outputs=empty
867  if incopies is None: incopies=empty
868  iof = dict(itertools.chain(inputs.items(), outputs.items(),
869  incopies.items()))
870  for k in iof:
871  produtil.fileop.remove_file('fort.'+str(k),logger=logger)
872 
873  # Link the inputs
874  if inputs:
875  produtil.fileop.fortlink(inputs, force=True,logger=logger)
876 
877  if incopies:
878  produtil.fileop.fortcopy(incopies, force=True,
879  only_log_errors=True, logger=logger)
880 
881  if self.confbool('sync_frequently',True):
882  runsync()
883  logger.warning(repr(cmd)) # use logger.warning so it is in stderr
884  produtil.run.checkrun(cmd, logger=logger)
885 
886  # Rename the outputs
887  if outputs:
888  for k, v in outputs.iteritems():
889  ffile='fort.'+str(k)
890  if os.path.exists(ffile):
891  deliver_file(ffile, v, keep=False,logger=logger)
892  else:
893  logger.error('%s: did not make file %s (would mv to %s)'
894  %(cmdname,ffile,str(v)))
895 
896  # Rename the optional outputs if they exist
897  if opt_outputs:
898  for k, v in opt_outputs.iteritems():
899  ffile = 'fort.' + str(k)
900  if os.path.exists(ffile):
901  deliver_file(ffile, v, keep=False,logger=logger)
902  else:
903  logger.warning(
904  '%s: did not make file %s (would mv to %s).'
905  %(cmdname,ffile,str(v)))
906 
907  # Clean up the input links
908  for k,v in inputs.iteritems():
909  if os.path.islink('fort.'+str(k)):
910  logger.info('%s: deleting input fort file (symlink to %s)'
911  %('fort.'+str(k),v))
912  produtil.fileop.remove_file('fort.'+str(k),logger=logger)
913 
914  # Clean up the input copies
915  for k,v in incopies.iteritems():
916  if os.path.exists('fort.'+str(k)):
917  logger.info('%s: deleting input fort file (copy of %s)'
918  %('fort.'+str(k),v))
919  produtil.fileop.remove_file('fort.'+str(k),logger=logger)
920 
921 ########################################################################
923  """!This is a HWRF task that encapsulates stage 1 of the vortex
924  relocation."""
925 
926  def __init__(self,dstore,conf,section,sim,domains,taskname=None,**kwargs):
927  """!Stage1 constructor.
928 
929  @param dstore,conf,section,sim,domains,taskname,kwargs
930  Passed to RelocationTask.__init__()"""
931  super(Stage1,self).__init__(dstore,conf,section,sim,domains,
932  taskname,**kwargs)
933 
934  def run(self):
935  """!Runs the stage 1 of the relocation."""
936  logger=self.log()
937  # NOTE: some of these are sent as postmsg instead of
938  # logger.info. That ensures they are in the jlogfile, which
939  # contains information across all cycles of all storms and all
940  # models. That way, we can find unexpected cold starts.
941  read_info=False
942  try:
943  dest_dir=self.dest_dir
944  if os.path.exists(dest_dir):
945  self.delete_temp()
946  produtil.fileop.makedirs(dest_dir)
947  self.info.initopt=self.confint('initopt',0)
948  tdrflagfile=self.conf.strinterp('dir','{com}/{stormlabel}.tdr')
949  if self._ensda is None and self.confbool('tdrconditionalvinit',False):
950  if isnonempty(tdrflagfile):
951  self.info.initopt=1
952  else:
953  self.info.initopt=0
954  self.info.iflag_cold=0
955  with NamedDir(dest_dir) as dir:
956  self.postmsg('Stage 1 running in directory: '+os.getcwd())
957  assert(not re.match('\A/tmp',os.getcwd()))
958 
959  produtil.fileop.makedirs(dest_dir+'/logs')
960  if self._wrfghost is not None or self._wrfanl is not None:
961  self.copy_namelist()
962 
963  if self._ensda is None:
964  have_prior=self.check_prior_cycle()
965 
966  if not have_prior:
967  self.postmsg('Prior cycle missing. Cold start. '
968  'Continue from Stage 2.')
969  self.info.iflag_cold=1
970  self.info.warm_cold_flag=COLD
971  expect=self.confbool('expect_cold_start')
972  if expect:
973  self.postmsg('No prior cycle exists, and no prior '
974  'cycle was expected. Cold start. '
975  'Continue from Stage 2.')
976  self.info.cold_ok=True
977  else:
978  msg='UNEXPECTED COLD START. Prior cycle data was '\
979  'missing. Continue from Stage 2. To override '\
980  'this error, set expect_cold_start=yes or '\
981  'allow_fallbacks=yes in the conf file for this '\
982  'cycle.'
983  logger.critical(msg)
984  self.info.cold_ok=False
985  if not self.confbool('allow_fallbacks'):
987 
988  else:
989  if self.storminfo.wmax<14:
990  # NOTE: If you change this, change the
991  # weak_invest default in rocoto/run_hwrf.py.
992  self.postmsg('Storm is a weak storm. This is a '
993  'cold start. Continue from Stage 2.')
994  self.info.iflag_cold=1
995  self.info.warm_cold_flag=COLD
996  self.info.cold_ok=True
997  else:
998  self.write_vitals('tcvitals.as')
999  self.copy_fixed()
1000 
1001  self.relocate_storm()
1002  self.merge_nest()
1003  self.create_atcf(1)
1004  if not self.copy_hdas():
1005  self.postmsg('Cannot get prior cycle track. '
1006  'This is a cold start.')
1007  self.info.cold_warm_flag=COLD
1008  self.info.cold_ok=False
1009  elif self.check_atcf_hours():
1010  self.postmsg('This is a warm start.')
1011  self.guess_track()
1012  self.split_wrf()
1013  self.pert_ct()
1014  else:
1015  self.postmsg('The hdas_atcfunix does not have '
1016  'all expected forecast hours.')
1017  self.postmsg('The create_trak_guess cannot '
1018  'continue with relocation.')
1019  self.postmsg('This is a cold start.')
1020  self.info.warm_cold_flag=COLD
1021  self.info.cold_ok=True
1022 
1023  self.postmsg('Stage 1 completed in directory: '
1024  +os.getcwd())
1025  else:
1026  self.info.iflag_cold=1
1027  self.info.initopt=1
1028  self.info.ensda_relocate=True
1029  self.write_vitals('tcvitals.as')
1030  self.relocate_storm()
1031  self.merge_nest()
1032  if not self.copy_ensda_track():
1033  self.postmsg('Cannot get prior cycle ensda track.')
1034  raise hwrf.exceptions.EnsdaTrackerMissing('ensda track missing')
1035  if self.check_atcf_hours():
1036  self.guess_track()
1037  self.split_wrf()
1038  self.pert_ct()
1039  else:
1040  self.info.ensda_relocate_continue=False
1041  self.postmsg('The ensda_atcfunix does not have '
1042  'all expected forecast hours.')
1043  self.postmsg('Will not perform relocation for this member')
1044 
1045  self.state=COMPLETED
1046  except Exception as e:
1047  logger.critical('Stage 1 failed: '+str(e),exc_info=True)
1048  raise
1049  finally:
1050  self.info.write_info(os.path.join(self.outdir,'stage1.info'),
1051  logger=logger)
1052  def copy_namelist(self):
1053  """!Copy the namelist files from the preceding steps"""
1054  self.log().info('stage1 copy_namelist')
1055  if self._wrfghost is not None:
1056  self.make_ghost_namelist('namelist.input.ghost')
1057  if self._wrfanl is not None:
1058  self.make_analysis_namelist('namelist.input.analysis')
1059 
1061  """!Checks to see if all data is present from the prior cycle."""
1062  logger=self.log()
1063  logger.warning('Checking for prior cycle data.')
1064  # NOTE: we assume wrfout files use colons between date
1065  # components. The copywrf.py ensures this when copying to
1066  # com.
1067  ftimestr=self.sim.simstart().strftime("%Y-%m-%d_%H:%M:%S")
1068  if self.prev_cycle_sid is False:
1069  sid=self.storm_id.lower()
1070  else:
1071  sid=self.prev_cycle_sid.lower()
1072 
1073  for d in self.domains:
1074  id = d.get_grid_id()
1075  ifile = "%s/%s.wrfout_d%02d_%s" %(self.prev_cycle_dir,
1076  sid, id,ftimestr)
1077  logger.info('ifile is %s' %ifile)
1078  if not isnonempty(ifile):
1079  logger.warning(
1080  'Prior cycle %s forecast does not exist. This is a '
1081  'cold start.'%(ftimestr,))
1082  return False
1083  logger.warning('Prior cycle data is present for time %s'%(ftimestr,))
1084  if self.fgat_times is not None:
1085  for t in self.fgat_times:
1086  ftimestr2=t.strftime("%Y-%m-%d_%H:%M:%S")
1087  for d in self.domains:
1088  id = d.get_grid_id()
1089  ifile = "%s/%s.wrfout_d%02d_%s" %(self.prev_cycle_dir,
1090  sid, id,ftimestr2)
1091  if not isnonempty(ifile):
1092  logger.warning(
1093  'Fgat wrfout_d%02d file does not exist for forecast time '
1094  '%s. This is a cold start.'%(id,ftimestr2,))
1095  return False
1096  else:
1097  logger.info('Have wrfout_d%02d file for forecast time '
1098  '%s.'%(id,ftimestr2,))
1099  logger.warning('Prior cycle data is present for all fgat times')
1100  else:
1101  logger.warning('No fgat_times specified. Prior %s forecast is '
1102  'available.'%(ftimestr,))
1103  return True
1104 
    def relocate_storm(self):
        """!Runs the hwrf_3dvar to paste the relocated storm.

        Converts each input file (prior cycle wrfout in the normal
        case; ensda wrfinput/wrfanl in the ensemble case) to the
        binary "old_hwrf_dNN" files consumed by the later Fortran
        programs.  HDF5-format (compressed) NetCDF inputs are
        converted back to 64-bit-indexing NetCDF3 via ncks when ncks
        is available."""
        self.log().info('stage1 relocate_storm')
        fprog = 'hwrf_3dvar'
        logger=self.log()
        ftimestr=self.sim.simstart().strftime("%Y-%m-%d_%H:%M:%S")
        # Find ncks: first via configuration, then via a PATH search:
        ncks=self.getexe('ncks','')
        if not ncks:
            ncks=produtil.fileop.find_exe('ncks',raise_missing=False)
        if ncks:
            def copier(s,t,x):
                # Convert s to 64-bit-indexing NetCDF3 file t
                # (ncks -6), replacing any prior t:
                produtil.fileop.remove_file(t,logger)
                checkrun(bigexe(ncks)['-6',s,t],logger=logger)
        else:
            copier=None

        domains=[ d for d in self.domains ]
        # Use the prior cycle's storm ID when it differs (renumbering):
        if self.prev_cycle_sid is False:
            sid=self.storm_id.lower()
        else:
            sid=self.prev_cycle_sid.lower()

        for domain in domains:
            if self._ensda is None:
                # Normal case: read the prior cycle's wrfout file:
                id = domain.get_grid_id()
                ifile = "%s/%s.wrfout_d%02d_%s" %(self.prev_cycle_dir,
                                                  sid, id,ftimestr)
            else:
                # Ensemble case: wrfinput for the MOAD, wrfanl for the
                # nest, both obtained from the ensda object:
                did=int(domain.get_grid_id())
                if domain.is_moad():
                    id=1
                    prod=self._ensda.get_wrfinput(domain=domain,atime=self.conf.cycle)
                else:
                    id=2
                    prod=self._ensda.get_wrfanl(domain=domain,atime=self.conf.cycle)
                logger.info('domain %s prod %s'%(str(domain),prod.did))
                ifile=prod.location

            wrfout = "wrfout_d%02d" %id
            old_wrfout = "old_hwrf_d%02d"%id
            if produtil.fileop.netcdfver(ifile)=='HDF5':
                logger.info('%s: file is HDF5, so I will assume it is '
                            'compressed and convert back to 64-bit indexing '
                            'NetCDF3.'%(ifile,))
                if copier is None:
                    logger.critical('ncks not found; things will probably break')

            deliver_file(ifile,wrfout,keep=True,logger=logger,copier=copier)

            prog = self.getexe(fprog)
            log = '%s/logs/%s_%s_d%02d.log' %(
                self.dest_dir, self.__class__.__name__, fprog, id)
            cmd = produtil.run.exe(prog)['storm_relocate', wrfout, 'flnm3',
                                         old_wrfout]
            if self.redirect: cmd = cmd >= log
            produtil.run.checkrun(cmd,logger=self.log())

            # Remove scratch fortran unit files left behind by
            # storm_relocate:
            if os.path.isfile('fort.50'):
                os.remove('fort.50')

            if os.path.isfile('fort.73'):
                os.remove('fort.73')
1167 
1168  def merge_nest(self):
1169  """!Runs the fortran merge_nest program."""
1170  self.log().info('stage1 merge_nest')
1171  if self._ensda is None:
1172  fprog = 'hwrf_merge_nest'
1173  else:
1174  fprog = 'hwrf_merge_enkf'
1175  evars = [ 6,
1176  self.storm_intensity,
1177  0,
1178  self.center_lat,
1179  self.center_lon,
1180  ]
1181  if self._ensda is None:
1182  ins = { 11:'tcvitals.as',
1183  26:'old_hwrf_d01',
1184  36:'old_hwrf_d02',
1185  46:'old_hwrf_d03',
1186  }
1187  else:
1188  ins = { 11:'tcvitals.as',
1189  26:'old_hwrf_d01',
1190  36:'old_hwrf_d02',
1191  }
1192 
1193  ous = { 56:'data_4x_hwrf',
1194  66:'roughness1',
1195  }
1196  self.run_ext(fprog, echo=evars, inputs=ins, outputs=ous)
1197 
1198  def copy_hdas(self):
1199  """!Get the previous cycle's HDAS track."""
1200  logger=self.log()
1201  logger.info('stage1 copy_hdas')
1202  if self.storm_intensity > 10:
1203  dom = 'combine'
1204  else:
1205  dom = 'parent'
1206 
1207  if self.prev_cycle_sid is False:
1208  sid=self.storm_id.lower()
1209  else:
1210  sid=self.prev_cycle_sid.lower()
1211 
1212  prev_time = to_datetime_rel(self.cycling_interval,self.conf.cycle)
1213  hdas_atcf = "%s/%s.trak.hwrf.atcfunix.%s.%s" \
1214  %(self.prev_cycle_dir, sid,
1215  prev_time.strftime("%Y%m%d%H"), dom)
1216  if not os.path.exists(hdas_atcf):
1217  # Should not get here. This means the prior cycle's
1218  # wrfout file exists for hour 6, but the 12hr track file
1219  # does not. Either the workflow management system
1220  # submitted this cycle WAY too early, or the prior cycle's
1221  # tracker failed. This should not be possible in
1222  # operations.
1223  logger.warning('Prior cycle atcf does not exist: '+hdas_atcf)
1224  logger.warning('Will use an empty track.')
1225 
1226  # Log an error at CRITICAL level to alert the operator:
1227  logger.critical('PRIOR HWRF CYCLE PROBABLY FAILED!!')
1228  logger.critical('Prior cycle has wrfout files, but no 12hr '
1229  'track file here: %s'
1230  %(hdas_atcf,))
1231  logger.critical('Check the prior cycle JHWRF_PRODUCTS job for '
1232  'errors. Check to see if NHC received the '
1233  'track file.')
1234 
1235  with open('hdas_atcfunix','wt'):
1236  pass
1237  return False
1238  deliver_file(hdas_atcf, 'hdas_atcfunix', keep=True,logger=self.log())
1239  return True
1240 
1241  def copy_ensda_track(self):
1242  """Get the previous cycle's ensemble forecast track."""
1243  logger=self.log()
1244  logger.info('stage1 copy_ensda_track')
1245 
1246  ensda_atcf=self._ensda.get_track(atime=self.conf.cycle)
1247  if ensda_atcf is None:
1248  logger.info('No track for member %s.'%(self._ensda.__enkfmem))
1249  return False
1250  else:
1251  fromfile=ensda_atcf.location
1252  deliver_file(fromfile,'ensda_atcfunix',keep=True,logger=logger)
1253  return True
1254 
1255  def check_atcf_hours(self):
1256  """!Checks to see if all FGAT hours have tracks of a required
1257  minimum length."""
1258  logger=self.log()
1259  seen=set()
1260  icyc=round(to_fraction(-self.cycling_interval))
1261  found_icyc=False
1262  logger.info('self.info.ensda_relocate is %s'%(repr(self.info.ensda_relocate)))
1263  if self.info.ensda_relocate:
1264  atcfunix='ensda_atcfunix'
1265  else:
1266  atcfunix='hdas_atcfunix'
1267  with open(atcfunix,'rt') as ha:
1268  for line in ha:
1269  rline=line.rstrip()
1270  try:
1271  hour=int(rline[30:33])
1272  seen.add(hour*3600)
1273  if abs(icyc-hour*3600)<30:
1274  logger.info('Found cycling interval hour %d in %s'
1275  %(icyc,atcfunix))
1276  found_icyc=True
1277  logger.info(
1278  'Found hour %s in %s'%(repr(hour),atcfunix))
1279  except (IndexError,ValueError,TypeError) as e:
1280  logger.warning(
1281  'Cannot parse hour from %s line: %s'
1282  %(atcfunix,rline,))
1283  if self.fgat_times is None or self.info.ensda_relocate:
1284  if found_icyc:
1285  return True
1286  else:
1287  logger.warning('Did not find cycling interval hour %d in '
1288  'hdas_atcfunix. This is a cold start.')
1289  return False
1290 
1291  if not self.info.ensda_relocate:
1292  """only 3D relocation for ensemble members"""
1293  all_found=True
1294  parent_atime=to_datetime_rel(self.cycling_interval,self.conf.cycle)
1295  for ftime in self.fgat_times:
1296  dt=to_datetime(ftime)-parent_atime
1297  fdt=to_fraction(dt)
1298  fgath=int(float(round(fdt)))
1299  if fgath not in seen:
1300  logger.warning(
1301  'Could not find hour %s in hdas_atcfunix'%(repr(fgath),))
1302  all_found=False
1303  if all_found:
1304  logger.info('All FGAT hours found in hdas_atcfunix')
1305  return True
1306  else:
1307  logger.warning('Some FGAT hours not found in hdas_atcfunix')
1308  self.info.warm_cold_flag=COLD
1309  self.cold_ok=True
1310  return False
1311 
1312  ##@var cold_ok
1313  # Set to True if the relocation intentionally vetoes warm starting.
1314  # This is done, for example, if the storm is weak or shallow.
1315 
1316  def guess_track(self):
1317  """!Runs the fortran hwrf_trk_guess program"""
1318  self.log().info('stage1 guess_track')
1319  fprog = 'hwrf_trk_guess'
1320  evars = [ self.storm_id,
1321  '%02d'%(self.storminfo.when.hour,),
1322  ]
1323  if self.info.ensda_relocate:
1324  ins = { 11:'tcvitals.as',
1325  12:'ensda_atcfunix',
1326  }
1327  else:
1328  ins = { 11:'tcvitals.as',
1329  12:'hdas_atcfunix',
1330  }
1331  ous = { 30:'trak.fnl.all' }
1332  self.run_ext(fprog, echo=evars, inputs=ins, outputs=ous)
1333 
1334  def split_wrf(self):
1335  """!Runs the fortran wrf_split program."""
1336  self.log().info('stage1 split_wrf')
1337  fprog = 'hwrf_wrf_split'
1338  if self.info.ensda_relocate:
1339  crfactor=3.0
1340  else:
1341  crfactor=1.0
1342 
1343  evars = [ self.fhr,
1344  0,
1345  self.storm_intensity,
1346  self.storm_basin,
1347  int(self.info.iflag_cold),
1348  crfactor,
1349  ]
1350  ins = { 11:'tcvitals.as',
1351  26:'data_4x_hwrf',
1352  30:'trak.fnl.all',
1353  46:'old_hwrf_d01',
1354  }
1355  ous = { 56:'wrf_env',
1356  71:'storm_pert',
1357  85:'storm_radius',
1358  }
1359  otime = self.sim.simstart().strftime("%Y%m%d%H")
1360  opts = { 25:'disturbance.dat',
1361  52:'rel_inform.'+ otime,
1362  55:'vital_syn.' + otime,
1363  }
1364  self.run_ext(fprog, echo=evars, inputs=ins, outputs=ous,
1365  opt_outputs=opts)
1366  assert(produtil.fileop.isnonempty('./storm_radius'))
1367 
1368  def pert_ct(self):
1369  """!Runs the fortran hwrf_pert_ct program."""
1370  self.log().info('stage1 pert_ct')
1371  fprog = 'hwrf_pert_ct'
1372  if self.info.ensda_relocate:
1373  evars = [ 6,
1374  self.storm_basin,
1375  1,
1376  ]
1377  else:
1378  evars = [ 6,
1379  self.storm_basin,
1380  0,
1381  ]
1382 
1383  ins = { 11:'tcvitals.as',
1384  26:'wrf_env',
1385  46:'roughness1',
1386  65:'storm_radius',
1387  71:'storm_pert',
1388  }
1389  ous = { 14:'storm_size_p',
1390  23:'storm_sym',
1391  58:'storm_pert_new',
1392  }
1393  opts = { 35:'storm_pert_step1_1' }
1394  self.run_ext(fprog, echo=evars, inputs=ins, outputs=ous,
1395  opt_outputs=opts)
1396 
1397 ########################################################################
1399  """!This is a HWRF task that encapsulates stage 2 of the vortex
1400  relocation which removes the parent model's vortex."""
1401 
1402  def __init__(self,dstore,conf,section,sim,domains,taskname=None,**kwargs):
1403  """!Stage2 constructor
1404  @param dstore,conf,section,sim,domains,taskname,kwargs
1405  Passed to the RelocationTask.__init__() """
1406  super(Stage2,self).__init__(dstore,conf,section,sim,domains,
1407  taskname,**kwargs)
1408 
    def run(self):
        """!Runs stage 2 of the relocation: removal of the parent
        model's vortex.

        Reads stage1.info, converts parent model inputs to binary,
        creates the parent track, and splits the parent vortex from
        its environment.  Writes stage2.info on the way out when
        stage1.info was successfully read.

        NOTE(review): the file's internal line numbering suggests one
        line was dropped between "try:" and the NamedDir statement
        (possibly a makedirs of dest_dir) -- confirm against
        repository history."""
        read_info=False
        logger=self.log()
        try:
            with NamedDir(self.dest_dir) as dir:
                self.postmsg('Stage 2 starting in directory: '+os.getcwd())
                # Guard against accidentally running in /tmp:
                assert(not re.match('\A/tmp',os.getcwd()))

                # Pick up the warm/cold decision made by stage 1:
                self.info.read_info(
                    os.path.join(self.outdir,'stage1.info'),logger=logger)
                read_info=True

                produtil.fileop.makedirs(self.dest_dir+'/logs')
                self.write_vitals()
                self.copy_fixed()
                self.copy_inputs()
                self.relocate_storm()
#                self.create_nest()
                self.create_atcf(1)
                self.create_track()
                # Remove stale roughness outputs before merge_nests
                # recreates roughness2:
                remove_file('roughness',logger=logger)
                remove_file('roughness2',logger=logger)
                self.merge_nests()
                self.wrf_split()
                self.postmsg('Stage 2 completed in directory: '+os.getcwd())
            self.state=COMPLETED
        except Exception as e:
            logger.critical('Stage 2 failed: '+str(e),exc_info=True)
            raise
        finally:
            # Only write stage2.info if stage1.info was read; otherwise
            # the info object would be incomplete:
            if read_info:
                self.info.write_info(os.path.join(
                    self.outdir,'stage2.info'),logger=logger)
1444 
    def relocate_storm(self):
        """!Runs the hwrf_diffwrf_3dvar program on all inputs to
        create binary file for input to the Fortran programs.

        Converts each wrfinput_dNN to new_gfs_dNN, and each
        wrfghost_d02/d03 to new_ght_d02/d03."""
        self.log().info('stage2 relocate_storm')
        fprog = 'hwrf_3dvar'
        # NOTE(review): icom_dir is computed but never used here --
        # possibly left over from an earlier revision:
        icom_dir = self.conf.getdir('intercom')
        logger=self.log()
        prog = self.getexe('hwrf_3dvar')
        # Convert the parent model (GFS) inputs, one per domain:
        for d in self.domains:
            id = d.get_grid_id()

            fin = "wrfinput_d%02d" %id
            fou = "new_gfs_d%02d" %id

            log = '%s/logs/%s_%s_d%02d.log' %(
                self.dest_dir, self.__class__.__name__, fprog, id)
            cmd = produtil.run.exe(prog)['storm_relocate', fin, 'flnm3', fou]
            if self.redirect: cmd = cmd >= log
            produtil.run.checkrun(cmd,logger=logger)

            # Remove scratch fortran unit files left by storm_relocate:
            if os.path.isfile('fort.50'):
                os.remove('fort.50')

            if os.path.isfile('fort.73'):
                os.remove('fort.73')

        # Convert the ghost files; only domains 2 and 3 have ghosts:
        for id in range(2, 4):

            fin = "wrfghost_d%02d" %id
            fou = "new_ght_d%02d" %id

            log = '%s/logs/%s_%s_ghost_d%02d.log' %(
                self.dest_dir, self.__class__.__name__, fprog, id)
            cmd = produtil.run.exe(prog)['storm_relocate', fin, 'flnm3', fou]
            if self.redirect: cmd = cmd >= log

            produtil.run.checkrun(cmd,logger=logger)

            if os.path.isfile('fort.50'):
                os.remove('fort.50')

            if os.path.isfile('fort.73'):
                os.remove('fort.73')
1488 
1489  def create_nest(self):
1490  """!Runs the fortran hwrf_create_nest program."""
1491  fprog = 'hwrf_create_nest'
1492  evars = [ 6,
1493  self.storm_basin,
1494  ]
1495  ins = { 26:'new_gfs_d01',
1496  46:'new_gfs_d02',
1497  }
1498  ous = { 57:'new_data_d01' }
1499  opts = { 56:'new_data_1x' }
1500  self.run_ext(fprog, echo=evars, inputs=ins, outputs=ous,
1501  opt_outputs=opts)
1502  logger=self.log()
1503  deliver_file('new_gfs_d01', 'new_gfs_d01_org',
1504  keep=False, logger=logger)
1505  deliver_file('new_data_d01','new_gfs_d01',
1506  keep=False, logger=logger)
1507 
1508  def create_track(self):
1509  """!Runs the fortran create_trak_fnl program."""
1510  self.log().info('stage2 create_track')
1511  fprog = 'hwrf_create_trak_fnl'
1512  evars = [ self.storm_id,
1513  self.sim.simstart().strftime("%Y"),
1514  self.storm_basin,
1515  ]
1516  ins = { 11:'tcvitals.as',
1517  12:'atcfunix',
1518  }
1519  ous = { 30:'trak.fnl.all_gfs' }
1520  self.run_ext(fprog, echo=evars, inputs=ins, outputs=ous)
1521 
1522  def merge_nests(self):
1523  """!Runs the fortran merge_nest program."""
1524  self.log().info('stage2 merge_nests')
1525  fprog = 'hwrf_merge_nest'
1526  evars = [ 6,
1527  0,
1528  1,
1529  self.center_lat,
1530  self.center_lon,
1531  self.storm_basin,
1532  ]
1533  ins = { 11:'tcvitals.as',
1534  26:'new_gfs_d01',
1535  36:'new_gfs_d02',
1536  46:'new_gfs_d03',
1537  }
1538  ous = { 56:'data_4x_gfs',
1539  66:'roughness2',
1540  }
1541  self.run_ext(fprog, echo=evars, inputs=ins, outputs=ous)
1542 
1543  def wrf_split(self):
1544  """!Runs the fortran split_wrf program."""
1545  self.log().info('stage2 wrf_split')
1546 
1547  if os.path.isfile("storm_pert_new"):
1548  ibgs = 1
1549  else:
1550  ibgs = 2
1551 
1552  rel = 'rel_inform_gfs.%s' %(
1553  self.sim.simstart().strftime("%Y%m%d%H"))
1554  vital = 'vital_syn_gfs.%s' %(
1555  self.sim.simstart().strftime("%Y%m%d%H"))
1556 
1557  fprog = 'hwrf_wrf_split'
1558  evars = [ self.fhr,
1559  ibgs,
1560  self.storm_intensity,
1561  self.storm_basin,
1562  self.info.iflag_cold,
1563  1.0,
1564  ]
1565  ins = { 11:'tcvitals.as',
1566  26:'data_4x_gfs',
1567  30:'trak.fnl.all_gfs',
1568  46:'new_gfs_d01',
1569  65:'storm_radius',
1570  }
1571  ous = { 52:rel,
1572  56:'gfs_env',
1573  71:'storm_pert_gfs',
1574  85:'storm_radius_gfs',
1575  }
1576  opts = { 55:vital }
1577 
1578  self.run_ext(fprog, echo=evars, inputs=ins, outputs=ous,
1579  opt_outputs=opts)
1580 
1581 
1582 ########################################################################
1584  """!This is a HWRF task that encapsulates stage 3 of the vortex
1585  relocation which relocates and pastes the vortexes together from
1586  various sources."""
1587 
1588  def __init__(self,dstore,conf,section,sim,domains,taskname=None,**kwargs):
1589  """!Stage3 constructor.
1590  @param dstore,conf,section,sim,domains,taskname,kwargs
1591  Passed to the RelocationTask.__init__()"""
1592  super(Stage3,self).__init__(dstore,conf,section,sim,domains,
1593  taskname,**kwargs)
1594  with dstore.transaction() as t:
1595  self._prod_ghost_d02=FileProduct(dstore,'wrfghost_d02',
1596  self.taskname,location=os.path.join(self.location,
1597  'wrfghost_d02'))
1598  self._prod_ghost_d03=FileProduct(dstore,'wrfghost_d03',
1599  self.taskname,location=os.path.join(self.location,
1600  'wrfghost_d03'))
1601  self._prod_wrfinput=FileProduct(dstore,'wrfinput_d01',
1602  self.taskname,location=os.path.join(self.location,
1603  'wrfinput_d01'))
1604  self._prod_wrfanl_d02=FileProduct(dstore,'wrfinput_d02',
1605  self.taskname,location=os.path.join(self.location,
1606  'wrfinput_d02'))
1607  self._prod_wrfanl_d03=FileProduct(dstore,'wrfinput_d03',
1608  self.taskname,location=os.path.join(self.location,
1609  'wrfinput_d03'))
1610  self._prod_storm_radius=FileProduct(dstore,'storm_radius',
1611  self.taskname,location=os.path.join(self.location,
1612  'storm_radius'))
1613  self._prod_ens_wrfout_d01=FileProduct(dstore,'wrfout_d01',
1614  self.taskname,location=os.path.join(self.location,
1615  'wrfout_d01'))
1616  self._prod_ens_wrfout_d02=FileProduct(dstore,'wrfout_d02',
1617  self.taskname,location=os.path.join(self.location,
1618  'wrfout_d02'))
1619 
1620  def get_ghost(self,domain):
1621  """!Returns Product objects for the ghost domain output file
1622  for the specified domain.
1623  @param domain the domain of interest"""
1624  logger=self.log()
1625  logger.debug('get_ghost',repr(domain))
1626  if domain==self.ghost_domains[1]:
1627  if self._prod_ghost_d02 is not None:
1628  logger.debug('is domain 2',repr(self.ghost_domains[1]))
1629  return self._prod_ghost_d02
1630  else:
1631  logger.debug('no prod_ghost_d02')
1632  else:
1633  pass # print 'is not domain 2',repr(self.ghost_domains[1])
1634  if domain==self.ghost_domains[2]:
1635  if self._prod_ghost_d03 is not None:
1636  logger.debug('is domain 3',repr(self.ghost_domains[2]))
1637  return self._prod_ghost_d03
1638  else:
1639  logger.debug('no prod_ghost_d03',repr(self.ghost_domains[2]))
1640  else:
1641  logger.debug('is not domain 3',repr(self.ghost_domains[2]))
1642  logger.info('get_ghost: no ghost for domain '+str(domain))
1643 
1644  def get_wrfout(self,domain):
1645  logger=self.log()
1646  if not domain in self.domains:
1647  logger.info('Invalid domain: %s not in %s'%(
1648  str(domain), ', '.join([str(x) for x in self.omains])))
1649  return None
1650  logger.debug('get_wrfout',repr(domain))
1651  if domain==self.domains[0]:
1652  if self._prod_ens_wrfout_d01 is not None:
1653  logger.debug('is domain 1',repr(self.domains[0]))
1654  return self._prod_ens_wrfout_d01
1655  else:
1656  logger.debug('no prod_ens_wrfout_d01')
1657  else:
1658  pass # print 'is not domain 1',repr(self.ghost_domains[0])
1659  if domain==self.domains[1]:
1660  if self._prod_ens_wrfout_d02 is not None:
1661  logger.debug('is domain 2',repr(self.domains[1]))
1662  return self._prod_ens_wrfout_d02
1663  else:
1664  logger.debug('no prod_ens_wrfout_d0',repr(self.domains[1]))
1665  else:
1666  logger.debug('is not domain 2',repr(self.domains[1]))
1667  logger.info('get_wrfout: no wrfout for domain '+str(domain))
1668 
1669  def wrfinput_at_time(self,atime,domain):
1670  """!Returns a Product object for the wrfinput output file for
1671  the specified domain if the atime matches this object's
1672  self.sim.simstart()
1673  @param atime the time of interest
1674  @param domain the domain of interest"""
1675  if not domain in self.sim: return None
1676  domain=self.sim[domain]
1677  if atime is not None and \
1678  not within_dt_epsilon(atime,self.sim.simstart(),
1679  self.dt_epsilon):
1680  self.log().info(
1681  'wrfinput_at_time: atime=%s is not near my time %s'
1682  %(atime.strftime('%Y%m%d%H'),domain.strftime('%Y%m%d%H')))
1683  return None
1684  return self.get_wrfinput(domain)
1685 
1686  def wrfanl_at_time(self,atime,domain):
1687  """!Returns a Product object for the wrfanl output file for the
1688  specified domain if the atime matches this objects'
1689  self.sim.simstart().
1690  @param atime the time of interest
1691  @param domain the domain of interest"""
1692  if atime is not None and \
1693  not within_dt_epsilon(atime,self.sim.simstart(),
1694  self.dt_epsilon):
1695  self.log().info(
1696  'wrfanl_at_time: atime=%s is not near my time %s'
1697  %(atime.strftime('%Y%m%d%H'),domain.strftime('%Y%m%d%H')))
1698  return None
1699  return self.get_wrfanl(domain)
1700 
1701  def get_wrfanl(self,domain):
1702  """!Returns a Product object for the wrfanl output file for the
1703  specified domain.
1704  @param domain the domain of interest"""
1705  if domain==self.domains[1] and self._wrfanl_d02 is not None:
1706  return self._prod_wrfanl_d02
1707  if domain==self.domains[2] and self._wrfanl_d03 is not None:
1708  return self._prod_wrfanl_d03
1709 
1710  def get_wrfinput(self,domain=None):
1711  """!Returns a Product object for the wrfinput output file. If
1712  a domain is specified, and is not the correct MOAD, then None
1713  is returned.
1714  @param domain the domain of interest"""
1715  if domain is not None and domain!=self.domains[0]: return None
1716  return self._prod_wrfinput
1717 
1718  def get_storm_radius(self):
1719  """!Returns a Product for the storm radius file."""
1720  return self._prod_storm_radius
1721 
1722  def get_track(self):
1723  """!Returns a Product for the track file."""
1724  return self.parent_track
1725 
    def products(self,domains=None):
        """!Iterates over all products, or all selected products.
        @param domains If an iterable of domains is given, only
        iterates over products for those domains.  If None, iterates
        over every product of this task, including the storm radius."""
        logger=self.log()
        if domains is None:
            # Full iteration: include the storm radius and build the
            # default domain set (forecast domains plus, when a ghost
            # is in use, the ghost domains):
            yield self.get_storm_radius()
            domains=set()
            for d in self.domains: domains.add(d)
            if self._wrfghost is not None:
                for d in self.ghost_domains: domains.add(d)
        hit=set()
        for d in domains:
            # Only iterate over a product once:
            if not d in hit:
                hit.add(d)
            else:
                continue
            # NOTE(review): the _wrfinput/_wrfanl_d0*/_ghost_d0* and
            # _ensda_wrfinput_d0* attributes checked below are not set
            # in this class; presumably they come from the
            # RelocationTask base -- confirm.
            if self._ensda is None:
                if d==self.domains[0] and self._wrfinput is not None:
                    yield self._prod_wrfinput
                if d==self.domains[1] and self._wrfanl_d02 is not None:
                    yield self._prod_wrfanl_d02
                if d==self.domains[2] and self._wrfanl_d03 is not None:
                    yield self._prod_wrfanl_d03
                if d==self.ghost_domains[1] and self._ghost_d02 is not None:
                    yield self._prod_ghost_d02
                if d==self.ghost_domains[2] and self._ghost_d03 is not None:
                    yield self._prod_ghost_d03
            else:
                # Ensemble case: only the two wrfout products exist.
                if d==self.domains[0] and self._ensda_wrfinput_d01 is not None:
                    yield self._prod_ens_wrfout_d01
                if d==self.domains[1] and self._ensda_wrfinput_d02 is not None:
                    yield self._prod_ens_wrfout_d02
1760 
1761 
    def _missing_product(self,prod,basename):
        """!Internal function that raises an exception when a product is missing.

        This is an internal implementation function.  It should not be
        called directly.  This is called by deliver_products when an
        expected input file is missing.  It either returns, or raises
        an exception.  See deliver_products for details.
        @param prod the Product
        @param basename the basename of the missing file
        @raise RelocateOutputMissing always"""
        raise RelocateOutputMissing(
            'Mandatory output file %s is missing'%(repr(basename),))
1773 
    def run(self):
        """!Runs stage 3 of the vortex relocation.

        Reads stage1.info (and stage2.info in the non-ensemble case),
        then either runs the ensemble member relocation or the full
        relocate-and-paste sequence, and delivers the output products.

        NOTE(review): the file's internal line numbering suggests one
        line was dropped between "try:" and the NamedDir statement --
        confirm against repository history."""
        logger=self.log()
        read_info=False
        try:
            with NamedDir(self.dest_dir) as dir:
                self.postmsg('Stage 3 running in directory: '+os.getcwd())
                # Guard against accidentally running in /tmp:
                assert(not re.match('\A/tmp',os.getcwd()))

                # Pick up decisions recorded by the earlier stages:
                self.info.read_info(
                    os.path.join(self.outdir,'stage1.info'),logger=logger)
                read_info=True

                if not self.info.ensda_relocate:
                    self.info.read_info(
                        os.path.join(self.outdir,'stage2.info'),logger=logger)
                    read_info=True

                produtil.fileop.makedirs(self.dest_dir+"/logs")
                self.copy_fixed()

                if self.info.ensda_relocate:
                    self.gfs_flag = 1
                    if os.path.exists('storm_pert_new'):
                        logger.info('have storm_pert_new (check 1)')
                    self.ensda_relocate_run()
                else:
                    self.gfs_flag = 6
                    # storm_pert_new from stage 1 implies a warm start:
                    if os.path.exists('storm_pert_new'):
                        logger.info('have storm_pert_new (check 1)')
                        self.info.warm_cold_flag=WARM
                    else:
                        logger.info('do not have storm_pert_new (check 1)')
                        self.info.warm_cold_flag=COLD
                    logger.info('storm intensity is %04d'%(self.storm_intensity))
                    if self.storm_intensity<20 or self.info.initopt==1:
                        logger.info('intensity <20 or initopt=1')
                        self.weak_cold_run()
                        self.gfs_flag=0

                    if os.path.exists('storm_pert_new'):
                        logger.info('have storm_pert_new (check 2)')
                        self.cycled_or_weak_run()
                    else:
                        logger.info('do not have storm_pert_new (check 2)')
                        self.anl_bogus_10m()

                    self.inter_2to2()
                    self.inter_2to2_again()
                    remove_file('flag_file')
                    self.inter_4to6()

                self.update_3dvar()
                self.deliver_products(missing=self._missing_product)
                self.postmsg('Stage 3 completed in directory: '+os.getcwd())
            self.state=COMPLETED
        except Exception as e:
            logger.critical('Stage 3 failed: '+str(e),exc_info=True)
            raise
        finally:
            # Only write stage3.info if earlier info was read:
            if read_info: self.info.write_info(os.path.join(
                    self.outdir,'stage3.info'),logger=logger)
1837 
1838  ##@var gfs_flag
1839  # Initialization flag variable relating to parent model vortex usage.
1840 
1841  ##@var modin
1842  # Input model: GFS or GDAS
1843 
1844  def weak_cold_run(self):
1845  """!Runs the portion of the relocation that is used for weak,
1846  cold storms."""
1847  self.log().info('stage3 cold_run')
1848  #if self.storm_intensity < 20:
1849  self.gfs_flag = 0
1850  self.pert_ct_weak()
1851  self.create_atcf(2)
1852  self.create_track()
1853  #self.anl_bogus_10m()
1854  #else:
1855  # self.gfs_flag = 6
1856  # self.anl_bogus_10m()
1857 
1859  """!Runs the portion of the relocation that is run for cycled
1860  or weak storms."""
1861  self.log().info('stage3 cycled_or_weak_run')
1862  self.anl_4x(case=1)
1863  if self.gfs_flag>2 and os.path.isfile('flag_file2') and \
1864  self.modin == 'GFS':
1865  self.log().info('gfs_flag>2, have flag_file2 and modin is GFS')
1866  self.gfs_flag=0
1867  self.pert_ct_gfs()
1868  self.anl_4x(case=2)
1869  if os.path.isfile('flag_file'):
1870  self.log().info('have flag_file')
1871  self.anl_cs_10m()
1872  if os.path.isfile('flag_file2'):
1873  self.log().info('have flag_file2')
1874  self.anl_bogus_10m()
1875 
1877  """Runs relocation for ensemble member"""
1878  self.log().info('stage3 cycled_or_weak_run')
1879  self.anl_4x(case=3)
1880 
1881  def anl_4x(self, case=2):
1882  """!Runs the anl_4x programs.
1883  @param case 1 or 2: why is anl_4x being run."""
1884  self.log().info('stage3 anl_4x')
1885  self.log().info('self.fhr %s'%(self.fhr))
1886  fprog = 'hwrf_anl_4x'
1887  remove_file('flag_file')
1888  remove_file('flag_file2')
1889  evars = [ self.fhr,
1890  self.storm_basin,
1891  self.gfs_flag,
1892  self.info.initopt,
1893  ]
1894  if case==1:
1895  ins = { 11:'tcvitals.as',
1896  12:'hdas_atcfunix',
1897  14:'storm_size_p',
1898  23:'storm_sym',
1899  26:'gfs_env',
1900  46:'roughness1',
1901  71:'storm_pert_new',
1902  }
1903  if isnonempty('trak.fnl.all'):
1904  ins[30] = 'trak.fnl.all'
1905  else:
1906  ins[30] = 'trak.fnl.all_gfs_cen'
1907  elif case==2:
1908  ins = { 11:'tcvitals.as',
1909  12:'atcfunix',
1910  14:'storm_size_p',
1911  23:'storm_sym',
1912  26:'gfs_env',
1913  46:'roughness2',
1914  71:'storm_pert_new',
1915  }
1916  else:
1917  ins = { 11:'tcvitals.as',
1918  12:'ensda_atcfunix',
1919  14:'storm_size_p',
1920  23:'storm_sym',
1921  26:'wrf_env',
1922  46:'roughness1',
1923  71:'storm_pert_new',
1924  }
1925  ous = { 36:'wrf_env_new' }
1926  oous = { 56:'new_data_4x' }
1927 
1928  self.run_ext(fprog, echo=evars, inputs=ins, outputs=ous,
1929  opt_outputs=oous)
1930 
1931  deliver_file('storm_radius', 'storm_radius_1', keep=True,
1932  logger=self.log())
1933  assert(produtil.fileop.isnonempty('storm_radius'))
1934  if not os.path.exists('flag_file'):
1935  self.log().warning(
1936  'NO FLAG FILE!!! The hwrf_anl_4x program did not make '
1937  'the flag_file.')
1938 
1939  def anl_cs_10m(self):
1940  """!Runs the anl_cs_10m fortran program."""
1941  self.log().info('stage3 anl_cs_10m')
1942  fprog = 'hwrf_anl_cs'
1943  assert(self.info.iflag_cold is not None)
1944  evars = [ 6,
1945  self.storm_basin,
1946  int(self.info.iflag_cold),
1947  ]
1948  axisy_47 = '%s/hwrf_storm_cyn_axisy_47'%(self.getdir('FIXhwrf'))
1949  storm_20 = '%s/hwrf_storm_20'%(self.getdir('FIXhwrf'))
1950  ins = { 11:'tcvitals.as',
1951  23:'storm_sym',
1952  26:'wrf_env_new',
1953  85:'storm_radius',
1954  46:'roughness1',
1955  }
1956  inc = { 71:axisy_47,
1957  72:axisy_47,
1958  73:axisy_47,
1959  74:axisy_47,
1960  75:axisy_47,
1961  76:storm_20,
1962  77:storm_20,
1963  78:axisy_47,
1964  }
1965  ous = { 56:'new_data_4x' }
1966  assert(produtil.fileop.isnonempty('storm_radius'))
1967 
1968  self.run_ext(fprog, echo=evars, inputs=ins, incopies=inc, outputs=ous)
1969  if not os.path.exists('flag_file'):
1970  self.log().warning(
1971  'NO FLAG FILE!!! The hwrf_anl_cs_10m program did not make '
1972  'the flag_file.')
1973 
1974  def anl_bogus_10m(self):
1975  """!Runs the anl_bogus_10m fortran program."""
1976  self.log().info('stage3 anl_bogus_10m')
1977  fprog = 'hwrf_anl_bogus'
1978  evars = [ 6,
1979  self.storm_basin,
1980  ]
1981  axisy_47 = '%s/hwrf_storm_cyn_axisy_47'%(self.getdir('FIXhwrf'))
1982  storm_20 = '%s/hwrf_storm_20'%(self.getdir('FIXhwrf'))
1983  ins = { 11:'tcvitals.as',
1984  26:'gfs_env',
1985  36:'data_4x_gfs',
1986  46:'roughness2',
1987  61:'storm_pert_gfs',
1988  85:'storm_radius_gfs',
1989  }
1990  inc = { 71:axisy_47,
1991  72:axisy_47,
1992  73:axisy_47,
1993  74:axisy_47,
1994  75:axisy_47,
1995  76:storm_20,
1996  77:storm_20,
1997  78:axisy_47,
1998  }
1999  ous = { 56:'new_data_4x' }
2000 
2001  self.run_ext(fprog, echo=evars, inputs=ins, incopies=inc, outputs=ous)
2002 
2003  def pert_ct_weak(self):
2004  """!Runs hwrf_pert_ct for the weak storm case."""
2005  self.log().info('stage3 pert_ct_weak')
2006  logger=self.log()
2007  def cp(a,b):
2008  if os.path.exists(a):
2009  deliver_file(a,b,keep=True,logger=logger)
2010  else:
2011  logger.warning(
2012  '%s: does not exist; will not copy to %s'%(a,b))
2013 
2014  cp('storm_pert_gfs','storm_pert')
2015  cp('storm_radius_gfs','storm_radius')
2016  cp('atcfunix','hdas_atcfunix')
2017  cp('roughness2','roughness1')
2018 
2019  fprog = 'hwrf_pert_ct'
2020  evars = [ 6,
2021  self.storm_basin,
2022  0,
2023  ]
2024  ins = { 11:'tcvitals.as',
2025  12:'atcfunix',
2026  26:'gfs_env',
2027  46:'roughness1',
2028  65:'storm_radius',
2029  71:'storm_pert',
2030  }
2031  ous = { 14:'storm_size_p',
2032  23:'storm_sym',
2033  58:'storm_pert_new',
2034  }
2035 
2036  self.run_ext(fprog, echo=evars, inputs=ins, outputs=ous)
2037  assert(produtil.fileop.isnonempty('storm_radius'))
2038 
2039  def pert_ct_gfs(self):
2040  """!Runs hwrf_pert_ct for the gfs vortex case."""
2041  self.log().info('stage3 pert_ct_gfs')
2042  logger=self.log()
2043  def cp(a,b):
2044  if os.path.exists(a):
2045  deliver_file(a,b,keep=True,logger=logger)
2046  else:
2047  logger.warning(
2048  '%s: does not exist; will not copy to %s'%(a,b))
2049 
2050  remove_file('flag_file',logger=logger)
2051  remove_file('storm_pert_new',logger=logger)
2052  remove_file('flag_file2',logger=logger)
2053 
2054  cp('storm_pert_gfs','storm_pert')
2055  cp('storm_radius_gfs','storm_radius')
2056  cp('atcfunix','hdas_atcfunix')
2057  cp('roughness2','roughness1')
2058 
2059  fprog = 'hwrf_pert_ct'
2060  evars = [ 6,
2061  self.storm_basin,
2062  0,
2063  ]
2064  ins = { 11:'tcvitals.as',
2065  12:'atcfunix',
2066  26:'gfs_env',
2067  46:'roughness1',
2068  65:'storm_radius',
2069  71:'storm_pert',
2070  }
2071  ous = {
2072  23:'storm_sym',
2073  58:'storm_pert_new',
2074  }
2075 
2076  self.run_ext(fprog, echo=evars, inputs=ins, outputs=ous)
2077  assert(produtil.fileop.isnonempty('storm_radius'))
2078 
2079  def inter_2to2(self):
2080  """!Runs the hwrf_inter_2to2 program."""
2081  self.log().info('stage3 iter_2to2')
2082  fprog = 'hwrf_inter_2to2'
2083  evars = [ 6,
2084  1,
2085  ]
2086  ins = { 11:'tcvitals.as',
2087  26:'new_data_4x',
2088  46:'new_gfs_d01',
2089  }
2090  if self._gsi_d02 is not None:
2091  ins[36] = 'new_ght_d02'
2092  ous = { 56:'data_merge_g02' }
2093  else:
2094  self.log().info('gsi_d02 not run, interpolate to d02')
2095  ins[36] = 'new_gfs_d02'
2096  ous = { 56:'data_merge_d02' }
2097 
2098  self.run_ext(fprog, echo=evars, inputs=ins, outputs=ous)
2099 
2100  def inter_2to2_again(self):
2101  """!Runs the hwrf_inter_2to2 program again."""
2102  self.log().info('stage3 inter_2to2again')
2103  fprog = 'hwrf_inter_2to2'
2104  evars = [ 6,
2105  1,
2106  ]
2107  ins = { 11:'tcvitals.as',
2108  26:'new_data_4x',
2109  46:'new_gfs_d01',
2110  }
2111  if self._gsi_d03 is not None:
2112  self.log().info('stage3 inter_2to2')
2113  ins[36] = 'new_ght_d03'
2114  ous = { 56:'data_merge_g03' }
2115  else:
2116  self.log().info('gsi_d03 not run, interpolate to d03')
2117  ins[36] = 'new_gfs_d03'
2118  ous = { 56:'data_merge_d03' }
2119 
2120  self.run_ext(fprog, echo=evars, inputs=ins, outputs=ous)
2121 
2122  def inter_4to6(self):
2123  """!Runs the hwrf_inter_4to6 program."""
2124  self.log().info('stage3 4to6')
2125  fprog='hwrf_inter_4to6'
2126  evars = [ 6,
2127  self.storm_basin,
2128  ]
2129  ins = { 11:'tcvitals.as',
2130  26:'new_gfs_d01',
2131  36:'new_data_4x',
2132  46:'new_gfs_d01',
2133  85:'storm_radius_gfs',
2134  }
2135  ous = { 56:'data_merge_d01' }
2136  self.run_ext(fprog, echo=evars, inputs=ins, outputs=ous)
2137  deliver_file('storm_radius_gfs','storm_radius',
2138  keep=True,logger=self.log())
2139 
2140  def update_3dvar(self):
2141  """!Runs the hwrf_diffwrf_3dvar to update the output files."""
2142  self.log().info('stage3 update_3dvar')
2143  logger=self.log()
2144  fprog = 'hwrf_3dvar'
2145  prog = self.getexe(fprog)
2146 
2147  for d in self.domains:
2148  self.log().info('stage3 update_3dvar domain='+str(d))
2149  id = d.get_grid_id()
2150 
2151  if not self.info.ensda_relocate:
2152  ifile = 'data_merge_d%02d' %id
2153  ofile = 'wrfinput_d%02d' %id
2154 
2155  gsi = '_gsi_d%02d'%id
2156  if getattr(self, gsi, None) is not None:
2157  ifile = 'data_merge_g%02d' %id
2158  ofile = 'wrfghost_d%02d' %id
2159  else:
2160  if d.is_moad():
2161  continue
2162  ifile = 'new_data_4x'
2163  ofile = 'wrfout_d02'
2164 
2165  log = '%s/logs/%s_%s_d%02d.log' %(
2166  self.dest_dir, self.__class__.__name__, fprog, id)
2167  cmd = produtil.run.exe(prog)['3dvar_update', ofile, ifile]
2168  if self.redirect: cmd = cmd >= log
2169  produtil.run.checkrun(cmd,logger=logger)
2170 
2171  def create_track(self):
2172  """!Runs the create_trak_fnl program."""
2173  logger=self.log()
2174  logger.info('stage3 create_track')
2175  fprog = 'hwrf_create_trak_fnl'
2176  evars = [ self.storm_id,
2177  self.sim.simstart().strftime("%Y"),
2178  self.storm_basin,
2179  ]
2180  ins = { 11:'tcvitals.as',
2181  12:'atcfunix_cen',
2182  }
2183  ous = { 30:'trak.fnl.all_gfs_cen' }
2184 
2185  self.run_ext(fprog, echo=evars, inputs=ins, outputs=ous)
2186 
2187 ########################################################################
2188 
2190  """!This represents all three stages of the relocate. The
2191  individual stages may be accessed by rstage1, rstage2 and rstage3.
2192  The RelocationInfo object that is shared between them can be
2193  accessed by the "info" member variable"""
    def __init__(self,dstore,conf,section,sim,domains,
                 taskname_pattern=None,**kwargs):
        """!Relocation constructor. Creates Stage1, Stage2 and Stage3

        @param dstore the produtil.datastore.Datastore for database storage
        @param conf the hwrf.config.HWRFConfig for configuration info
        @param section the configuration section to use
        @param sim the hwrf.wrf.WRFSimulation describing the simulation being relocated
        @param domains the hwrf.wrf.WRFDomains being relocated
        @param taskname_pattern Pattern for generating the subtask tasknames,
        which is passed through make_taskname()
        @param kwargs passed to hwrf.hwrftask.HWRFTask.__init__()
        """
        # The "info" keyword is reserved: the shared RelocationInfo is
        # passed to each stage explicitly below.
        assert('info' not in kwargs)
        # NOTE(review): self.info (the shared RelocationInfo) appears
        # to be created here in the original source; that line was lost
        # in this copy -- confirm against the upstream file.

        # One subtask per relocation stage; all three share self.info
        # so later stages can see what earlier stages decided.
        tn=self.make_taskname(taskname_pattern,1)
        self.rstage1=Stage1(dstore,conf,section,sim,domains,taskname=tn,
                            info=self.info,**kwargs)

        tn=self.make_taskname(taskname_pattern,2)
        self.rstage2=Stage2(dstore,conf,section,sim,domains,taskname=tn,
                            info=self.info,**kwargs)

        tn=self.make_taskname(taskname_pattern,3)
        self.rstage3=Stage3(dstore,conf,section,sim,domains,taskname=tn,
                            info=self.info,**kwargs)
2221 
2222  ##@var info
2223  # The RelocationInfo with relocation information to trade between
2224  # stages
2225 
2226  ##@var rstage1
2227  # Stage1 of the relocation
2228 
2229  ##@var rstage2
2230  # Stage2 of the relocaiton
2231 
2232  ##@var rstage3
2233  # Stage3 of the relocation
2234 
2235  def make_taskname(self,taskname_pattern,istage):
2236  """!Creates the task name for relocation stage istage based on
2237  the pattern taskname_pattern.
2238 
2239  @param taskname_pattern The string format for the taskname,
2240  which must contain exactly one %d.
2241  @param istage the integer 1, 2 or 3 to substitute into taskname_pattern"""
2242  istage=int(istage)
2243  taskname_pattern=str(taskname_pattern)
2244  return taskname_pattern % (istage)
2245 
2246 ########################################################################
2248  """!This is a HWRF task that merges the WRF analysis files."""
2249 
    def __init__(self,dstore,conf,section,relocate,wrfinput,wrfanl,
                 taskname=None,gsi_d01=None,gsi_d02=None,gsi_d03=None,
                 ges_d02=None,ges_d03=None,**kwargs):
        """!Merge constructor
        @param dstore the produtil.datastore.Datastore for database storage
        @param conf the hwrf.config.HWRFConfig for configuration info
        @param section the configuration section to use
        @param relocate the Stage3 of the middle FGAT time relocation
        @param wrfinput The source of parent model data wrfinput files.
        @param wrfanl The source of parent model data wrfanl files.
        @param taskname the task name in the database
        @param gsi_d01,gsi_d02,gsi_d03 hwrf.gsi.FGATGSI classes for the
        GSI
        @param ges_d02,ges_d03 Ghost files for the first guess to GSI.
        @param kwargs passed to hwrf.hwrftask.HWRFTask.__init__ """
        domains=relocate.domains
        ghost_domains=relocate.ghost_domains
        assert(ghost_domains is not None)
        assert(domains is not None)
        # Resolve each GSI task to its input/ghost product up front so
        # the rest of the class deals only with products (or None):
        self._gsi_d01_input=None if(gsi_d01 is None) else \
            gsi_d01.get_wrfinput()
        self._gsi_d02_ghost=None if(gsi_d02 is None) else \
            gsi_d02.get_ghost(ghost_domains[1])
        self._gsi_d03_ghost=None if(gsi_d03 is None) else \
            gsi_d03.get_ghost(ghost_domains[2])
        self.set_ges(ges_d02,ges_d03)
        # The merge requires GSI output on at least one nest:
        assert(self._gsi_d02_ghost is not None or self._gsi_d03_ghost is not None)
        if relocate is not None:
            # Normal case: inherit simulation, domains and input model
            # information from the relocation's Stage3, and use its
            # storm radius file.
            super(Merge,self).__init__(
                dstore,conf,section,relocate.sim,relocate.domains,taskname,
                modin=relocate._modin,wrfanl=wrfanl,
                wrfghost=relocate._wrfghost,wrfinput=wrfinput,
                ghost_domains=relocate.ghost_domains,
                gsi_d01=gsi_d01,gsi_d02=gsi_d02,gsi_d03=gsi_d03,**kwargs)
            self._input_storm_radius=relocate.get_storm_radius()
        else:
            # No relocation: the caller must supply sim and domains in
            # kwargs, and the storm radius will come from a fix file.
            super(Merge,self).__init__(dstore,conf,section,kwargs['sim'],
                kwargs['domains'],taskname,**kwargs)
            self._input_storm_radius=None
        with dstore.transaction() as t:
            # Output products: the merged analysis files delivered to
            # this task's location.
            self._prod_wrfinput=FileProduct(dstore,'wrfinput_d01',
                self.taskname,location=os.path.join(self.location,
                'wrfinput_d01'))
            self._prod_wrfanl_d02=FileProduct(dstore,'wrfinput_d02',
                self.taskname,location=os.path.join(self.location,
                'wrfinput_d02'))
            self._prod_wrfanl_d03=FileProduct(dstore,'wrfinput_d03',
                self.taskname,location=os.path.join(self.location,
                'wrfinput_d03'))
    def set_ges(self,ges_d02,ges_d03):
        """!Sets the ges_d02 and ges_d03 first guess ghost file sources.

        Either both first guess sources must be given, or neither.
        @param ges_d02 the domain 2 first guess ghost source, or None
        @param ges_d03 the domain 3 first guess ghost source, or None"""
        if (ges_d02 is None) != (ges_d03 is None):
            # NOTE(review): the statement that consumed this message
            # appears truncated in this copy of the file; an exception
            # raise is evidently intended here -- confirm against the
            # upstream source.
            "You must specify both d02 and d03 first guess files "
            "OR neither. You cannot specify only one of the files."
        self._ges_d02=ges_d02
        self._ges_d03=ges_d03
2307  def get_wrfinput(self,domain):
2308  """!Returns the wrfinput output product for the specified domain
2309  or None if no such data is available
2310  @param domain the domain of interest"""
2311  if domain is not None and domain!=self.domains[0]: return None
2312  return self._prod_wrfinput
2313 
2314  def wrfanl_at_time(self,atime,domain):
2315  """!Returns the wrfanl output product for the specified domain
2316  and time or None if no such data is available.
2317  @param atime the time of interest
2318  @param domain the domain of interest"""
2319  return self.get_wrfanl(domain)
2320 
2321  def get_wrfanl(self,domain):
2322  """!Returns the wrfanl product for the specified domain or None
2323  if no such data is available.
2324  @param domain the domain of interest"""
2325  if domain==self.domains[0]:
2326  self.log.error(
2327  'Requested domain %s, which is the moad. The MOAD has '
2328  'no wrfanl file.'%(repr(domain),))
2329  if domain==self.domains[1]: return self._prod_wrfanl_d02
2330  if domain==self.domains[2]: return self._prod_wrfanl_d03
2331  self.log.error('ERROR: requested domain %s, which is not in '
2332  'self.domains=%s'%(repr(domain),repr(self.domains)))
2334  """!If no relocate was given, gets the storm radius file from a
2335  fix file. Also checks to see if the storm_radius file is
2336  present and non-empty, regardless of whether it came from the
2337  fix or relocate."""
2338  logger=self.log()
2339  if self._input_storm_radius is None:
2340  storm_radius=os.path.join(self.getdir('FIXhwrf'),
2341  'hwrf_storm_radius')
2342  logger.warning(
2343  'Could not get storm_radius from the relocate jobs.')
2344  logger.warning(
2345  'Will use the fix file $FIXhwrf/hwrf_storm_radius instead.')
2346  make_symlink(storm_radius,'storm_radius',force=True,logger=logger)
2347  if not isnonempty('storm_radius'):
2348  msg='storm_radius file is missing'
2349  logger.error(msg)
2350  raise StormRadiusError(msg)
2351 
    def blend_gsi(self):
        """!Runs the hwrf_blend_gsi program if first guess data was
        supplied to the constructor.

        For the innermost GSI domain (d03 if its ghost analysis is
        present, otherwise d02): converts the first guess and ghost
        analysis to the unified format with hwrf_diffwrf_3dvar, blends
        them with hwrf_blend_gsi, and pastes the blended fields back
        into the ghost file.
        @returns True if blending ran, False if it was skipped because
        no first guess was supplied."""
        logger=self.log()
        if self._ges_d02 is None or self._ges_d03 is None:
            logger.warning("First guess not supplied to Merge.__init__. "
                           "Disabling hwrf_blend_gsi.")
            return False
        elif self._gsi_d03_ghost is not None:
            gsidomain=[ 3 ]
        else:
            gsidomain=[ 2 ]

        prog = self.getexe('hwrf_3dvar')

        for d in gsidomain:
            # Convert the first guess and the ghost analysis to the
            # unified format expected by hwrf_blend_gsi:
            diffme=[ ['gsiges_d0%d'%d,'new_ges_d0%d'%d],
                     ['wrfghost_d0%d'%d,'anl_ght_d0%d'%d] ]
            for infile,outfile in diffme:
                log = '%s/logs/%s_%s_blend_gsi_diff.log' %(
                    self.dest_dir, self.__class__.__name__, outfile)
                cmd = produtil.run.exe(prog)['storm_relocate', infile,
                                             'flnm3', outfile]
                if self.redirect: cmd = cmd >= log
                produtil.run.checkrun(cmd,logger=logger)
            # Blend the ghost analysis against the first guess:
            self.run_ext('hwrf_blend_gsi',[6,self.storm_basin],
                         inputs={11:'tcvitals.as',
                                 26:'anl_ght_d0%d'%d,
                                 36:'new_ges_d0%d'%d},
                         outputs={56:'new_ght_d0%d'%d})
            # Paste the blended fields back into the ghost file:
            log = '%s/logs/%s_blend_gsi_update_d0%d.log' %(
                self.dest_dir, self.__class__.__name__,d)
            infile='wrfghost_d0%d'%d
            outfile='new_ght_d0%d'%d
            cmd = produtil.run.exe(prog)['3dvar_update', infile,outfile]
            if self.redirect: cmd = cmd >= log
            produtil.run.checkrun(cmd,logger=logger)
        return True
2390 
2391  def products(self,domains=None):
2392  """!Iterates over output products
2393  @param domains if present, only the products for these listed domains will
2394  be iterated."""
2395  if domains is None: domains=self.domains
2396  for d in domains:
2397  if d==self.domains[0]: yield self._prod_wrfinput
2398  if d==self.domains[1]: yield self._prod_wrfanl_d02
2399  if d==self.domains[2]: yield self._prod_wrfanl_d03
2400 
    def run(self):
        """!Runs the merge.

        Deletes and recreates the destination directory, copies the
        inputs locally, optionally blends GSI with the first guess,
        converts everything to the unified format, interpolates
        between grids according to which GSI domains ran, pastes the
        results back into the wrfinput files, and delivers them."""
        logger=self.log()
        try:
            # Start from a clean destination directory:
            if os.path.exists(self.dest_dir):
                shutil.rmtree(self.dest_dir)
            with NamedDir(self.dest_dir,keep=not self.scrub,
                          logger=self.log()) as dir:
                self.postmsg('Merge running in directory: '+os.getcwd())
                # Safety check: refuse to run inside /tmp.
                assert(not re.match('\A/tmp',os.getcwd()))

                produtil.fileop.makedirs(self.dest_dir+"/logs")

                self.copy_inputs()
                if self.conf.getbool('config','blend_innercore'):
                    blended=self.blend_gsi()
                    if blended:
                        self.postmsg('Ran GSI blending.')
                    else:
                        self.postmsg('Skipped GSI blending.')
                self.check_storm_radius()
                self.relocate_storm()
                # Domain 3: bring the ghost analysis back onto the
                # d03 grid when GSI ran there.
                if self._gsi_d03 is not None:
                    self.inter_2to1(3)
                produtil.fileop.remove_file('flag_file',logger=logger)
                # Domain 2: choice of interpolation path depends on
                # which GSI domains ran.
                if self._gsi_d02 is not None:
                    if self._gsi_d03 is not None:
                        self.inter_3to2()
                    else:
                        self.inter_2to3()
                    self.inter_2to1ges(2)
                    self.inter_2to1(2)
                else:
                    self.inter_2to2()
                # Domain 1 (MOAD): only updated when GSI ran on a nest.
                if self._gsi_d02 is not None or self._gsi_d03 is not None:
                    self.inter_2to6()
                else:
                    logger.warning('Not running inter_2to6 because GSI is '
                                   'disabled for domains 2 & 3')

                self.update_3dvar()
                self.deliver_products()
                self.postmsg('Merge running in directory: '+os.getcwd())
        except Exception as e:
            logger.critical('Merge failed: '+str(e),exc_info=True)
            raise
2448 
2449  def _make_plist_and_names(self):
2450  """!Internal function to generate input product lists and names.
2451 
2452  This is an internal implementation function that should not be
2453  called directly. It returns a three-element tuple containing
2454  a list of products, and a dict mapping from product to the
2455  local filename, and a dict mapping from product to the copy
2456  method. This is used to implement copy_inputs, to copy input
2457  files to the local directory from remote tasks' Products.
2458 
2459  This overrides the superclass _make_plist_and_names to add the
2460  guess and wrfghost products."""
2461  logger=self.log()
2462  def copier(p,name,logger,*args):
2463  deliver_file(p.location,name,logger=logger,keep=True)
2464  def linker(p,name,logger,*args):
2465  make_symlink(p.location,name,force=True,logger=logger)
2466  names=dict()
2467 
2468  names[self._wrfinput]='wrfinput_d01'
2469  names[self._wrfanl_d02]='wrfinput_d02'
2470  names[self._wrfanl_d03]='wrfinput_d03'
2471  if self._input_storm_radius is not None:
2472  names[self._input_storm_radius]='storm_radius'
2473 
2474  if self._ges_d02 is not None:
2475  names[self._ges_d02]='gsiges_d02'
2476 
2477  if self._ges_d03 is not None:
2478  names[self._ges_d03]='gsiges_d03'
2479 
2480  if self._gsi_d02_ghost is not None:
2481  # Use gsi output
2482  names[self._gsi_d02_ghost]='wrfghost_d02'
2483  else:
2484  # Use original ghost
2485  assert(False)
2486  names[self._ghost_d02]='wrfghost_d02'
2487 
2488  if self._gsi_d03_ghost is not None:
2489  # Use gsi output
2490  names[self._gsi_d03_ghost]='wrfghost_d03'
2491 
2492  plist=[ k for k in names.iterkeys() ]
2493  actions=dict( (n,copier) for n in names.iterkeys() )
2494  return ( plist, names, actions )
2495 
    def relocate_storm(self):
        """!Runs hwrf_diffwrf_3dvar to convert the input files to the
        unified format used by the merge programs.

        Three groups of files are converted: the GSI first guess
        ghosts (gsiges_d02/d03, skipped when the output is already
        non-empty), the wrfinput files for every domain, and the
        wrfghost files for domains 2 and 3.  The fort.50/fort.73
        scratch files the program leaves behind are removed after
        each conversion."""
        logger=self.log()
        logger.info('relocate storm')
        fprog = 'hwrf_3dvar'
        icom_dir = self.conf.getdir('intercom')
        prog = self.getexe('hwrf_3dvar')

        # First guess ghost files for domains 2 and 3:
        for id in range(2, 4):

            fin = "gsiges_d%02d" %id
            fou = "new_ges_d%02d" %id

            # Skip if a prior attempt already produced the output:
            if not isnonempty(fou):
                log = '%s/logs/%s_%s_gsiges_d%02d.log' %(
                    self.dest_dir, self.__class__.__name__, fprog, id)
                cmd = produtil.run.exe(prog)['storm_relocate', fin, 'flnm3', fou]
                if self.redirect: cmd = cmd >= log
                produtil.run.checkrun(cmd,logger=logger)

            if os.path.isfile('fort.50'):
                os.remove('fort.50')

            if os.path.isfile('fort.73'):
                os.remove('fort.73')

        # The wrfinput file for every domain; the MOAD output has a
        # special name (new_hdas_d01):
        for d in self.domains:
            id = d.get_grid_id()

            fin = "wrfinput_d%02d" %id
            fou = "new_gfs_d%02d" %id
            if id == 1:
                fou = 'new_hdas_d01'

            log = '%s/logs/%s_%s_d%02d.log' %(
                self.dest_dir, self.__class__.__name__, fprog, id)
            cmd = produtil.run.exe(prog)['storm_relocate', fin, 'flnm3', fou]
            if self.redirect: cmd = cmd >= log
            produtil.run.checkrun(cmd,logger=logger)

            if os.path.isfile('fort.50'):
                os.remove('fort.50')

            if os.path.isfile('fort.73'):
                os.remove('fort.73')

        # The wrfghost files for domains 2 and 3:
        for id in range(2, 4):

            fin = "wrfghost_d%02d" %id
            fou = "new_ght_d%02d" %id

            log = '%s/logs/%s_%s_ghost_d%02d.log' %(
                self.dest_dir, self.__class__.__name__, fprog, id)
            cmd = produtil.run.exe(prog)['storm_relocate', fin, 'flnm3', fou]
            if self.redirect: cmd = cmd >= log
            produtil.run.checkrun(cmd,logger=logger)

            if os.path.isfile('fort.50'):
                os.remove('fort.50')

            if os.path.isfile('fort.73'):
                os.remove('fort.73')
2558 
2559  def inter_2to1(self, domain):
2560  """!Runs the hwrf_inter_2to1 Fortran program to interpolate fields."""
2561  self.log().info('inter_2to1')
2562  fprog = 'hwrf_inter_2to1'
2563  evars = [ 6,
2564  self.storm_basin,
2565  ]
2566  ins = { 26:'new_ght_d%02d' %domain,
2567  36:'new_gfs_d%02d' %domain,
2568  }
2569  ous = { 56:'data_merge_d%02d' %domain }
2570  self.run_ext(fprog, echo=evars, inputs=ins, outputs=ous)
2571 
2572  def inter_2to1ges(self,domain):
2573  """!Runs the hwrf_inter_2to1 Fortran program to interpolate fields."""
2574  self.log().info('inter_2to1ges')
2575  logger=self.log()
2576  fprog = 'hwrf_inter_2to1'
2577  evars = [ 6,
2578  self.storm_basin,
2579  ]
2580  ins = { 26:'new_ges_d%02d' %domain,
2581  36:'new_gfs_d%02d' %domain,
2582  }
2583  ous = { 56:'ges_merge_d%02d' %domain }
2584  self.run_ext(fprog, echo=evars, inputs=ins, outputs=ous)
2585  deliver_file('wrfinput_d02', 'wrfges_d02', keep=True)
2586 
2587  fprog = 'hwrf_3dvar'
2588  prog = self.getexe(fprog)
2589 
2590  ifile = 'ges_merge_d02'
2591  ofile = 'wrfges_d02'
2592  cmd = produtil.run.exe(prog)['3dvar_update', ofile, ifile]
2593  if self.redirect: cmd = cmd >= log
2594  produtil.run.checkrun(cmd,logger=logger)
2595 
2596  def inter_2to2(self):
2597  """!Runs the hwrf_inter_2to2 Fortran program to interpolate fields."""
2598  self.log().info('inter_2to2')
2599  fprog = 'hwrf_inter_2to2'
2600  evars = [ 6,
2601  1,
2602  ]
2603  ins = { 26:'new_ght_d03',
2604  36:'new_gfs_d02',
2605  46:'new_hdas_d01',
2606  }
2607  ous = { 56:'data_merge_d02' }
2608  self.run_ext(fprog, echo=evars, inputs=ins, outputs=ous)
2609 
2610  def inter_2to3(self):
2611  """!Interpolates gsi_d02 analysis increment to d03 and
2612  adds the increment to d03 first guess"""
2613  self.log().info('inter_2to3')
2614  logger=self.log()
2615 
2616  fprog = 'hwrf_inter_2to2'
2617  evars = [ 6,
2618  2,
2619  ]
2620  ins = { 21:'new_ges_d02',
2621  26:'new_ght_d02',
2622  36:'new_ges_d03',
2623  46:'new_hdas_d01',
2624  }
2625  ous = { 56:'data_merge_d03' }
2626  self.run_ext(fprog, echo=evars, inputs=ins, outputs=ous)
2627 
2628  def inter_2to6(self):
2629  """!Runs the hwrf_inter_2to6 Fortran program to interpolate fields."""
2630  self.log().info('inter_2to6')
2631  fprog = 'hwrf_inter_2to6'
2632  evars = [ 6,
2633  1,
2634  1,
2635  ]
2636  ins = { 26:'new_gfs_d02',
2637  46:'new_hdas_d01',
2638  85:'storm_radius',
2639  }
2640  ous = { 56:'data_merge_d01' }
2641  if self._gsi_d02 is not None:
2642  ins[36] = 'new_ght_d02'
2643  else:
2644  ins[36] = 'new_ght_d03'
2645  self.run_ext(fprog, echo=evars, inputs=ins, outputs=ous)
2646 
2647  def inter_3to2(self):
2648  """!Runs the hwrf_inter_3to2 Fortran program to interpolate fields."""
2649  self.log().info('inter_3to2')
2650  fprog = 'hwrf_inter_2to6'
2651  evars = [ 6,
2652  1,
2653  2,
2654  ]
2655  ins = { 26:'new_gfs_d03',
2656  36:'new_ght_d03',
2657  46:'new_ght_d02',
2658  85:'storm_radius',
2659  }
2660  ous = { 56:'data_merge_g02' }
2661  self.run_ext(fprog, echo=evars, inputs=ins, outputs=ous)
2662  deliver_file('data_merge_g02', 'new_ght_d02', keep=True)
2663  deliver_file('wrfghost_d02', 'newghost_d02', keep=True)
2664 
2665  fprog = 'hwrf_3dvar'
2666  prog = self.getexe(fprog)
2667 
2668  logger=self.log()
2669  ifile = 'new_ght_d02'
2670  ofile = 'newghost_d02'
2671  cmd = produtil.run.exe(prog)['3dvar_update', ofile, ifile]
2672  if self.redirect: cmd = cmd >= log
2673  produtil.run.checkrun(cmd,logger=logger)
2674 
2675  def update_3dvar(self):
2676  """!Runs the hwrf_diffwrf_3dvar program to update the output
2677  domains."""
2678  self.log().info('update_3dvar')
2679  logger=self.log()
2680  fprog = 'hwrf_3dvar'
2681  prog = self.getexe(fprog)
2682 
2683  for d in self.domains:
2684  if d==self.domains[0] and self._gsi_d02 is None \
2685  and self._gsi_d03 is None:
2686  logger.warning(
2687  'Not updating MOAD: GSI is disabled for domains 2 & 3')
2688  continue
2689  id = d.get_grid_id()
2690 
2691  ifile = 'data_merge_d%02d' %id
2692  ofile = 'wrfinput_d%02d' %id
2693 
2694  log = '%s/logs/%s_%s_d%02d.log' %(
2695  self.dest_dir, self.__class__.__name__, fprog, id)
2696  cmd = produtil.run.exe(prog)['3dvar_update', ofile, ifile]
2697  if self.redirect: cmd = cmd >= log
2698  produtil.run.checkrun(cmd,logger=logger)
2699 
def __init__
Creates a new RelocationInfo object by reading in the specified *.info file.
Definition: relocate.py:89
Change directory, handle temporary directories.
Definition: cd.py:1
This module provides a set of utility functions to do filesystem operations.
Definition: fileop.py:1
def netcdfver(filename)
What is the NetCDF version of this file?
Definition: fileop.py:177
def create_track(self)
Runs the fortran create_trak_fnl program.
Definition: relocate.py:1508
def anl_bogus_10m(self)
Runs the anl_bogus_10m fortran program.
Definition: relocate.py:1974
def confstrinterp(self, string, section=None, kwargs)
Alias for self.icstr for backward compatibility.
Definition: hwrftask.py:319
warm_cold_flag
The constants WARM, COLD or None to indicate warm vs.
Definition: relocate.py:93
def inter_2to2(self)
Runs the hwrf_inter_2to2 program.
Definition: relocate.py:2079
from_file
The file that was read in.
Definition: relocate.py:100
def write_vitals
Writes the tcvitals (from self.storminfo) to the specified file.
Definition: relocate.py:529
def __str__(self)
A Pythonic string representation of this object.
Definition: relocate.py:122
def pert_ct_weak(self)
Runs hwrf_pert_ct for the weak storm case.
Definition: relocate.py:2003
info
A RelocationInfo object to trade relocation information with other stages of the relocation.
Definition: relocate.py:249
def run(self)
Runs the stage 1 of the relocation.
Definition: relocate.py:934
Raised when required inputs to the relocation are missing.
Definition: exceptions.py:429
def getexe
Alias for hwrf.config.HWRFConfig.get() for the "exe" section.
Definition: hwrftask.py:403
def inter_2to2(self)
Runs the hwrf_inter_2to2 Fortran program to interpolate fields.
Definition: relocate.py:2596
def storm_id(self)
The storm ID.
Definition: relocate.py:624
def get_wrfinput
Returns the wrfinput output Product for the specified domain, or None if no such domain is known...
Definition: relocate.py:378
def guess_track(self)
Runs the fortran hwrf_trk_guess program.
Definition: relocate.py:1316
def redirect(self)
Should subprograms' outputs be redirected to separate files?
Definition: hwrftask.py:190
Handles file locking using Python "with" blocks.
Definition: locking.py:1
taskname
Read-only property: the name of this task.
Definition: datastore.py:1134
def write_info
Writes this object's relocation information to the specified *.info file.
Definition: relocate.py:181
A subclass of Product that represents file delivery.
Definition: datastore.py:856
The base class of tasks run by the HWRF system.
Definition: hwrftask.py:25
def copy_namelist(self)
Copy the namelist files from the preceding steps.
Definition: relocate.py:1052
def wait_for_products
Waits for products to be available and performs an action on them.
Definition: datastore.py:979
def blend_gsi(self)
Runs the hwrf_blend_gsi program if first guess data was supplied to the constructor.
Definition: relocate.py:2352
cold_ok
Set to True if the relocation intentionally vetoes warm starting.
Definition: relocate.py:1309
Raised when the relocation could not find the prior cycle's 6hr forecast, but it expected to be able ...
Definition: exceptions.py:436
def set_ges(self, ges_d02, ges_d03)
Sets the ges_d02 and ges_d03 first guess ghost file sources.
Definition: relocate.py:2299
def remove_file
Deletes the specified file.
Definition: fileop.py:251
This is a HWRF task that encapsulates stage 3 of the vortex relocation which relocates and pastes the...
Definition: relocate.py:1583
def products(self)
Iterates over all products generated by this task.
Definition: relocate.py:495
def get_wrfinput(self, domain)
Returns the wrfinput output product for the specified domain or None if no such data is available...
Definition: relocate.py:2307
def make_taskname(self, taskname_pattern, istage)
Creates the task name for relocation stage istage based on the pattern taskname_pattern.
Definition: relocate.py:2235
def copy_inputs(self)
Copies, or makes, one or more input files.
Definition: relocate.py:674
def merge_nest(self)
Runs the fortran merge_nest program.
Definition: relocate.py:1168
def __init__(self, dstore, conf, section, sim, domains, taskname=None, kwargs)
Stage1 constructor.
Definition: relocate.py:926
def ensda_relocate_run(self)
Definition: relocate.py:1876
def products
Iterates over all products, or all selected products.
Definition: relocate.py:1726
This is a HWRF task that encapsulates stage 2 of the vortex relocation which removes the parent model...
Definition: relocate.py:1398
def make_ghost_namelist
Writes the ghost namelist to namelist_ghost.input.
Definition: relocate.py:540
def check_atcf_hours(self)
Checks to see if all FGAT hours have tracks of a required minimum length.
Definition: relocate.py:1255
def fortcopy(forts, basedir=None, logger=None, only_log_errors=False, kwargs)
A convenience function for copying files to local fort.N files for various integers N using deliver_f...
Definition: fileop.py:868
def inter_4to6(self)
Runs the hwrf_inter_4to6 program.
Definition: relocate.py:2122
def checkrun(arg, logger=None, kwargs)
This is a simple wrapper round run that raises ExitStatusException if the program exit status is non-...
Definition: run.py:398
rstage2
Stage2 of the relocation.
Definition: relocate.py:2215
def relocate_storm(self)
Runs the hwrf_diffwrf_3dvar program on all inputs to create binary file for input to the Fortran prog...
Definition: relocate.py:1445
rstage1
Stage1 of the relocation.
Definition: relocate.py:2211
def set_centrack(self, centrack)
Sets the Product for the center FGAT time track file.
Definition: relocate.py:704
def read_info
Reads the relocation information into this object from the specified filename.
Definition: relocate.py:154
def get_wrfanl(self, domain)
Returns the wrfanl output Product for this Task for the specified domain or None if no such product e...
Definition: relocate.py:411
sim
The hwrf.wrf.WRFSimulation describing the WRF simulation.
Definition: relocate.py:278
def relocate_storm(self)
Runs the hwrf_diffwrf_3dvar for all domains.
Definition: relocate.py:2496
def create_nest(self)
Runs the fortran hwrf_create_nest program.
Definition: relocate.py:1489
def merge_nests(self)
Runs the fortran merge_nest program.
Definition: relocate.py:1522
def openmp
Sets the number of OpenMP threads for the specified program.
Definition: run.py:415
def wrfanl_at_time(self, atime, domain)
Returns a Product object for the wrfanl output file for the specified domain if the atime matches thi...
Definition: relocate.py:1686
def inter_2to6(self)
Runs the hwrf_inter_2to6 Fortran program to interpolate fields.
Definition: relocate.py:2628
def confbool
Alias for self.conf.getbool for section self.section.
Definition: hwrftask.py:287
Raised when an impossible configuration is requested.
Definition: exceptions.py:431
def warm_cold_str(self)
This is the opposite of the make_warm_cold routine: it returns "COLD", "WARM", or "None" for the cons...
Definition: relocate.py:147
def run(self)
Runs stage 3 of the vortex relocation.
Definition: relocate.py:1774
def delete_temp(self)
Deletes all temporary files created by the relocation jobs.
Definition: relocate.py:464
def run_ext
Helper function for running Fortran programs that need fort.
Definition: relocate.py:778
def _missing_product(self, prod, basename)
Internal function that raises an exception when a product is missing.
Definition: relocate.py:1762
Base class of tasks run by HWRF.
Definition: hwrftask.py:1
A shell-like syntax for running serial, MPI and OpenMP programs.
Definition: run.py:1
This is a HWRF task that forms the base class for all vortex relocation tasks, including the Merge...
Definition: relocate.py:207
def inter_2to1(self, domain)
Runs the hwrf_inter_2to1 Fortran program to interpolate fields.
Definition: relocate.py:2559
def copy_hdas(self)
Get the previous cycle's HDAS track.
Definition: relocate.py:1198
def inter_2to2_again(self)
Runs the hwrf_inter_2to2 program again.
Definition: relocate.py:2100
def wrfanl_at_time(self, atime, domain)
Returns the wrfanl output product for the specified domain and time or None if no such data is availa...
Definition: relocate.py:2314
def getdir
Alias for hwrf.config.HWRFConfig.get() for the "dir" section.
Definition: hwrftask.py:396
ghost_domains
The list of ghost domains passed to the constructor.
Definition: relocate.py:317
initopt
Initialization flag variable for the relocation.
Definition: relocate.py:95
def __init__(self, dstore, conf, section, sim, domains, taskname=None, kwargs)
Stage2 constructor.
Definition: relocate.py:1402
outdir
The directory in which this task should deliver its final output.
Definition: hwrftask.py:176
def update_3dvar(self)
Runs the hwrf_diffwrf_3dvar to update the output files.
Definition: relocate.py:2140
domains
The list of domains from sim that match the domains with the same name provided to the constructor...
Definition: relocate.py:280
def isnonempty(filename)
Returns True if the filename refers to an existent file that is non-empty, and False otherwise...
Definition: fileop.py:333
def get_track(self)
Returns a Product for the track file.
Definition: relocate.py:1722
def update_3dvar(self)
Runs the hwrf_diffwrf_3dvar program to update the output domains.
Definition: relocate.py:2675
Stores products and tasks in an sqlite3 database file.
Definition: datastore.py:1
location
Read-write property, an alias for getlocation() and setlocation().
Definition: datastore.py:563
def weak_cold_run(self)
Runs the portion of the relocation that is used for weak, cold storms.
Definition: relocate.py:1844
This is a HWRF task that encapsulates stage 1 of the vortex relocation.
Definition: relocate.py:922
def wrfinput_at_time(self, atime, domain)
Returns a Product object for the wrfinput output file for the specified domain if the atime matches t...
Definition: relocate.py:1669
This subclass of TempDir takes a directory name, instead of generating one automatically.
Definition: cd.py:228
def makedirs
Make a directory tree, working around filesystem bugs.
Definition: fileop.py:224
Time manipulation and other numerical routines.
Definition: numerics.py:1
def center_lon(self)
The domain center longitude.
Definition: relocate.py:619
def make_analysis_namelist
Writes the analysis namelist to namelist_analysis.input.
Definition: relocate.py:555
def split_wrf(self)
Runs the fortran wrf_split program.
Definition: relocate.py:1334
def get_ghost(self, domain)
Returns Product objects for the ghost domain output file for the specified domain.
Definition: relocate.py:1620
def create_track(self)
Runs the create_trak_fnl program.
Definition: relocate.py:2171
def fortlink
This is a convenience routine that makes many symbolic links to fort.N files for various integers N u...
Definition: fileop.py:834
def get_ghost(self, domain)
Returns the wrfghost output Product for this Task for the specified domain.
Definition: relocate.py:419
def copy_ensda_track(self)
Definition: relocate.py:1241
def confint
Alias for self.conf.getint for section self.section.
Definition: hwrftask.py:248
centrack
The track file for the center FGAT hour.
Definition: relocate.py:719
def run(self)
Runs the merge.
Definition: relocate.py:2401
cycling_interval
The positive datetime.timedelta time between cycles.
Definition: relocate.py:259
def __init__(self, dstore, conf, section, sim, domains, taskname=None, kwargs)
Stage3 constructor.
Definition: relocate.py:1588
def products
Iterates over output products.
Definition: relocate.py:2391
This module provides two different ways to generate Fortran namelist files from HWRFConfig sections: ...
Definition: namelist.py:1
def get_storm_radius(self)
Returns a Product for the storm radius file.
Definition: relocate.py:1718
dt_epsilon
An epsilon value for time equality comparisons.
Definition: relocate.py:281
def scrub(self)
Should temporary files be deleted as soon as they are not needed?
Definition: hwrftask.py:195
Raised when a relocation program did not produce an expected output file.
Definition: exceptions.py:433
modin
The input model: GFS or GDAS1.
Definition: relocate.py:365
def log
Obtain a logging domain.
Definition: hwrftask.py:425
def wrfanl_at_time(self, atime, domain)
Returns the wrfanl output file for the specified time and domain, or None if no such file exists...
Definition: relocate.py:440
def relocate_storm(self)
Runs the hwrf_3dvar to paste the relocated storm.
Definition: relocate.py:1105
info
The RelocationInfo with relocation information to trade between stages.
Definition: relocate.py:2208
dest_dir
Delivery directory for outputs.
Definition: relocate.py:340
def cycled_or_weak_run(self)
Runs the portion of the relocation that is run for cycled or weak storms.
Definition: relocate.py:1858
def inter_3to2(self)
Runs the hwrf_inter_3to2 Fortran program to interpolate fields.
Definition: relocate.py:2647
iflag_cold
An int 0 or 1 used by several of the Fortran relocation programs to trigger based on warm or cold sta...
Definition: relocate.py:92
def __init__(self, dstore, conf, section, sim, domains, taskname_pattern=None, kwargs)
Relocation constructor.
Definition: relocate.py:2195
Passes information about relocation status between relocation stages.
Definition: relocate.py:66
def get_wrfanl(self, domain)
Returns a Product object for the wrfanl output file for the specified domain.
Definition: relocate.py:1701
def check_storm_radius(self)
If no relocate was given, gets the storm radius file from a fix file.
Definition: relocate.py:2333
def storm_basin(self)
The storm basin.
Definition: relocate.py:609
def __init__(self, dstore, conf, section, sim, domains, taskname=None, modin='GDAS1', wrfanl=None, wrfghost=None, wrfinput=None, parentTrack=None, trackName='track0', ghost_domains=None, dest_dir=None, gsi_d02=None, gsi_d03=None, gsi_d01=None, cycling_interval=-6 *3600, info=None, fgat_times=None, centrack=None, ensda=None, kwargs)
RelocationTask constructor.
Definition: relocate.py:216
def __init__(self, dstore, conf, section, relocate, wrfinput, wrfanl, taskname=None, gsi_d01=None, gsi_d02=None, gsi_d03=None, ges_d02=None, ges_d03=None, kwargs)
Merge constructor.
Definition: relocate.py:2252
fgat_times
The list of FGAT hours.
Definition: relocate.py:253
def create_atcf(self, case)
Gets the parent vortex track file, either from a specified directory or from the tracker, run by a previous hwrf.init.HWRFInit object's tracker member.
Definition: relocate.py:722
def vortex(self)
The vortex origin status.
Definition: relocate.py:584
def make_warm_cold
Returns the module-level constants COLD or WARM, or None, for the specified string value...
Definition: relocate.py:129
def fhr(self)
The forecast hour.
Definition: relocate.py:629
def wrf_split(self)
Runs the fortran split_wrf program.
Definition: relocate.py:1543
def rinfo(self)
The RelocationInfo.
Definition: relocate.py:634
def inter_2to1ges(self, domain)
Runs the hwrf_inter_2to1 Fortran program to interpolate fields.
Definition: relocate.py:2572
Exceptions raised by the hwrf package.
Definition: exceptions.py:1
def exe(name, kwargs)
Returns a prog.ImmutableRunner object that represents a large serial program that must be run on a co...
Definition: run.py:242
def center_lat(self)
The domain center latitude.
Definition: relocate.py:614
def confstr
Alias for self.conf.getstr for section self.section.
Definition: hwrftask.py:261
def deliver_products(self, missing=None, logger=None, keep=False, frominfo=None, kwargs)
Delivers products to intercom via Product.deliver.
Definition: relocate.py:501
def postmsg(self, message, args, kwargs)
same as produtil.log.jlogger.info()
Definition: datastore.py:1084
def run(self)
Runs stage 2 of the relocation.
Definition: relocate.py:1409
def wrfinput_at_time(self, atime, domain)
Returns the wrfinput output file for the specified time and domain, or returns None if no such file e...
Definition: relocate.py:426
This is a HWRF task that merges the WRF analysis files.
Definition: relocate.py:2247
def del_centrack(self)
Unsets the center FGAT time track file so that get_centrack() will return the parent track file inste...
Definition: relocate.py:712
def storm_intensity(self)
The storm intensity.
Definition: relocate.py:604
def warm(self)
The status of the current cycle.
Definition: relocate.py:589
cold_ok
Set to True if the relocation intentionally vetoes warm starting.
Definition: relocate.py:94
def find_exe
Searches the $PATH or a specified iterable of directory names to find an executable file with the giv...
Definition: fileop.py:573
def anl_cs_10m(self)
Runs the anl_cs_10m fortran program.
Definition: relocate.py:1939
def get_wrfinput
Returns a Product object for the wrfinput output file.
Definition: relocate.py:1710
def prev_cycle_dir(self)
The previous cycle's COM directory.
Definition: relocate.py:596
def _make_plist_and_names(self)
Internal function to generate input product lists and names.
Definition: relocate.py:651
def get_centrack(self)
Returns the Product for the center FGAT time track file if available, or otherwise the parent track f...
Definition: relocate.py:697
gfs_flag
Initialization flag variable relating to parent model vortex usage.
Definition: relocate.py:1797
def bigexe(name, kwargs)
Alias for exe() for backward compatibility.
Definition: run.py:254
def get_wrfanl(self, domain)
Returns the wrfanl product for the specified domain or None if no such data is available.
Definition: relocate.py:2321
def inter_2to3(self)
Interpolates gsi_d02 analysis increment to d03 and adds the increment to d03 first guess...
Definition: relocate.py:2610
def anl_4x
Runs the anl_4x programs.
Definition: relocate.py:1881
rstage3
Stage3 of the relocation.
Definition: relocate.py:2219
def check_prior_cycle(self)
Checks to see if all data is present from the prior cycle.
Definition: relocate.py:1060
def pert_ct_gfs(self)
Runs hwrf_pert_ct for the gfs vortex case.
Definition: relocate.py:2039
def copy_fixed(self)
Copies the fixed files to the local directory.
Definition: relocate.py:453
This represents all three stages of the relocate.
Definition: relocate.py:2189
def parent_track(self)
The Product object for the parent track file.
Definition: relocate.py:571
def pert_ct(self)
Runs the fortran hwrf_pert_ct program.
Definition: relocate.py:1368