HWRF  trunk@4391
mpipomtc.py
1 """!Runs the POM initialization and POM-WRF coupled forecast.
2 
3 This module handles the POM-coupled WRF simulation. It contains two
4 critical pieces:
5 
6 * POMInit -- an HWRFTask that is a wrapper around the Python pom package.
7 * WRFCoupledPOM - a subclass of hwrf.fcsttask.WRFAtmos that runs the
8  WRF-POM two-way coupled system based on the output of the POMInit."""
9 
10 ##@var __all__
11 # The list of symbols exported by "from hwrf.mpipomtc import *"
12 __all__ = ['POMInit', 'WRFCoupledPOM']
13 
14 import os, shutil, math
16 import produtil.rusage
18 import hwrf.coupling
20 
21 from produtil.rusage import setrlimit, rusage, getrlimit
22 from produtil.datastore import UpstreamFile, wait_for_products, \
23  COMPLETED, RUNNING, FAILED
24 from produtil.fileop import isnonempty, make_symlink, deliver_file
25 from produtil.cd import NamedDir
26 from produtil.run import mpirun, mpi
27 from hwrf.numerics import to_datetime, to_datetime_rel, to_fraction
28 from hwrf.exceptions import OceanInitFailed
29 from pom.exceptions import POMInputError
30 
##@var prodnames
# Maps each product name to a two-element tuple: the file's location in
# the pom package's work area, and its final delivery destination.  Both
# entries are templates to be expanded through hwrf.config.HWRFConfig
# string interpolation (strinterp or timestrinterp) before use.
prodnames={
    'grid':           ( '{oceandir}/{vit[stormname]}.grid.nc',
                        '{outdir}/{out_prefix}.pom.grid.nc'),
    'ts_initial':     ( '{oceandir}/{vit[stormname]}.ts_initial.nc',
                        '{outdir}/{out_prefix}.pom.ts_initial.nc' ),
    'ts_clim':        ( '{oceandir}/{vit[stormname]}.ts_clim.nc',
                        '{outdir}/{out_prefix}.pom.ts_clim.nc' ),
    'uv_initial':     ( '{oceandir}/{vit[stormname]}.uv_initial.nc',
                        '{outdir}/{out_prefix}.pom.uv_initial.nc' ),
    'el_initial':     ( '{oceandir}/{vit[stormname]}.el_initial.nc',
                        '{outdir}/{out_prefix}.pom.el_initial.nc' ),
    # NOTE(review): "phse2" below looks like a typo for "phase2", but it
    # is a delivered COM filename; do not change it without confirming
    # every downstream consumer -- kept as-is.
    'restart.phase2': ( '{oceandir}/restart.phase2.nc',
                        '{outdir}/{out_prefix}.pom.restart.phse2.nc'
                        ),
    'pom.nml':        ( '{nmldir}/pom.nml',
                        '{outdir}/{out_prefix}.pom.nml' ) }
"""A mapping from product name to a two-element tuple.  The tuple
contains the path to the file in the local directory structure of the
pom package, and the destination file within the HWRF system.  Both
should be sent through string interpolation (strinterp or
timestrinterp) before use."""
55 
57  """!A wrapper around the pom package that runs the POM initialization.
58 
59  This HWRFTask subclass is a wrapper around the pom package. It
60  runs the POM initialization, and generates the POM namelist for
61  the forecast run."""
def __init__(self,dstore,conf,section,taskname=None,vitfile=None,
             fcstlen=None,outstep=86400,**kwargs):
    """!Creates a POMInit.
    @param dstore the produtil.datastore.Datastore to use
    @param conf the HWRFConfig to use
    @param section the section name for this task
    @param taskname the task name.  Default: section
    @param vitfile the vitals file with tcvitals for all times this
      storm has existed.  Default:
      self.icstr('{WORKhwrf}/{stormlabel}.vitals')
    @param fcstlen The forecast length in hours.
    @param outstep The output timestep in seconds.
    @param kwargs Other keyword arguments are passed to the superclass
      constructor."""
    # Deliver to COM by default.  (The original assigned a dead local
    # "outdir" here; storing into kwargs is the evident intent since
    # the superclass consumes an "outdir" keyword.)
    if 'outdir' not in kwargs: kwargs['outdir']=conf.getdir('com')
    super(POMInit,self).__init__(dstore,conf,section,taskname=taskname,
                                 **kwargs)
    # Input dataset/item names; each kwarg overrides the conf value:
    self._sfc_dataset = str(kwargs.get('sfc_dataset',self.confstr(
                'sfc_dataset','hwrfdata')))
    self._loop_dataset = str(kwargs.get('loop_dataset',self.confstr(
                'loop_dataset','hwrfdata')))
    self._sfcanl_item = str(kwargs.get('sfcanl_item',self.confstr(
                'sfcanl_item','gfs_sfcanl')))
    self._sanl_item = str(kwargs.get('sanl_item',self.confstr(
                'sanl_item','gfs_sanl')))
    self._loop_item = str(kwargs.get('loop_item',self.confstr(
                'loop_item','gfdl_loop')))
    self._wc_ring_item = str(kwargs.get('wc_ring_item',self.confstr(
                'wc_ring_item','gfdl_wc_ring')))
    self._atime=to_datetime(conf.cycle)     # analysis time
    self.__fcstlen=fcstlen                  # forecast length in hours
    self.__outstep=int(outstep)             # ocean output step in seconds
    # Guard against absurdly small output steps; fall back to daily:
    if self.__outstep<30: self.__outstep=86400
    if vitfile is None:
        vitfile=self.icstr('{WORKhwrf}/{stormlabel}.vitals')
    self._vitfile=vitfile
    # NOTE(review): the reviewed listing lost the second isinstance()
    # argument; hwrf.input.DataCatalog is the evident intent given the
    # else branch below.
    if 'catalog' in kwargs and isinstance(kwargs['catalog'],
                                          hwrf.input.DataCatalog):
        self._catalog=kwargs['catalog']
    else:
        incat = str(kwargs.get('catalog',self.confstr(
                    'catalog','hwrfdata')))
        self._catalog=hwrf.input.DataCatalog(conf,incat,conf.cycle)

    self._products=dict()

    rundir=self.workdir
    outputdir=os.path.join(rundir,'output')
    self._make_products(outputdir)
110 
def run(self):
    """!Runs the POM initialization and copies the results to their
    destinations within the HWRF work area.

    Marks the task RUNNING, rebuilds the work area from scratch, runs
    get_inputs(), run_init() and deliver_products(), then marks the
    task COMPLETED.  Any exception marks the task FAILED, except for
    the unsupported-basin case, which is logged, marked COMPLETED and
    re-raised for the caller to handle."""
    try:
        self.state=RUNNING
        logger=self.log()
        rundir=self.workdir
        assert(rundir)
        inputdir=os.path.join(rundir,'input')
        outputdir=os.path.join(rundir,'output')
        # Start from a clean work area on every attempt:
        if os.path.exists(rundir):
            shutil.rmtree(rundir)
        with NamedDir(rundir,keep=True,logger=logger) as d:
            with NamedDir(inputdir,keep=True,logger=logger) as d:
                self.get_inputs()
            with NamedDir(outputdir,keep=True,logger=logger) as d:
                self.run_init(inputdir,outputdir)
                self.deliver_products(os.path.join(outputdir,'OCEAN'),
                                      outputdir)
        self.state=COMPLETED
    # TODO(review): the "except" line below was dropped from the
    # reviewed listing; its body logs an unsupported basin, marks the
    # task COMPLETED and re-raises, which matches the pom package's
    # unsupported-basin exception.  Confirm the exception class name.
    except pom.exceptions.POMUnsupportedBasin:
        logger.info('Basin is unsupported.')
        self.state=COMPLETED
        raise # caller needs to handle this
    except Exception as e:
        logger.error('Unhandled exception in ocean init: %s'
                     %(str(e),),exc_info=True)
        self.state=FAILED
        raise
140 
def _make_products(self,outdir):
    """!Creates FileProduct objects for all output files.
    @param outdir The directory to which the pom package output its
      final files."""
    atime=self._atime
    oceandir=os.path.join(outdir,'OCEAN')
    # Create all products inside one transaction so the datastore is
    # updated atomically (and quickly):
    with self.dstore.transaction():
        for prodname,filepaths in prodnames.iteritems():
            (localpath,compath)=filepaths
            # NOTE(review): the reviewed listing lost the product
            # construction line; a produtil.datastore.FileProduct is
            # the evident intent (deliver_products() later calls
            # prod.deliver()).
            prod=produtil.datastore.FileProduct(
                self.dstore,prodname,self.taskname)
            # COM destination (expanded template from prodnames):
            prod.location=self.timestr(compath,atime,atime,
                                       outdir=self.outdir)
            # Work-area source path (expanded template):
            slocalpath=self.timestr(localpath,atime,atime,
                                    oceandir=oceandir,nmldir=outdir)
            prod['localpath']=slocalpath
            self._products[prodname]=( prod,slocalpath )
158 
def deliver_products(self,oceandir,nmldir,redeliver=False):
    """!Delivers files to their final destination.

    Copies results to their destinations within the HWRF work areas.
    @param oceandir the OCEAN directory created by the pom package in run()
    @param nmldir the directory in which the forecast namelist was made
    @param redeliver if True, products are re-copied even if
      available=True
    @raise OceanInitFailed if any expected output file is missing"""
    assert(self._products)
    logger=self.log()
    all_ok=True
    problems=list()
    atime=self._atime
    produtil.fileop.makedirs(self.outdir,logger=logger)
    for prodname,stuff in self._products.iteritems():
        assert(isinstance(stuff,tuple))
        ( prod,localpath ) = stuff
        # Skip anything already delivered unless redelivery was requested:
        if prod.available and not redeliver:
            logger.info('%s: already available and redeliver=False, so '
                        'skipping this (available=%s location=%s)'%(
                    prod.did,repr(prod.available),repr(prod.location)))
            continue
        # A missing file is fatal; an empty file is delivered anyway
        # but recorded as suspicious:
        if not os.path.exists(localpath):
            logger.warning(
                localpath+": expected output file does not exist.")
            all_ok=False
            problems.append(localpath+' (missing)')
            continue
        elif not isnonempty(localpath):
            logger.warning(localpath+": is empty. Will deliver anyway."
                           +" Beware of impending failures.")
            problems.append(localpath+' (empty)')
        prod.deliver(frominfo=localpath,keep=False,logger=logger)
    if problems:
        msg='Some ocean outputs were empty or missing: '+\
            (', '.join(problems))
        logger.warning(msg)
        # all_ok only goes False when a file was missing, so msg is
        # always defined here:
        if not all_ok:
            logger.critical('Ocean init failed: '+msg)
            raise OceanInitFailed(msg)
198 
def run_init(self,inputdir,outputdir):
    """!Internal function that passes control to the pom package.

    This internal implementation function passes control to the pom
    package.  This is part of the implementation of run().  Do not
    call directly except for debugging.
    @param inputdir the ocean input data directory
    @param outputdir the ocean data output directory"""
    # (An unused CENTERID local was removed; pom.master.run_init does
    # not take it.)
    EXEChwrf=self.getdir('EXEChwrf')
    PARMhwrf=self.getdir('PARMhwrf')
    FIXhwrf=os.path.join(self.getdir('FIXhwrf'),'hwrf-pom')
    # The pom package reads vitals, GFS data and loop current data
    # from the directories prepared by get_inputs():
    VITDIR=inputdir
    GFSDIR=inputdir
    LCDIR=inputdir
    CSTREAM=outputdir
    COMIN=self.getdir('com')
    STARTDATE=self._atime.strftime('%Y%m%d%H')
    STORMID=self.storminfo.stormid3.upper()
    STORMNAME=self.storminfo.stormname.upper()
    kwargs=dict(logger=self.log(), conf=self.conf)
    method=self.confstr('method','')
    if method: kwargs['input_method']=method.upper()
    # Sanity check: never read GFS input from pom's own output tree.
    assert(GFSDIR.find('pom/output')<0)
    logger=self.log()
    # Raise the stack limit before running; the initialization needs a
    # large stack:
    setrlimit(logger=logger,stack=6e9,ignore=True)
    getrlimit(logger=logger)
    sync_frequently=self.confbool('sync_frequently',True)
    pom.master.run_init(STORMNAME,STORMID,STARTDATE,EXEChwrf,PARMhwrf,
                        FIXhwrf,VITDIR,GFSDIR,LCDIR,CSTREAM,COMIN,
                        fcstlen=self.__fcstlen,outstep=self.__outstep,
                        sync_frequently=sync_frequently,
                        **kwargs)
232 
def products(self,name=None):
    """!Iterates over products.

    Iterates over Product objects for all of the files that need to
    be copied to the forecast directory to run POM.  The products all
    carry a "localname" metadata value giving the filename each should
    have in the forecast directory.
    @param name If given, only the product with this name is yielded."""
    if name is None:
        for entry in self._products.itervalues():
            yield entry[0]
    elif name in self._products:
        yield self._products[name][0]
245 
def inputiter(self):
    """!Iterates over all needed input data.

    Yields one dict per required input (dataset, item, atime), suitable
    for the HWRF input system."""
    atime=self._atime
    # The GFS spectral and surface analyses are always required:
    yield dict(dataset=self._sfc_dataset,item=self._sanl_item,
               atime=atime)
    yield dict(dataset=self._sfc_dataset,item=self._sfcanl_item,
               atime=atime)
    # Compute the basin letter exactly the way the pom package will
    # (third character of the upper-cased stormid3):
    basin=self.storminfo.stormid3.upper()[2].upper()
    # Loop current and warm-core ring data are only requested for
    # basin 'L':
    if basin=='L':
        yield dict(dataset=self._loop_dataset,item=self._loop_item,
                   atime=atime)
        yield dict(dataset=self._loop_dataset,item=self._wc_ring_item,
                   atime=atime)
262 
def get_inputs(self):
    """!Obtains input data, links or copies to places expected by POM.

    Copies all inputs to locations expected by the pom package.
    Copies the GFS sanl and sfcanl, waiting for them if needed.
    Makes a new tcvitals file by parsing the old one, and
    generating a new one, discarding lines containing "INVEST"."""
    logger=self.log()
    atime=self._atime

    # --- GFS sanl and sfcanl files (required inputs) ---
    with self.dstore.transaction() as t:
        sanl_loc=self._catalog.locate(self._sfc_dataset,self._sanl_item,
                                      atime=atime,logger=logger)
        sfcanl_loc=self._catalog.locate(self._sfc_dataset,self._sfcanl_item,
                                        atime=atime,logger=logger)

        sanl=UpstreamFile(self.dstore,'input_sanl',self.taskname,
                          minsize=30000)
        sanl.location=sanl_loc
        sanl.available=False
        sfcanl=UpstreamFile(self.dstore,'input_sfcanl',self.taskname,
                            minsize=30000)
        sfcanl.location=sfcanl_loc
        sfcanl.available=False

    # Wait for both upstream files, then symlink each into the local
    # directory under its GFS-style name:
    names={ sanl:self.timestr('gfs.t{aHH}z.sanl',0,atime=self._atime),
            sfcanl:self.timestr('gfs.t{aHH}z.sfcanl',0,
                                atime=self._atime) }
    def namer(p,logger,*a): return names[p]
    def actor(p,name,logger,*a): make_symlink(p.location,name,
                                              force=True,logger=logger)
    wait_for_products([sanl,sfcanl],logger,namer,actor)

    # --- Loop current and warm-core ring (optional inputs) ---
    # Search backward a day at a time for the newest non-empty pair:
    maxback=max(1,self.confint('loop_current_max_days_back',30))
    bad=True
    for days_back in xrange(maxback):
        looptime=to_datetime_rel(days_back*24.0,atime)
        stime=looptime.strftime('%Y%m%d%H')
        loop=self._catalog.locate(self._loop_dataset,self._loop_item,
                                  atime=looptime,logger=logger)
        wc_ring=self._catalog.locate(
            self._loop_dataset,self._wc_ring_item,atime=looptime,
            logger=logger)
        bad=False
        if not isnonempty(loop):
            bad=True
            logger.warning('%s (loop at time %s): is empty or '
                           'non-existant'%(str(loop),stime))
        if not isnonempty(wc_ring):
            bad=True
            logger.warning('%s (loop wc_ring at time %s): is empty or '
                           'non-existant'%(str(wc_ring),stime))
        if not bad: break
    if not bad:
        looploc=self.timestr('hwrf_gfdl_loop_current_rmy5.dat.{aYMD}',
                             0,atime=self._atime)
        make_symlink(loop,looploc,logger=logger)
        wc_ringloc=self.timestr(
            'hwrf_gfdl_loop_current_wc_ring_rmy5.dat.{aYMD}',
            0,atime=self._atime)
        make_symlink(wc_ring,wc_ringloc,logger=logger)
    else:
        # Optional data: log loudly but do not fail the task.
        logger.critical('No loop current available. Checked %d day(s) '
                        'for loop current for %s'
                        %(maxback,atime.strftime('%Y%m%d')))

    # --- tcvitals file, excluding INVEST lines ---
    vitdest='syndat_tcvitals.%04d'%(self.storminfo.when.year,)
    logger.info('Copy vitals %s to %s'%(self._vitfile,vitdest))
    with open(vitdest,'wt') as outf:
        with open(self._vitfile,'rt') as inf:
            for line in inf:
                if 'INVEST' in line:
                    continue
                outf.write(line)
341 
342 ########################################################################
344  """!This is an internal implementation class that should never be
345  used directly. It instructs the hwrf.coupling.CoupledWRF to call
346  the WRFCoupledPOM.copy_pom_inputs to check or link POM input
347  data."""
348  def __init__(self,wcp):
349  """Creates a POMIniter that will pass control to the given
350  WRFCoupledPOM object, stored as self.wcp."""
351  self.wcp=wcp
352  def check_coupled_inputs(self,logger):
353  """Calls the WRFCoupledPOM.copy_pom_inputs with just_check=True."""
354  return self.wcp.copy_pom_inputs(just_check=True)
355  def link_coupled_inputs(self,just_check,logger):
356  """Calls the WRFCoupledPOM.copy_pom_inputs passing just_check."""
357  return self.wcp.copy_pom_inputs(bool(just_check))
358 
359 ########################################################################
361  """!Runs a WRF-POM coupled simulation.
362 
363  Most of the work of this class is done by the superclass,
364  WRFAtmos. This class adds code to copy the inputs needed by POM
365  and the coupler. There are three critical new config section
366  values:
367 
368  * wm3c_ranks = number of coupler ranks. Default: 1
369  * pom_ranks = number of POM ranks. Default: 9
370  * wrf_ranks = number of WRF ranks. No default. This one is
371  mandatory."""
def __init__(self,dstore,conf,section,wrf,keeprun=True,
             wrfdiag_stream='auxhist1',pominit=None,**kwargs):
    """!WRFCoupledPOM constructor.
    @param dstore the produtil.datastore.Datastore to use
    @param conf the hwrf.config.HWRFConfig that provides configuration
      information
    @param section the config section in conf
    @param wrf the hwrf.wrf.WRFSimulation object that is being run
    @param keeprun if True, the simulation temporary files are retained
    @param wrfdiag_stream the stream that generates wrfdiag files
    @param pominit The POMInit object.
    @param kwargs passed to hwrf.fcsttask.WRFAtmos.__init__
    @raise TypeError if pominit is not a POMInit"""
    if not isinstance(pominit,POMInit):
        raise TypeError(
            'The pominit argument to WRFCoupledPOM.__init__ must be a '
            'POMInit object. You passed a %s %s.'%
            (type(pominit).__name__,repr(pominit)))
    super(WRFCoupledPOM,self).__init__(dstore,conf,section,wrf,keeprun,
                                       wrfdiag_stream,**kwargs)
    self._pominit=pominit
    pominiter=POMIniter(self)
    # Register the coupled components, in coupler rank order:
    self.couple('coupler','hwrf_wm3c','wm3c_ranks',1)
    self._add_wave()
    self.couple('pom','hwrf_ocean_fcst','ocean_ranks',9,pominiter)
    self.couple('wrf','wrf','wrf_ranks')
    # Ocean output times: once per day out to five days.
    self.add_coupled_stream('ocean',[
            0,86400,172800,259200,345600,432000])
399 
def remove_ocean(self):
    """!Removes the ocean component from coupling.

    Uncouples the "pom" component registered in __init__.
    @post Any call to run() will not include ocean coupling."""
    self.uncouple('pom')
404 
@property
def pominit(self):
    """!The POMInit object for this coupled forecast (read-only),
    retrieved from the "pom" component's initer."""
    return self.component('pom').initer
409 
def _add_wave(self):
    """!Internal hook for adding wave coupling.

    This base implementation does nothing; a subclass that couples a
    wave model must override it.
    @protected"""
    pass
415 
def copy_pom_inputs(self,just_check=False):
    """!Copies or checks for the inputs required by the POM model.

    This is an internal function used by the PomIniter class.  Do
    not call directly.

    @param just_check If just_check=True, the inputs are not
    copied; instead, the routine just checks for them.  Do not use
    just_check: call check_inputs instead.
    @returns True if all inputs are present (and, unless just_check,
      copied); False in just_check mode when something is wrong
    @raise POMInputError when not in just_check mode and an input is
      unusable"""
    logger=self.log()
    logger.info('Copying POM inputs from POMInit task %s'
                %(self._pominit.taskname,))
    n_copied=0
    for prod in self._pominit.products():
        assert(isinstance(prod,produtil.datastore.Product))
        if not prod.available: prod.check(logger=logger)
        localname=prod.meta('localpath','')
        avail=prod.available
        loc=prod.location
        if not localname:
            msg='POM product %s (available=%s location=%s) has no '\
                'localname.'%(prod.did,repr(avail),repr(loc))
            # In check mode a missing localname is only warned about
            # and the product is still examined below; in link mode it
            # is fatal.
            if not just_check:
                logger.error(msg)
                raise POMInputError(msg)
            logger.warning(msg)
        if not avail:
            msg='POM product %s (available=%s location=%s localname=%s)'\
                ' is not available.'\
                %(prod.did,repr(avail),repr(loc),repr(localname))
            if not just_check:
                logger.error(msg)
                raise POMInputError(msg)
            logger.warning(msg)
            return False
        if not loc:
            msg='POM product %s (available=%s location=%s localname=%s)'\
                ' has no location.'\
                %(prod.did,repr(avail),repr(loc),repr(localname))
            if not just_check:
                logger.error(msg)
                raise POMInputError(msg)
            logger.warning(msg)
            return False
        if not just_check:
            deliver_file(loc,os.path.basename(localname),keep=True,
                         logger=logger)
        # Count in both modes: the just_check path must see the same
        # count to get past the "no outputs" test below.
        n_copied+=1
    if n_copied<1:
        msg='No outputs reported by POM initialization.'\
            ' Did you forget to run the ocean init?'
        if not just_check:
            logger.error(msg)
            raise POMInputError(msg)
        logger.warning(msg)
        return False
    logger.info('Copied %d POM inputs. Returning True.'%(n_copied,))
    return True
Change directory, handle temporary directories.
Definition: cd.py:1
This module provides a set of utility functions to do filesystem operations.
Definition: fileop.py:1
def run_init(self, inputdir, outputdir)
Internal function that passes control to the pom package.
Definition: mpipomtc.py:199
def products
Iterates over products.
Definition: mpipomtc.py:233
taskname
Read-only property: the name of this task.
Definition: datastore.py:1134
A subclass of Product that represents file delivery.
Definition: datastore.py:856
The base class of tasks run by the HWRF system.
Definition: hwrftask.py:25
conf
This HWRFTask's hwrf.config.HWRFConfig object.
Definition: hwrftask.py:415
dstore
Read-only property, an alias for getdatastore(), the Datastore in which this Datum resides...
Definition: datastore.py:557
def pominit(self)
Returns the POMInit object for this coupled forecast.
Definition: mpipomtc.py:406
def component(self, which)
Definition: coupling.py:308
def add_coupled_stream(self, stream, times)
Products.
Definition: coupling.py:342
def link_coupled_inputs(self, just_check, logger)
Definition: mpipomtc.py:355
def confbool
Alias for self.conf.getbool for section self.section.
Definition: hwrftask.py:287
def _add_wave(self)
Internal function for adding wave coupling.
Definition: mpipomtc.py:410
Base class of tasks run by HWRF.
Definition: hwrftask.py:1
A shell-like syntax for running serial, MPI and OpenMP programs.
Definition: run.py:1
def getdir
Alias for hwrf.config.HWRFConfig.get() for the "dir" section.
Definition: hwrftask.py:396
A piece of data produced by a Task.
Definition: datastore.py:716
outdir
The directory in which this task should deliver its final output.
Definition: hwrftask.py:176
Runs the real_nmm or wrf executables.
Definition: fcsttask.py:1
Raised when an unsupported basin is requested.
Definition: exceptions.py:32
Stores products and tasks in an sqlite3 database file.
Definition: datastore.py:1
This subclass of TempDir takes a directory name, instead of generating one automatically.
Definition: cd.py:228
def makedirs
Make a directory tree, working around filesystem bugs.
Definition: fileop.py:224
Time manipulation and other numerical routines.
Definition: numerics.py:1
This module allows querying resource usage and limits, as well as setting resource limits...
Definition: rusage.py:1
def deliver_products
Delivers files to their final destination Copies results to their destinations within the HWRF work a...
Definition: mpipomtc.py:159
workdir
The directory in which this task should be run.
Definition: hwrftask.py:156
def confint
Alias for self.conf.getint for section self.section.
Definition: hwrftask.py:248
Main script to for running ocean spin up: Phase1 and Phase2 (also known as Phase3 and Phase4)...
Definition: master.py:1
def timestr(self, string, ftime, atime=None, section=None, kwargs)
Expands a string in the given conf section, including time vars.
Definition: hwrftask.py:367
def get_inputs(self)
Obtains input data, links or copies to places expected by POM.
Definition: mpipomtc.py:263
def log
Obtain a logging domain.
Definition: hwrftask.py:425
A wrapper around the pom package that runs the POM initialization.
Definition: mpipomtc.py:56
def __init__(self, dstore, conf, section, wrf, keeprun=True, wrfdiag_stream='auxhist1', pominit=None, kwargs)
WRFCoupledPOM constructor.
Definition: mpipomtc.py:373
Raised when the ocean init did not produce some expected outputs.
Definition: exceptions.py:118
def copy_pom_inputs
Copies or checks for the inputs required by the POM model.
Definition: mpipomtc.py:416
def run_init(STORMNAME, STORMID, STARTDATE, EXEChwrf, PARMhwrf, FIXhwrf, VITDIR, GFSDIR, LCDIR, CSTREAM, COMIN, init_method=None, logger=None, fcstlen=None, outstep=None, sync_frequently=False, kwargs)
Run the ocean initialization.
Definition: master.py:60
This module contains exception classes for reporting errors in the POM initialization.
Definition: exceptions.py:1
def uncouple
Removes a component, or all components, from the coupling.
Definition: coupling.py:286
Provides the location of a file in an archive, on disk or on a remote server via sftp or ftp...
Definition: input.py:109
Exceptions raised by the hwrf package.
Definition: exceptions.py:1
def run(self)
Runs the POM initialization and copies the results to their destinations within the HWRF work area...
Definition: mpipomtc.py:111
def _make_products(self, outdir)
Creates FileProduct objects for all output files.
Definition: mpipomtc.py:141
def confstr
Alias for self.conf.getstr for section self.section.
Definition: hwrftask.py:261
def __init__(self, dstore, conf, section, taskname=None, vitfile=None, fcstlen=None, outstep=86400, kwargs)
Creates a POMInit.
Definition: mpipomtc.py:63
def inputiter(self)
Iterates over all needed input data.
Definition: mpipomtc.py:246
def __init__(self, wcp)
Definition: mpipomtc.py:348
def check_coupled_inputs(self, logger)
Definition: mpipomtc.py:352
def icstr(self, string, section=None, kwargs)
Expands a string in the given conf section.
Definition: hwrftask.py:351
Runs a WRF-POM coupled simulation.
Definition: mpipomtc.py:360
Represents a Product created by an external workflow.
Definition: datastore.py:915
This is an internal implementation class that should never be used directly.
Definition: mpipomtc.py:343