HWRF  trunk@4391
hycom.py
1 """This module will one day contain the implementation of the HyCOM
2 initialization and forecast jobs."""
3 
4 import re, os, glob, datetime
6 import hwrf.numerics
8 from produtil.cd import NamedDir
9 from produtil.fileop import make_symlink
10 from produtil.datastore import FileProduct, COMPLETED, RUNNING, FAILED
11 from produtil.run import *
12 from produtil.log import jlogger
13 from hwrf.numerics import to_datetime,to_datetime_rel,TimeArray
14 
def yesno(x):
    """Converts a boolean-like value to the ksh-friendly string
    'YES' (true) or 'NO' (false)."""
    if x:
        return 'YES'
    return 'NO'
17 
def scriptexe(task,path):
    """Generates a produtil.prog.Runner for running a Hycom ksh
    script. Adds a bunch of variables from config sections.

    @param task the HWRFTask whose configuration supplies the variables
    @param path a config-interpolated path to the ksh script
    @returns a Runner with the environment variables filled in"""
    there=task.confstrinterp(path)
    e=exe(there)
    # Renamed local from "vars" to avoid shadowing the builtin vars():
    env=dict()
    # String options from the hycomstrings section pass through verbatim:
    for k,v in task.conf.items(task.confstr('strings','hycomstrings')):
        env[k]=str(v)
    # Boolean options become ksh-friendly YES/NO strings:
    for k,v in task.conf.items(task.confstr('bools','hycombools')):
        env[k]=yesno(v)
    # RTOFSDIR is only exported when non-empty:
    RTOFSDIR=task.meta('RTOFSDIR','')
    if RTOFSDIR:
        env['RTOFSDIR']=RTOFSDIR
    return e.env(**env)
32 
def read_RUNmodIDout(path):
    """Scans the given hycom_settings file for lines of the form
    "export RUNmodIDout=VALUE" and returns the VALUE from the last
    such line, or the empty string if no line matches."""
    result=''
    pattern=re.compile('^export RUNmodIDout=(.*)$')
    with open(path,'rt') as settings:
        for text in settings:
            found=pattern.match(text)
            if found is not None:
                result=found.group(1)
    return result
41 
43  def remove_ocean(): self.uncouple()
44 
45  def __init__(self,dstore,conf,section,taskname=None,fcstlen=126,
46  **kwargs):
47  super(HYCOMInit,self).__init__(dstore,conf,section,
48  taskname=taskname,**kwargs)
49  self.forecast_exe=None
50  self.step3_worked=False
51  self.run_coupled=True
52  self.fcstlen=fcstlen
53  self.make_products()
54  def make_products(self):
55  """Initializes all Product objects to make them available to
56  future jobs."""
57  # Add the HyCOM-specific products whose delivery location is
58  # in COM with the standard output file prefix
59  # (invest99l.2017110318).
60  logger=self.log()
62  self.dstore,'hycom_settings',self.taskname,location=
63  self.confstrinterp('{com}/{out_prefix}.hycom_settings'))
64 
65  # prodnameA and prodnameB are three-hourly:
66  fhrs=range(int(self.fcstlen+24.001))
67  fhrs=fhrs[0::3]
68  atime=to_datetime(self.conf.cycle)
69  ftimes=[to_datetime_rel(t*3600,atime) for t in fhrs]
70  self.init_file2a=TimeArray(atime,ftimes[-1],3*3600.0)
71  self.init_file2b=TimeArray(atime,ftimes[-1],3*3600.0)
72  for ftime in ftimes:
73  prodnameA=self.timestr('hwrf_basin.{fahr:03d}.a',ftime,atime)
74  filepathA=self.icstr('{com}/{out_prefix}.{pn}',pn=prodnameA)
75  prodnameB=self.timestr('hwrf_basin.{fahr:03d}.b',ftime,atime)
76  filepathB=self.icstr('{com}/{out_prefix}.{pn}',pn=prodnameB)
77  self.init_file2a[ftime]=FileProduct(
78  self.dstore,prodnameA,self.taskname,location=filepathA)
79  self.init_file2b[ftime]=FileProduct(
80  self.dstore,prodnameB,self.taskname,location=filepathB)
81 
82  # The optional_file is only created if init_script3 succeeds:
83  # self.optional_file=FileProduct(
84  # self.dstore,'optional_file',self.taskname,location=
85  # self.confstrinterp('{com}/{out_prefix}.optional_file'))
86 
87  # initial conditions:
88  self.restart_out=dict()
89  for ab in 'ab':
90  for what in ('restart_out','restart_outR'):
91  local=what+'.'+ab # like restart_out.a or restart_outR.b
92  self.restart_out[local]=FileProduct(self.dstore,local,self.taskname)
93 
94  self.spin_archv_a=FileProduct(self.dstore,'spin_archv_a',self.taskname)
95  self.spin_archv_b=FileProduct(self.dstore,'spin_archv_b',self.taskname)
96 
97  # forcing files for coupled run
98  self.forcing_products=dict()
99  ffiles=['airtmp','precip','presur','radflx','shwflx','surtmp',
100  'tauewd','taunwd','vapmix','wndspd']
101  for ffile in ffiles:
102  for ab in 'ab':
103  file='forcing.%s.%s'%(ffile,ab)
104  comf=self.confstrinterp('{com}/{out_prefix}.'+file)
105  prod=FileProduct(self.dstore,file,self.taskname,location=comf)
106  prod.location=comf
107  self.forcing_products[file]=prod
108  logger.debug('%s => %s (%s)'%(file,comf,repr(prod)))
109 
110  self.limits=FileProduct(
111  self.dstore,'limits',self.taskname,location=
112  self.confstrinterp('{com}/{out_prefix}.limits'))
113 
115  self.dstore,'blkdat.input',self.taskname,location=
116  self.confstrinterp('{com}/{out_prefix}.standalone.blkdat.input'))
117 
118  def fill_ocstatus(self,ocstatus,logger):
119  """Fills the ocean status files with information. This is
120  called from exhwrf_ocean_init after a successful call to the
121  run() function below. The ocstatus argument is the
122  hwrf.coupling.OceanStatus object that fills the files."""
123  # First argument: True=run coupled, False=don't
124  # Second argument: the logger argument sent to this function
125  # Third argument: a list of extra lines to dump into fill_ocstatus
126  ocstatus.set(self.run_coupled,logger,[
127  'forecast_exe=%s'%self.forecast_exe,
128  'step3_worked='+('YES' if self.step3_worked else 'NO'), ])
129  def recall_ocstatus(self,ocstatus,logger):
130  """Reads the ocean status back in during the forecast and
131  check_init jobs, filling the self.run_coupled,
132  self.forecast_exe and self.step3_worked variables."""
133  # Get the name of the first ocstatus file for error reporting:
134  for filename in ocstatus.fileiter():
135  break
136 
137  # Read the lins of the ocstatus file:
138  lines=ocstatus.read(logger)
139  for line in lines:
140  # See if any lines are KEY=VALUE lines, and split them into parts
141  m=re.match('^ *([^ =]+) *= *(.*?) *$',line)
142  if not m:
143  logger.warning('%s: unparseable ocstatus line: %s'
144  %(filename,line))
145  continue
146  (key,value)=m.groups()
147  value=value.strip()
148  key=key.strip()
149 
150  # See if any recognized key=value lines are present:
151  if key=='RUN_COUPLED':
152  if value=='YES': self.run_coupled=True
153  elif value=='NO': self.run_coupled=False
154  else:
155  logger.warning('%s: ignoring unrecognized RUN_COUPLED value %s'%(
156  filename,repr(value)))
157  elif key=='forecast_exe': self.forecast_exe=value
158  elif key=='step3_worked': self.step3_worked=value=='YES'
159  else:
160  logger.warning('%s: ignoring unkown key %s'%(filename,key))
161 
    def find_rtofs_data(self):
        """!Fills the RTOFS staging area with RTOFS data."""

        logger=self.log()
        cyc=self.conf.cycle

        # RTOFS runs one 00Z cycle per day; truncate this cycle's time
        # to midnight to get the matching RTOFS analysis time:
        rtofs_atime=datetime.datetime(cyc.year,cyc.month,cyc.day,0)
        rtofs_ymd=rtofs_atime.strftime('%Y%m%d')

        # Input directories:
        # NOTE(review): tardir/histdir/fcstdir/rtofs_ymd are not used
        # in the visible code — presumably consumed via the namelist
        # template below; confirm against the original file.
        tardir=self.confstr('RTOFS_TAR','/dev/null')
        histdir=self.confstr('RTOFS_HIST','/dev/null')
        fcstdir=self.confstr('RTOFS_FCST','/dev/null')

        # Decide the staging directory:
        outdir=self.confstr('RTOFS_STAGE','')
        if not outdir:
            outdir=os.path.join(self.workdir,
                                rtofs_atime.strftime('rtofs.%Y%m%d'))

        # Get data:
        # NOTE(review): a line is missing from the extracted source
        # here; "ni" below is presumably an
        # hwrf.namelist.NamelistInserter constructed from self.conf —
        # confirm against the original file.
        with NamedDir(outdir,keep=True,logger=logger,rm_first=False) as d:
            parmin=self.confstrinterp('{PARMhwrf}/hwrf_get_rtofs.nml.in')
            parmout='get_rtofs.nml'
            with open(parmin,'rt') as inf:
                with open(parmout,'wt') as outf:
                    outf.write(ni.parse(inf,logger,parmin,atime=rtofs_atime))
            checkrun(mpirun(mpi(self.getexe('hwrf_get_rtofs')),allranks=True),
                     logger=logger)
192 
193  def run(self):
194  """Runs the hycom initialization. Raises an exception if
195  something goes wrong. Returns on success."""
196  logger=self.log()
197  # Reset coupling status information:
198  self.step3_worked=False
199  self.forecast_exe=None
200  self.run_coupled=False
201  try:
202  self.state=RUNNING
203  # Inside the "with" block, we create and cd to the work
204  # directory and then cd back out at the end. The
205  # "rm_first=True" means the directory is deleted first if
206  # it already exists upon entry of the "with" block.
207  with NamedDir(self.workdir,keep=not self.scrub,
208  logger=logger,rm_first=True) as d:
209  # Run first init script and raise an exception if it fails:
210  checkrun(scriptexe(self,'{USHhwrf}/hycom/select_domain.sh'),
211  logger=logger)
212  self.hycom_settings.deliver(frominfo='./hycom_settings')
213 
214  self.find_rtofs_data()
215 
216  # Run second init script and raise an exception if it fails:
217  checkrun(scriptexe(self,'{USHhwrf}/hycom/create_bc_ic.sh'),
218  logger=logger)
219 
220  # Find the executable chosen by select_domain.sh
221  RUNmodIDout=read_RUNmodIDout('./hycom_settings')
222  self.forecast_exe=self.icstr('{forecast_exe}',
223  RUNmodIDout=RUNmodIDout)
224 
225  # Run the standalone:
226  checkrun(mpirun(mpi(self.forecast_exe)*90),logger=self.log())
227 
228  # Deliver BC A files:
229  for (ftime,prod) in self.init_file2a.iteritems():
230  fromloc=prod.prodname # delivery source
231  prod.deliver(frominfo=fromloc,keep=True,logger=logger)
232 
233  # Deliver BC B files:
234  for (ftime,prod) in self.init_file2b.iteritems():
235  fromloc=prod.prodname # delivery source
236  prod.deliver(frominfo=fromloc,keep=True,logger=logger)
237 
238  # Deliver restart files
239  for(prodname,prod) in self.restart_out.iteritems():
240  (local,ab)=prodname.split('.')
241  loc=self.icstr('{'+local+'}',ab=ab,RUNmodIDout=RUNmodIDout)
242  prod.deliver(location=loc,frominfo=prodname,
243  keep=True,logger=logger)
244 
245  # Deliver last standalone archv A and B files:
246  notab=self.conf.cycle.strftime('archv.%Y_%j_%H.')
247  for ab in 'ab':
248  loc=self.icstr('{spin_archv}',ab=ab,RUNmodIDout=RUNmodIDout)
249  self.spin_archv_a.deliver(
250  frominfo=notab+ab,location=loc,keep=True,
251  logger=logger)
252 
253  # Run third init script and raise an exception if it fails:
254  checkrun(scriptexe(self,'{USHhwrf}/hycom/init_step3.sh'),
255  logger=logger)
256 
257  # Deliver the forcing files:
258  for (name,prod) in self.forcing_products.iteritems():
259  prod.deliver(frominfo='./'+name)
260 
261  self.limits.deliver(frominfo='./limits')
262 
263  self.blkdat_input.deliver(frominfo='./blkdat.input')
264 
265  # Make sure we run coupled:
266  self.run_coupled=True
267  self.state=COMPLETED
268  except Exception as e:
269  logger.error('Unhandled exception in ocean init: %s'
270  %(str(e),),exc_info=True)
271  self.state=FAILED
272  raise
273  except: # fatal signal, other catastrophic errors
274  self.state=FAILED
275  raise
276 
278  def __init__(self,hycomfcst,ocstatus):
279  self.hycomfcst=hycomfcst
280  self.ocstatus=ocstatus
281  @property
282  def hycominit(self):
283  return self.hycomfcst.hycominit
284  def check_coupled_inputs(self,logger):
285  """This subroutine is run by the check_init job and checks to
286  see if the initialization has succeeded. It returns True if
287  the inputs are all present, and False if they're not."""
288  hf=self.hycomfcst
289  hi=hf.hycominit
290  hi.recall_ocstatus(self.ocstatus,logger)
291  if not hi.run_coupled:
292  logger.warning('Hycom init says we will not run coupled.')
293  return True
294  # We get here if we run coupled. The HYCOMInit.run() function
295  # always makes the init_file1 and init_file2 products:
296  prods=list()
297 
298  # Check A files:
299  for (ftime,prod) in hi.init_file2a.iteritems():
300  prods.append(prod)
301  # Check B files
302  for (ftime,prod) in hi.init_file2b.iteritems():
303  prods.append(prod)
304  # Check restart files
305  # add restart files
306  # Check forcing files
307  # add forcing files
308 
309  #if hi.step3_worked:
310  # prods.append(hi.optional_file)
311  #else:
312  # logger.info('init_step3 did not work, so skipping optional_file')
313  count=0
314  for prod in prods:
315  if not prod.available:
316  logger.error('%s: product not available'%(prod.did,))
317  elif not prod.location:
318  logger.error('%s: no path set in database'%(prod.did,))
319  elif not os.path.exists(prod.location):
320  logger.error('%s: %s: file does not exist'%(
321  prod.did,prod.location))
322  else:
323  logger.info('%s: %s: product is delivered'%(
324  prod.did,prod.location))
325  count+=1
326  if count<len(prods):
327  logger.error('Have only %d of %d products. Ocean init failed.'%(
328  count,len(prods)))
329  return False
330  else:
331  logger.info('Rejoice: we have all coupled inputs')
332  return True
    def link_coupled_inputs(self,just_check,logger):
        """Called from the forecast job. If just_check=True, this
        calls check_coupled_inputs. Otherwise, this links all hycom
        inputs to the local directory."""
        if just_check:
            return self.check_coupled_inputs(logger)
        hi=self.hycomfcst.hycominit
        hi.recall_ocstatus(self.ocstatus,logger)
        if not hi.run_coupled:
            logger.warning('Hycom init says we will not run coupled.')
            return True

        # List of product object to link/copy:
        prods=list()

        # A files:
        for (ftime,prod) in hi.init_file2a.iteritems():
            prods.append(prod)
        # B files
        for (ftime,prod) in hi.init_file2b.iteritems():
            prods.append(prod)

        # Make the subdirectories
        # NOTE(review): just_check is always False here because of the
        # early return above, so this "if" is redundant but harmless.
        if not just_check:
            produtil.fileop.makedirs('nest',logger=logger)
            produtil.fileop.makedirs('incup',logger=logger)

        # Now link the inputs:
        cycle=self.hycominit.conf.cycle
        for prod in prods:
            if not prod.available or not prod.location or \
                    not os.path.exists(prod.location):
                msg='%s: input not present (location=%s available=%s)'\
                    %(prod.did, repr(prod.location), repr(prod.available))
                logger.error(msg)
            # NOTE(review): a line is missing from the extracted
            # source immediately above — probably a "continue" or a
            # raise after the logger.error; confirm against the
            # original file.
            m=re.match('hwrf_basin.0*(\d+).([ab])',prod.prodname)
            if not m:
                make_symlink(prod.location,prod.prodname,
                             logger=logger,force=True)
            else:
                # BC files are linked into nest/ and incup/ under names
                # derived from the valid time (cycle + forecast hour):
                (hr,ab)=m.groups()
                hr=int(hr)
                logger.info('Link hour %s relative to cycle %s.'%(repr(hr),repr(cycle)))
                t=hwrf.numerics.to_datetime_rel(hr*3600,cycle)
                name1=t.strftime('nest/archv.%Y_%j_%H.')+ab
                name2=t.strftime('incup/incupd.%Y_%j_%H.')+ab
                for name in ( name1, name2 ):
                    make_symlink(prod.location,name,
                                 logger=logger,force=True)

        # DAN link restart file (bill02l.2015061600.rtofs_hat10.restart.[ab])
        op=hi.icstr('{out_prefix}.')
        nop=len(op)
        assert(nop>5)
        for part in [ 'rtofs','limits','forcing' ]:
            globby=hi.icstr('{com}/{out_prefix}.{part}*',part=part)
            nfound=0
            # Path will be something like:
            #   /path/to/com/2015061600/02L/bill02l.2015061600.rtofs_hat10.restart.a
            # ipref is index of the "." after 2015061600
            # localname is rtofs_hat10.restart.a
            # finalname is hat10.restart_in.a
            for path in glob.glob(globby):
                nfound+=1
                ipref=path.rfind(op)
                localname=path[ipref+nop:]
                # Rename restart files (non-restart names are unchanged
                # because the pattern does not match them):
                finalname=re.sub('.*\.restart\.([^/]*)','restart_in.\\1',localname)
                make_symlink(path,finalname,force=True,logger=logger)
            logger.info('%s: %d files linked\n',globby,nfound)
            assert(nfound>0)

        # (A long block of commented-out hard-coded make_symlink
        # examples for restart, limits and forcing files was removed
        # here; the glob loop above replaces them.)

        if not just_check:
            hsprod=self.hycominit.hycom_settings
            assert(hsprod is not None)
            assert(hsprod.available)
            assert(hsprod.location)
            RUNmodIDout=read_RUNmodIDout(hsprod.location)
            self.link_hycom_fix(RUNmodIDout)
            self.link_hycom_parm(RUNmodIDout)
        return True
443 
444  def link_hycom_parm(self,RUNmodIDout):
445  logger=self.hycominit.log()
446  mine={ 'fcst.blkdat.input':'blkdat.input',
447  'patch.input.90':'patch.input',
448  'ports.input':'ports.input' }
449  for (parmbase,localname) in mine.iteritems():
450  parmfile=self.hycominit.icstr(
451  '{PARMhwrf}/hwrf_{RUNmodIDout}.basin.{PARMBASE}',
452  RUNmodIDout=RUNmodIDout,PARMBASE=parmbase)
453  produtil.fileop.make_symlink(parmfile,localname,logger=logger,force=True)
454 
455  def link_hycom_fix(self,RUNmodIDout):
456  assert(RUNmodIDout)
457  logger=self.hycominit.log()
458  globby=self.hycominit.icstr('{FIXhycom}/hwrf_{RUNmodIDout}.basin.*',
459  RUNmodIDout=RUNmodIDout)
460  forcewanted=set(['chl.a','chl.b','kpar.a','kpar.b','offlux.a','offlux.b','rivers.a','rivers.b'])
461  n=0
462  nlinked=0
463  for path in glob.glob(globby):
464  basename=os.path.basename(path)
465  fd=basename.find('forcing')
466  linked=False
467  n+=1
468  if fd<0:
469  bd=basename.find('basin.')
470  if bd>0:
471  produtil.fileop.make_symlink(path,basename[(bd+6):],logger=logger,force=True)
472  linked=True
473  else:
474  forcetype=basename[(fd+8):]
475  if forcetype in forcewanted:
476  produtil.fileop.make_symlink(path,'forcing.'+forcetype,logger=logger,force=True)
477  linked=True
478  if not linked:
479  logger.info('%s: not linking %s'%(basename,path))
480  else:
481  nlinked+=1
482  logger.info('Linked %d of %d HyCOM fix files for RUNmodIDout=%s'%(
483  nlinked,n,repr(RUNmodIDout)))
484  produtil.fileop.make_symlink('../relax.rmu.a','nest/rmu.a',logger=logger,force=True)
485  produtil.fileop.make_symlink('../relax.rmu.b','nest/rmu.b',logger=logger,force=True)
486 
    def make_exe(self,task,exe,ranks):
        """Returns an MPIRanksBase to run the executable chosen by the
        initialization. This function must only be called after
        link_coupled_inputs"""
        if not isinstance(ranks,int):
            raise TypeError('The ranks argument to make_exe must be an int. You provided a %s %s'%(type(ranks).__name__,repr(ranks)))
        wantexe=task.hycominit.forecast_exe
        if not wantexe:
            # NOTE(review): the "raise SomeException(" line that began
            # this statement is missing from the extracted source —
            # probably one of the hwrf.exceptions ocean-init errors;
            # confirm against the original file.
                'The forecast_exe option was not specified in the '
                'ocean status file.')
        return mpi(wantexe)*ranks
499 
501  """This subclass of CoupledWRF runs the HyCOM-coupled WRF."""
502  def __init__(self,dstore,conf,section,wrf,ocstatus,keeprun=True,
503  wrfdiag_stream='auxhist1',hycominit=None,**kwargs):
504  if not isinstance(hycominit,HYCOMInit):
505  raise TypeError(
506  'The hycominit argument to WRFCoupledHYCOM.__init__ must be a '
507  'HYCOMInit object. You passed a %s %s.'%
508  (type(hycominit).__name__,repr(hycominit)))
509  super(WRFCoupledHYCOM,self).__init__(dstore,conf,section,wrf,keeprun,
510  wrfdiag_stream,**kwargs)
511  self._hycominit=hycominit
512  hycominiter=HYCOMIniter(self,ocstatus)
513 
514  self.couple('coupler','hwrf_wm3c','wm3c_ranks',1)
515  self.couple('hycom','hycom','ocean_ranks',90,hycominiter)
516  self.couple('wrf','wrf','wrf_ranks')
517 
518  @property
519  def hycominit(self):
520  """Returns the HYCOMInit object."""
521  return self._hycominit
522 
524  """Runs the ocean post-processor on the HyCOM output, in parallel
525  with the model."""
526  def __init__(self,ds,conf,section,fcstlen,hycom,**kwargs):
527  super(HYCOMPost,self).__init__(ds,conf,section,**kwargs)
528  self.fcstlen=fcstlen
529  self.hycom=hycom # the WRFCoupledHYCOM object
530  def run(self):
531  """Called from the ocean post job to run the HyCOM post."""
532  logger=self.log()
533  self.state=RUNNING
534  try:
535  checkrun(scriptexe(self,'{USHhwrf}/hycom/ocean_post.sh'),
536  logger=logger)
537  self.state=COMPLETED
538  except Exception as e:
539  self.state=FAILED
540  logger.error("Ocean post failed: %s"%(str(e),),exc_info=True)
541  raise
542  def unrun(self):
543  """Called from the unpost job to delete the HyCOM post output
544  in preparation for a rerun of the entire post-processing for
545  the cycle."""
546  logger=self.log()
547  self.state=RUNNING
548  try:
549  checkrun(scriptexe(self,'{USHhwrf}/hycom/ocean_unpost.sh'),
550  logger=logger)
551  self.state=COMPLETED
552  except Exception as e:
553  self.state=FAILED
554  logger.error("Ocean post failed: %s"%(str(e),),exc_info=True)
555  raise
556 
Change directory, handle temporary directories.
Definition: cd.py:1
This module provides a set of utility functions to do filesystem operations.
Definition: fileop.py:1
def confstrinterp(self, string, section=None, kwargs)
Alias for self.icstr for backward compatibility.
Definition: hwrftask.py:319
def check_coupled_inputs(self, logger)
Definition: hycom.py:284
def run(self)
Definition: hycom.py:193
def getexe
Alias for hwrf.config.HWRFConfig.get() for the "exe" section.
Definition: hwrftask.py:403
def link_hycom_parm(self, RUNmodIDout)
Definition: hycom.py:444
def to_datetime_rel(d, rel)
Converts objects to a datetime relative to another datetime.
Definition: numerics.py:319
taskname
Read-only property: the name of this task.
Definition: datastore.py:1134
A subclass of Product that represents file delivery.
Definition: datastore.py:856
The base class of tasks run by the HWRF system.
Definition: hwrftask.py:25
conf
This HWRFTask's hwrf.config.HWRFConfig object.
Definition: hwrftask.py:415
dstore
Read-only property, an alias for getdatastore(), the Datastore in which this Datum resides...
Definition: datastore.py:557
def make_exe(self, task, exe, ranks)
Definition: hycom.py:487
def link_hycom_fix(self, RUNmodIDout)
Definition: hycom.py:455
def fill_ocstatus(self, ocstatus, logger)
Definition: hycom.py:118
def find_rtofs_data(self)
Fills the RTOFS staging area with RTOFS data.
Definition: hycom.py:162
section
The confsection in self.section for this HWRFTask (read-only)
Definition: hwrftask.py:422
A shell-like syntax for running serial, MPI and OpenMP programs.
Definition: run.py:1
Base class of tasks run by HWRF.
Definition: hwrftask.py:1
Stores products and tasks in an sqlite3 database file.
Definition: datastore.py:1
def check_coupled_inputs(self, logger)
Definition: coupling.py:47
This subclass of TempDir takes a directory name, instead of generating one automatically.
Definition: cd.py:228
def makedirs
Make a directory tree, working around filesystem bugs.
Definition: fileop.py:224
Time manipulation and other numerical routines.
Definition: numerics.py:1
def make_products(self)
Definition: hycom.py:54
workdir
The directory in which this task should be run.
Definition: hwrftask.py:156
def timestr(self, string, ftime, atime=None, section=None, kwargs)
Expands a string in the given conf section, including time vars.
Definition: hwrftask.py:367
def scrub(self)
Should temporary files be deleted as soon as they are not needed?
Definition: hwrftask.py:195
def log
Obtain a logging domain.
Definition: hwrftask.py:425
Insert config file data into a Fortran namelist file.
Definition: namelist.py:154
A time-indexed array that can only handle equally spaced times.
Definition: numerics.py:689
Raised when the ocean init did not produce some expected outputs.
Definition: exceptions.py:118
Configures logging.
Definition: log.py:1
def unrun(self)
Definition: hycom.py:542
Exceptions raised by the hwrf package.
Definition: exceptions.py:1
def confstr
Alias for self.conf.getstr for section self.section.
Definition: hwrftask.py:261
def link_coupled_inputs(self, just_check, logger)
Definition: hycom.py:333
def recall_ocstatus(self, ocstatus, logger)
Definition: hycom.py:129
def icstr(self, string, section=None, kwargs)
Expands a string in the given conf section.
Definition: hwrftask.py:351
def run(self)
Definition: hycom.py:530
def scriptexe(task, path)
Definition: hycom.py:18
def make_symlink
Creates a symbolic link "target" that points to "source".
Definition: fileop.py:677