HWRF  trunk@4391
hwrf_make_jobs.py
#! /usr/bin/env python

##@namespace ush.hwrf_make_jobs
# Makes the jobs/JHWRF_* sh scripts.
#
# This script is run by EMC and NCO to generate the jobs/JHWRF_* files,
# which are the shell wrappers around the Python scripts. The calling
# convention is quite simple:
# @code{.sh}
# ush/hwrf_make_jobs.py
# @endcode
#
# It should be called from the directory above ush/ and jobs/. It
# reads in the jobs/JHWRF.in file and parses it repeatedly, once per
# output jobs/JHWRF_* file. Each time through, it generates a dict of
# variables (job name, etc.) to send to a produtil.atparse.ATParser,
# which generates the final job script.
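#
# For example, given this hypothetical JHWRF.in fragment (a sketch;
# the @[...] markers are produtil.atparse substitutions, and the
# fragment itself is not from the real template):
# @code{.sh}
# export EXNAME=@[EXNAME]
# exec $HOMEhwrf/scripts/exhwrf_@[EXNAME].py
# @endcode
# parsing it with the dict from job('GSI') would produce a JHWRF_GSI
# script that runs exhwrf_gsi.py.
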
import os, sys, StringIO, logging
import produtil.setup, produtil.atparse
from os.path import join,dirname

def job(JJOBNAME,JOBMORE='',EXNAME=None,**kwargs):
    """!Makes a dict to pass to the ATParser to generate a job with
    the given specifications.
    @param JJOBNAME the JJOB's name: the part after "JHWRF_"
    @param JOBMORE sets the JOBMORE variable, which distinguishes
      between multiple ways of running the same job (empty by default)
    @param EXNAME ex-script name (the part between exhwrf_ and .py).
      Set automatically from the JJOBNAME if absent.
    @param kwargs inserted into the resulting dict via "update"
    @returns the new dict"""
    if EXNAME is None: EXNAME=JJOBNAME.lower()
    out=dict(JJOBNAME=str(JJOBNAME),
             JOBMORE=str(JOBMORE),
             EXNAME=str(EXNAME))
    out.update(PARQ='devmax2',SHAREQ='devmax2_shared')
    out.update(kwargs)
    return out

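# A minimal sketch of what job() returns (values from the code above):
#   job('GSI') -> dict(JJOBNAME='GSI', JOBMORE='', EXNAME='gsi',
#                      PARQ='devmax2', SHAREQ='devmax2_shared')
#   job('GSI',EXNAME='gsi_d02') would override the ex-script name
#   (the 'gsi_d02' name is hypothetical).
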
def make_job(jd,lines,logger):
    """!Makes one J-Job by parsing the given lines using an ATParser.
    @param jd a dict to pass to the ATParser
    @param lines an array of lines from JHWRF.in
    @param logger where to send errors
    @returns a string containing whatever should be in the job file"""
    sio=StringIO.StringIO()
    ap=produtil.atparse.ATParser(sio,jd,logger)
    for i,line in enumerate(lines,1):
        ap.parse_line(line,'JHWRF.in',i)
    out=sio.getvalue()
    sio.close()
    return out

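# A sketch of make_job in action, assuming produtil.atparse's @[...]
# substitution and a hypothetical one-line template:
#   make_job(job('POST'), ['EXNAME=@[EXNAME]\n'], logger)
# would return the string 'EXNAME=post\n'.
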
def main():
    """!Main program. Loops over all known job names, producing the
    resulting job file for each inside the jobs/ directory."""
    produtil.setup.setup()
    logger=logging.getLogger('hwrf_make_jobs')

    # List of jobs to create:
    jobs = [ job('GSI'), job('BUFRPREP'), job('ENSDA'), job('ENSDA_OUTPUT'),
             job('ENSDA_PRE'), job('FORECAST'), job('GSI_POST'),
             job('INIT'), job('LAUNCH'), job('OUTPUT'), job('UNPOST'),
             job('MERGE'), job('RELOCATE'), job('OCEAN_INIT'), job('POST'),
             job('PRODUCTS') ]

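    # A hypothetical variant entry, for illustration: adding
    # job('ENSDA',JOBMORE='spinup') here would generate the file
    # JHWRF_ENSDA.mode.spinup (see the filename logic below).
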
    # Read the JHWRF.in file:
    hwrf_make_jobs_py=os.path.realpath(__file__)
    HOMEhwrf=dirname(dirname(hwrf_make_jobs_py))
    JOBhwrf=join(HOMEhwrf,'jobs')
    JHWRF_in_path=join(JOBhwrf,'JHWRF.in')
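    # For example, if this script were /path/to/HWRF/ush/hwrf_make_jobs.py
    # (a hypothetical location), HOMEhwrf would be /path/to/HWRF and the
    # template read below would be /path/to/HWRF/jobs/JHWRF.in.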
    try:
        with open(JHWRF_in_path,'rt') as jhwrf_in_file:
            jhwrf_in=jhwrf_in_file.readlines()
    except EnvironmentError as e:
        logger.error('%s: %s'%(JHWRF_in_path,str(e)),exc_info=True)
        sys.exit(1)

    # Make the jobs:
    for jd in jobs:
        filename=os.path.join(JOBhwrf,'JHWRF_'+jd['JJOBNAME'].upper())
        if 'JOBMORE' in jd and jd['JOBMORE']:
            # job run in multiple different ways:
            filename+='.mode.'+jd['JOBMORE']
        contents=make_job(jd,jhwrf_in,logger)
        logger.info('%s: write file'%(filename,))
        with open(filename,'wt') as outf:
            outf.write(contents)

if __name__=='__main__': main()