#!/usr/bin/env python
# da_initexit.py

"""
.. module:: initexit
.. moduleauthor:: Wouter Peters 

Revision History:
File created on 13 May 2009.

The CycleControl class is found in the module :mod:`initexit`. It is derived from the standard python :class:`dictionary` object. It is the only core object of CTDAS that is automatically created in the pipeline; the user (normally) does not need to modify or extend it. The class is created based on options and arguments passed on the command line when submitting your main CTDAS job. 

Valid options are defined in 

.. autofunction:: da.tools.initexit.parse_options

With the name of a valid ``rc-file``, the CycleControl object is instantiated and validated. An example rc-file looks
like this::

    ! Info on the data assimilation cycle

    time.restart        : False                     ! Restart from an existing run T/F
    time.start          : 2000-01-01 00:00:00       ! Start time of first cycle
    time.finish         : 2000-01-08 00:00:00       ! End time of last cycle
    time.cycle          : 7                         ! length of each cycle, 7 means one week
    time.nlag           : 5                         ! number of cycles in one smoother window
    dir.da_run          : ${HOME}/tmp/test_da       ! the run directory for your project

    ! Info on the DA system used

    da.system           : CarbonTracker             ! an identifier for your inversion system
    da.system.rc        : da/rc/carbontracker.rc    ! the settings needed in your inversion system

    ! Info on the forward model to be used

    da.obsoperator         : TM5                                ! an identifier for your observation operator
    da.obsoperator.rc      : ${HOME}/Modeling/TM5/tm5-ctdas.rc  ! the rc-file needed to run your observation operator
    da.optimizer.nmembers  : 30                                 ! the number of ensemble members desired in the optimization

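A minimal sketch of how these pieces are typically wired together in a driver script (the script
name, rc-file path, and the commented-out step are illustrative only)::

    from da.tools.initexit import start_logger, parse_options, validate_opts_args, CycleControl

    start_logger()
    opts, args = parse_options()                  # e.g. invoked as: python das.py rc=da.rc -v
    opts, args = validate_opts_args(opts, args)
    dacycle = CycleControl(opts, args)            # loads and validates the rc-file
    # ... attach the DaSystem and platform objects here ...
    dacycle.setup()                               # creates the directory tree and parses the cycle times
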
The most important methods of the CycleControl object are listed below:

.. autoclass:: da.tools.initexit.CycleControl 
   :members: setup, finalize, collect_restart_data, collect_output,
             submit_next_cycle, setup_file_structure, write_random_seed, read_random_seed

Two important attributes of the CycleControl object are:
    (1) DaSystem, an instance of a :ref:`dasystem`
    (2) DaPlatForm, an instance of a :ref:`platform`

Other functions in the module initexit that are related to the control of a DA cycle are:

.. autofunction:: da.tools.initexit.start_logger 
.. autofunction:: da.tools.initexit.validate_opts_args 


"""
import logging
import os
import sys
import glob
import shutil
import copy
import getopt
import cPickle
import numpy as np
from string import join

import da.tools.rc as rc
from da.tools.general import create_dirs, to_datetime, advance_time

needed_da_items = [
    'time.start',
    'time.finish',
    'time.nlag',
    'time.cycle',
    'dir.da_run',
    'da.resources.ncycles_per_job',
    'da.resources.ntasks',
    'da.resources.ntime',
    'da.system',
    'da.system.rc',
    'da.obsoperator',
    'da.obsoperator.rc',
    'da.optimizer.nmembers']

# only needed in an earlier implementation where each substep was a separate job
# validprocesses = ['start','done','samplestate','advance','invert']


class CycleControl(dict):
    """
    This object controls the CTDAS system flow and functionality.
    """
        
    def __init__(self, opts=[], args={}):
        """
        The CycleControl object is instantiated with a set of options and arguments.
        The list of arguments must contain the name of an existing ``rc-file``. 
        This rc-file is loaded by method :meth:`~da.tools.initexit.CycleControl.load_rc` and validated
        by :meth:`~da.tools.initexit.CycleControl.validate_rc`

        Options for the CycleControl consist of accepted command line flags or arguments 
        defined in :func:`~da.tools.initexit.parse_options`.

        """
        rcfile = args['rc']
        self.load_rc(rcfile)
        self.validate_rc()
        self.opts = opts

        # Add some useful variables to the rc-file dictionary

        self['jobrcfilename'] = rcfile
        self['dir.da_submit'] = os.getcwd()
        self['da.crash.recover'] = '-r' in opts
        self['transition'] = '-t' in opts
        self['verbose'] = '-v' in opts
        self.dasystem = None # to be filled later
        self.restart_filelist = [] # List of files needed for restart, to be extended later
        self.output_filelist = [] # List of files needed for output, to be extended later


    def load_rc(self, rcfilename):
        """ 
        This method loads a DA Cycle rc-file with settings for this simulation 
        """

        rcdata = rc.read(rcfilename)
        for k, v in rcdata.iteritems():
            self[k] = v

        logging.info('DA Cycle rc-file (%s) loaded successfully' % rcfilename)
        

    def validate_rc(self):
        """ 
        Validate the contents of the rc-file given a dictionary of required keys. 
        Currently required keys are :attr:`~da.tools.initexit.needed_da_items`
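
        String values are coerced where recognised; for example::

            'False'                  ->  False                                (boolean-like strings)
            '2000-01-01 00:00:00'    ->  datetime.datetime(2000, 1, 1, 0, 0)  (date/time keys)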
        """

        for k, v in self.iteritems():
            if v in ['True', 'true', 't', 'T', 'y', 'yes']:
                self[k] = True
            if v in ['False', 'false', 'f', 'F', 'n', 'no']:
                self[k] = False
            if 'date' in k : 
                self[k] = to_datetime(v)
            if k in ['time.start', 'time.end', 'time.finish', 'da.restart.tstamp']:
                self[k] = to_datetime(v)
        for key in needed_da_items:
            if not self.has_key(key):
                msg = 'Missing a required value in rc-file : %s' % key
                logging.error(msg)
                logging.error('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! ')
                logging.error('Please note the update on Dec 02 2011 where rc-file names for DaSystem and ')
                logging.error('are from now on specified in the main rc-file (see da/rc/da.rc for example)')
                logging.error('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! ')
                raise IOError, msg
        logging.debug('DA Cycle settings have been validated successfully')

    def parse_times(self):
        """ 
        Parse time related parameters into datetime objects for later use 
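
        For example, with the rc-file values shown in the module docstring
        (``time.start`` 2000-01-01, ``time.cycle`` 7, ``time.finish`` 2000-01-08)::

            time.end    = 2000-01-08 00:00:00   (one cycle after time.start, capped at time.finish)
            cyclelength = 7 days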
        """

        startdate = self['time.start']
        finaldate = self['time.finish']                  

        if finaldate <= startdate:
            logging.error('The start date (%s) is not before the final date (%s), please revise' % (startdate.strftime('%Y%m%d'), finaldate.strftime('%Y%m%d'))) 
            raise ValueError
        cyclelength = self['time.cycle']                 # get time step

# Determine end date

        if cyclelength == 'infinite':
            enddate = finaldate
        else:
            enddate = advance_time(startdate, cyclelength)

        dt = enddate - startdate

        if enddate > finaldate:  # do not run beyond finaldate
            enddate = finaldate

        self['time.start'] = startdate
        self['time.end'] = enddate
        self['time.finish'] = finaldate
        self['cyclelength'] = dt

        logging.info("===============================================================")
        logging.info("DA Cycle start date is %s" % startdate.strftime('%Y-%m-%d %H:%M'))
        logging.info("DA Cycle end date is %s" % enddate.strftime('%Y-%m-%d %H:%M'))
        logging.info("DA Cycle final date is %s" % finaldate.strftime('%Y-%m-%d %H:%M'))  
        logging.info("DA Cycle cycle length is %s" % cyclelength)
        logging.info("DA Cycle restart is %s" % str(self['time.restart']))
        logging.info("===============================================================")


    def set_sample_times(self, lag):
        """
        Set the times over which a sampling interval will loop, depending on 
        the lag. Note that lag falls in the interval [0,nlag-1]
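
        For example, with 7-day cycles starting 2000-01-01, a lag of 2 gives::

            time.sample.start = 2000-01-15 00:00:00
            time.sample.end   = 2000-01-22 00:00:00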
        """

        # Start from cycle times 
        self['time.sample.start'] = copy.deepcopy(self['time.start'])
        self['time.sample.end'] = copy.deepcopy(self['time.end'])

        # Now advance depending on lag

        for l in range(lag):
            self.advance_sample_times()


    def advance_sample_times(self):
        """ 
        Advance sampling start and end time by one cycle interval
        """

        days = self['cyclelength'].days                

        self['time.sample.start'] = advance_time(self['time.sample.start'], days)
        self['time.sample.end'] = advance_time(self['time.sample.end'], days)
    

    def advance_cycle_times(self):
        """ 
        Advance cycle start and end time by one cycle interval
        """
              
        days = self['cyclelength'].days                  

        startdate = advance_time(self['time.start'], days)
        enddate = advance_time(self['time.end'], days)

        filtertime = startdate.strftime('%Y%m%d')
        self['dir.output'] = os.path.join(self['dir.da_run'], 'output', filtertime)

        self['time.start'] = startdate
        self['time.end'] = enddate


    def write_random_seed(self):
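        """ Save the state of the numpy random number generator to a pickle file in the restart directory. """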
        filename = os.path.join(self['dir.restart'], 'randomseed_%s.pickle' % self['time.start'].strftime('%Y%m%d'))
        f = open(filename, 'wb')
        seed = np.random.get_state()
        cPickle.dump(seed, f, -1)
        f.close()

        logging.info("Saved the random seed generator values to file")


    def read_random_seed(self, first=False):
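        """ Restore the numpy random number generator state, either from the file named by the
            ``random.seed.init`` key (first cycle) or from the pickle written by the previous cycle. """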
        if first:
            filename = self['random.seed.init']
            logging.info("Initialised random seed from: %s"%filename)
        else: 
            filename = os.path.join(self['dir.restart'], 'randomseed_%s.pickle' % self['da.restart.tstamp'].strftime('%Y%m%d'))
            logging.info("Retrieved the random seed generator values of last cycle from file")
        f = open(filename, 'rb')
        seed = cPickle.load(f)
        np.random.set_state(seed)
        f.close()


    def setup(self):
        """ 
        This method determines how to proceed with the cycle. Three options are implemented:

            1. *Fresh start*  : set up the required file structure for this simulation and start
            2. *Restart*      : use latest da_runtime variables from the exec dir and restart
            3. *Recover*      : restart after crash by getting data from restart/one-ago folder

        The choice that gets executed depends on the presence of 

            # the ``-r`` option on the command line, this triggers a recover
            # the ``time.restart : True`` option in the da.rc file

        The latter is automatically set if the filter submits the next cycle at the end of the current one, 
        through method :meth:`~da.tools.initexit.CycleControl.submit_next_cycle`.

        The specific call tree under each scenario is: 

            1. *Fresh Start*
                *  :meth:`~da.tools.initexit.CycleControl.setup_file_structure()`  <- Create directory tree
            2. *Restart*
                *  :meth:`~da.tools.initexit.CycleControl.setup_file_structure()`
                *  :meth:`~da.tools.initexit.CycleControl.read_random_seed`    <- Read the random seed from file
            3. *Recover*
                *  :meth:`~da.tools.initexit.CycleControl.setup_file_structure()`
                *  :meth:`~da.tools.initexit.CycleControl.recover_run()`          <- Recover files from restart/one-ago dir, reset ``time.start``
                *  :meth:`~da.tools.initexit.CycleControl.read_random_seed` 

        And is always followed by a call to

            * parse_times()
            * write_rc('jobfilename')
        """        
        if self['transition']:
            logging.info("Transition of filter from previous step with od meteo from 25 to 34 levels")
            self.setup_file_structure()
            strippedname = os.path.split(self['jobrcfilename'])[-1]
            self['jobrcfilename'] = os.path.join(self['dir.exec'], strippedname)
            self.read_random_seed(False)

        elif self['time.restart']:
            logging.info("Restarting filter from previous step")
            self.setup_file_structure()
            strippedname = os.path.split(self['jobrcfilename'])[-1]
            self['jobrcfilename'] = os.path.join(self['dir.exec'], strippedname)
            self.read_random_seed(False)

        else: #assume that it is a fresh start, change this condition to more specific if crash recover added
            logging.info("First time step in filter sequence")
            self.setup_file_structure()

            # expand jobrcfilename to include exec dir from now on.
            # First strip current leading path from filename

            strippedname = os.path.split(self['jobrcfilename'])[-1]
            self['jobrcfilename'] = os.path.join(self['dir.exec'], strippedname)
            shutil.copy(os.path.join(self.dasystem['regionsfile']),os.path.join(self['dir.exec'],'da','analysis','copied_regions.nc'))
            logging.info('Copied regions file to the analysis directory: %s'%os.path.join(self.dasystem['regionsfile'])) 
            shutil.copy(os.path.join(self.dasystem['extendedregionsfile']),os.path.join(self['dir.exec'],'da','analysis','copied_regions_extended.nc')) 
            logging.info('Copied extended regions file to the analysis directory: %s'%os.path.join(self.dasystem['extendedregionsfile'])) 
            for filename in glob.glob(os.path.join(self['dir.exec'],'da','analysis','*.pickle')):
                logging.info('Deleting pickle file %s to make sure the correct regions are used'%os.path.split(filename)[1])
                os.remove(filename) 
            for filename in glob.glob(os.path.join(self['dir.exec'],'*.pickle')):
                logging.info('Deleting pickle file %s to make sure the correct regions are used'%os.path.split(filename)[1])
                os.remove(filename) 
            if self.has_key('random.seed.init'):
                self.read_random_seed(True)

        self.parse_times()
        #self.write_rc(self['jobrcfilename'])

    def setup_file_structure(self):
        """ 
        Create file structure needed for data assimilation system.
        In principle this looks like:

            * ``${da_rundir}``
            * ``${da_rundir}/input``
            * ``${da_rundir}/output``
            * ``${da_rundir}/exec``
            * ``${da_rundir}/analysis``
            * ``${da_rundir}/jobs``
            * ``${da_rundir}/restart/current``
            * ``${da_rundir}/restart/one-ago``

        .. note:: The exec dir will actually be a symlink to the directory where
                 the observation operator executable lives. This directory is passed through
                 the ``da.rc`` file. 

        .. note:: The observation input files will be placed in the exec dir,
                 and the resulting simulated values will be retrieved from there as well.

        """

# Create the run directory for this DA job, including I/O structure

        filtertime = self['time.start'].strftime('%Y%m%d')

        self['dir.exec'] = os.path.join(self['dir.da_run'], 'exec')
        self['dir.input'] = os.path.join(self['dir.da_run'], 'input')
        self['dir.output'] = os.path.join(self['dir.da_run'], 'output', filtertime)
        self['dir.analysis'] = os.path.join(self['dir.da_run'], 'analysis')
        self['dir.jobs'] = os.path.join(self['dir.da_run'], 'jobs')
        self['dir.restart'] = os.path.join(self['dir.da_run'], 'restart')

        create_dirs(self['dir.da_run'])
        create_dirs(os.path.join(self['dir.exec']))
        create_dirs(os.path.join(self['dir.input']))
        create_dirs(os.path.join(self['dir.output']))
        create_dirs(os.path.join(self['dir.analysis']))
        create_dirs(os.path.join(self['dir.jobs']))
        create_dirs(os.path.join(self['dir.restart']))

        logging.info('Successfully created the file structure for the assimilation job')


    def finalize(self):
        """
        Finalize the DA cycle. This means writing the saved data and rc-files for the next run. 
        The following sequence of actions occurs:

            * Write the randomseed to file for reuse in next cycle
            * Write a new ``rc-file`` with ``time.restart : True``, and new ``time.start`` and ``time.end``
            * Collect all data needed for check-pointing (restart from current system state)
            * Move the previous check-pointing data out of the way, and replace it with the current data
            * Submit the next cycle

        """
        self.write_random_seed()                              
        self.write_new_rc_file()                              
        
        self.collect_restart_data()  # Collect restart data for next cycle into a clean restart/current folder
        self.collect_output()  # Collect output data for this cycle into the output directory
        self.submit_next_cycle()

    def collect_output(self):
        """ Collect files that are part of the requested output for this cycle. This function allows users to add files 
            to a list, and then the system will copy these to the current cycle's output directory.
            The list of files included is read from the 
            attribute "output_filelist" which is a simple list of files that can be appended by other objects/methods that
            require output data to be saved.
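
            Other components typically extend this list during the cycle, before :meth:`finalize` runs;
            a hypothetical example::

                dacycle.output_filelist.append(simulated_samples_file)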


        """
        targetdir = os.path.join(self['dir.output'])
        create_dirs(targetdir)

        logging.info("Collecting the required output data") 
        logging.debug("           to   directory: %s " % targetdir)

        for file in set(self.output_filelist):
            if os.path.isdir(file): # skip dirs
                continue
            if not os.path.exists(file): # skip missing files
                logging.warning("           [not found] .... %s " % file)
                continue

            logging.debug("           [copy] .... %s " % file)
            shutil.copy(file, file.replace(os.path.split(file)[0], targetdir))



    def collect_restart_data(self):
        """ Collect files needed for the restart of this cycle in case of a crash, or for the continuation of the next cycle. 
            All files needed are written to the restart/current directory. The list of files included is read from the 
            attribute "restart_filelist" which is a simple list of files that can be appended by other objects/methods that
            require restart data to be saved.

            .. note:: Before collecting the files in the ``restart_filelist``, the restart/current directory will be emptied and
                     recreated. This prevents files from accumulating in the restart/current and restart/one-ago folders. It 
                     also means that if a file is missing from the ``restart_filelist``, it will not be available for check-pointing
                     if your run crashes or dies!

            Currently, the following files are included:

                * The ``da_runtime.rc`` file
                * The ``randomseed.pickle`` file
                * The savestate.nc file
                * The files in the ``ObservationOperator.restart_filelist``, i.e., restart data for the transport model

            .. note:: We assume that the restart files for the :ref:`ObservationOperator` 
                      reside in a separate folder, i.e., the ObservationOperator does *not* write directly to the CTDAS restart dir!

        """

        targetdir = os.path.join(self['dir.restart'])

        #logging.info("Purging the current restart directory before collecting new data")

        #create_dirs(targetdir, forceclean=True)

        logging.info("Collecting the required restart data")
        logging.debug("           to   directory: %s " % targetdir)

        for file in set(self.restart_filelist):
            if os.path.isdir(file): # skip dirs
                continue
            if not os.path.exists(file): 
                logging.warning("           [not found] .... %s " % file)
            else:
                logging.debug("           [copy] .... %s " % file)
                shutil.copy(file, file.replace(os.path.split(file)[0], targetdir))



#
    def write_new_rc_file(self):
        """ Write the rc-file for the next DA cycle. 

            .. note:: The start time for the next cycle is the end time of this one, while 
                      the end time for the next cycle is the current end time + one cycle length. 
                      
            The resulting rc-file is written to ``dir.restart`` so that it can be used when resubmitting the next cycle
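
            For a cycle ending on 2000-01-08, for example, the file written below would be::

                ${dir.restart}/da_runtime_20000108.rc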
            
        """
        
        # We make a copy of the current dacycle object, and modify the start + end dates and restart value

        new_dacycle = copy.deepcopy(self)
        new_dacycle['da.restart.tstamp'] = self['time.start']
        new_dacycle.advance_cycle_times()
        new_dacycle['time.restart'] = True
        
        # Create the name of the rc-file that will hold this new input, and write it

        #fname = os.path.join(self['dir.exec'], 'da_runtime.rc')  # current exec dir holds next rc file
        
        fname = os.path.join(self['dir.restart'], 'da_runtime_%s.rc' % new_dacycle['time.start'].strftime('%Y%m%d'))#advanced time
        
        rc.write(fname, new_dacycle)
        logging.debug('Wrote new da_runtime.rc (%s) to restart dir' % fname)

        # The rest is info needed for a system restart, so it modifies the current dacycle object (self)

        self['da.restart.fname'] = fname    # needed for next job template
        #self.restart_filelist.append(fname)  # not that needed since it is already written to the restart dir...
        #logging.debug('Added da_runtime.rc to the restart_filelist for later collection')


    def write_rc(self, fname):
        """ Write RC file after each process to reflect updated info """

        rc.write(fname, self)
        logging.debug('Wrote expanded rc-file (%s)' % fname)
        

    def submit_next_cycle(self):
        """ 
        Submit the next job of a DA cycle. This consists of 
            * Changing to the working directory from which the job was started initially
            * Creating a line to start the master script again with a newly created rc-file
            * Submitting the jobfile 

        If the end of the cycle series is reached, no new job is submitted.

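        Each cycle appended to the job file looks roughly like this (paths are illustrative)::

            export icycle_in_job=1
            python das.py rc=${dir.restart}/da_runtime_20000108.rc -v >&jb.20000108.log
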
        """
        

        if self['time.end'] < self['time.finish']:

            # file ID and names
            jobid = self['time.end'].strftime('%Y%m%d') 
            targetdir = os.path.join(self['dir.exec'])
            jobfile = os.path.join(targetdir, 'jb.%s.jb' % jobid)
            logfile = os.path.join(targetdir, 'jb.%s.log' % jobid)
            # Template and commands for job
            jobparams = {'jobname':"j.%s" % jobid, 'jobnodes':self['da.resources.ntasks'], 'jobtime': self['da.resources.ntime'], 'logfile': logfile, 'errfile': logfile}
            template = self.daplatform.get_job_template(jobparams)
            execcommand = os.path.join(self['dir.da_submit'], sys.argv[0]) 
            if '-t' in self.opts:
                (self.opts).remove('-t') 

            if not os.environ.has_key('icycle_in_job'):
                logging.info('Environment variable icycle_in_job not found, resubmitting after this cycle')
                os.environ['icycle_in_job'] = self['da.resources.ncycles_per_job']  # assume that if no cycle number is set, we should submit the next job by default
            else:
                logging.info('Environment variable icycle_in_job was found, processing cycle %s of %s in this job'%(os.environ['icycle_in_job'],self['da.resources.ncycles_per_job']) )

            ncycles = int(self['da.resources.ncycles_per_job'])
            for cycle in range(ncycles): 
                nextjobid = '%s'% ( (self['time.end']+cycle*self['cyclelength']).strftime('%Y%m%d'),)
                nextrestartfilename = self['da.restart.fname'].replace(jobid,nextjobid)
                nextlogfilename = logfile.replace(jobid,nextjobid)
                template += '\nexport icycle_in_job=%d\npython %s rc=%s %s >&%s\n' % (cycle+1,execcommand, nextrestartfilename, join(self.opts, ''), nextlogfilename,) 
                #template += '\nexport icycle_in_job=%d\npython %s rc=%s %s >&%s &\n' % (cycle+1,execcommand, nextrestartfilename, join(self.opts, ''), nextlogfilename,)

            # write and submit 
            self.daplatform.write_job(jobfile, template, jobid)
            if 'da.resources.ncycles_per_job' in self:
                do_submit = (int(os.environ['icycle_in_job']) >= int(self['da.resources.ncycles_per_job']))
            else:
                do_submit = False
          
            if do_submit:
                jobid = self.daplatform.submit_job(jobfile, joblog=logfile)

        else:
            logging.info('Final date reached, no new cycle started')


def start_logger(level=logging.INFO):
    """ start the logging of messages to screen"""

# set up the basic logging configuration (messages go to the console)

    logging.basicConfig(level=level,
                        format=' [%(levelname)-7s] (%(asctime)s) py-%(module)-20s : %(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S')

def parse_options():
    """ 
    Function parses options from the command line and returns the arguments as a dictionary.
    Accepted command line arguments are:

    ========  =======
    Argument  Meaning
    ========  =======
    -v        verbose output in log files
    -h        display help
    -r        start a simulation by recovering from a previous crash
589
    -t        start a simulation by transitioning from 25 to 34 layers in December 2005 (od meteo)
    ========  =======

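    For example, a hypothetical invocation ``python das.py rc=da.rc -v`` returns::

        (['-v'], {'rc': 'da.rc'})
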
    """

# Parse the option flags from the command line

    opts = []
    args = []
    try:                                
        opts, args = getopt.gnu_getopt(sys.argv[1:], "-rvt")
    except getopt.GetoptError, msg:           
        logging.error('%s' % msg)
        sys.exit(2)      

    for options in opts:
        options = options[0].lower()
        if options == '-r':
            logging.info('-r flag specified on command line: recovering from crash')
        if options == '-t':
            logging.info('-t flag specified on command line: transition with od from December 2005')    
        if options == '-v':
            logging.info('-v flag specified on command line: extra verbose output')
            logging.root.setLevel(logging.DEBUG)

    if opts: 
        optslist = [item[0] for item in opts]
    else:
        optslist = []

# Parse arguments and return as dictionary

    arguments = {}
    for item in args:
        #item=item.lower()

# Catch arguments that are passed not in "key=value" format

        if '=' in item:
            key, arg = item.split('=')
        else:
            logging.error('Argument passed without description (%s)' % item)
            raise getopt.GetoptError, item

        arguments[key] = arg


    return optslist, arguments

def validate_opts_args(opts, args):
    """ 
 Validate the options and arguments passed from the command line before starting the cycle. The validation consists of checking for the presence of an argument "rc", and the existence of
 the specified rc-file.  
 
    """
    if not args.has_key("rc"):
        msg = "There is no rc-file specified on the command line. Please use rc=yourfile.rc"
        logging.error(msg)
        raise IOError, msg
    elif not os.path.exists(args['rc']):
        msg = "The specified rc-file (%s) does not exist " % args['rc'] 
        logging.error(msg)
        raise IOError, msg

    # WP not needed anymore
    #if not args.has_key('process'):
    #    msg = "There is no process specified on the command line, assuming process=Start"   ; logging.info(msg)
    #    args['process'] = 'start'
    #if args['process'].lower() not in validprocesses:
    #    msg = "The specified process (%s) is not valid"%args['process']   ; logging.error(msg)
    #    raise IOError,msg

    return opts, args


if __name__ == "__main__":
    pass