Commit c8153d1b authored by karolina

minor changes before implementing things discussed in mail #3 and #4;

adds a lot of my helper comments, but they will disappear soon.
parent c0c34642
@@ -208,7 +208,7 @@ class Optimizer(object):
savedict['units'] = "mol mol-1"
savedict['dims'] = dimobs
savedict['values'] = self.Hx.tolist()
-savedict['comment'] = '%s mean mixing ratios based on %s state vector' % (type, type,)
+savedict['comment'] = '%s mean mixing ratios based on %s state vector' % (type, type)
f.AddData(savedict)
savedict = io.std_savedict.copy()
@@ -217,7 +217,7 @@ class Optimizer(object):
savedict['units'] = "mol mol-1"
savedict['dims'] = dimobs + dimmembers
savedict['values'] = self.HX_prime.tolist()
-savedict['comment'] = '%s mixing ratio deviations based on %s state vector' % (type, type,)
+savedict['comment'] = '%s mixing ratio deviations based on %s state vector' % (type, type)
f.AddData(savedict)
# Continue with prior only data
...
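The two hunks above only drop a redundant trailing comma from the string-format tuple; the surrounding savedict/AddData pattern is unchanged. As a minimal sketch of that pattern, assuming io.std_savedict is a template dictionary and f is an open io4 output file (names taken from the hunks, the variable name is hypothetical):

    import da.tools.io4 as io

    def write_simulated_means(f, Hx, dimobs, type='prior'):
        # Copy the standard save-dictionary template and fill in this variable's metadata.
        savedict = io.std_savedict.copy()
        savedict['name'] = 'modelsamplesmean_%s' % type   # hypothetical variable name
        savedict['units'] = "mol mol-1"
        savedict['dims'] = dimobs
        savedict['values'] = Hx.tolist()
        # Corrected format tuple, without the superfluous trailing comma.
        savedict['comment'] = '%s mean mixing ratios based on %s state vector' % (type, type)
        f.AddData(savedict)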
@@ -180,7 +180,7 @@ class StateVector(object):
# Create a matrix for state <-> TransCom conversions
-self.tcmatrix = np.zeros((self.nparams, 23,), 'float')
+self.tcmatrix = np.zeros((self.nparams, 23), 'float')
for r in range(1, self.nparams + 1):
sel = (self.gridmap.flat == r).nonzero()
...
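For context, the matrix built in this hunk maps each state-vector parameter onto the 23 TransCom regions. A minimal sketch of how such a mapping could be filled, assuming gridmap labels every grid cell with a parameter index 1..nparams and a second field (here called tcregions, a hypothetical name) labels the same cells with a TransCom region 1..23:

    import numpy as np

    nparams, ntcregions = 240, 23              # illustrative sizes
    gridmap = np.ones((180, 360))              # parameter index per grid cell (dummy values)
    tcregions = np.ones((180, 360), dtype=int) # TransCom region index per cell (dummy values)

    tcmatrix = np.zeros((nparams, ntcregions), 'float')
    for r in range(1, nparams + 1):
        sel = (gridmap.ravel() == r).nonzero()            # cells belonging to parameter r
        regions = tcregions.ravel()[sel]                  # their TransCom regions
        for tc in regions:
            tcmatrix[r - 1, tc - 1] += 1.0 / len(regions) # fractional membership per region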
@@ -11,6 +11,7 @@ File created on 28 Jul 2010.
import os
import sys
import logging
+#from da.baseclasses.statevector import filename
sys.path.append(os.getcwd())
sys.path.append('../../')
@@ -26,7 +27,6 @@ class CtObservations(Observation):
""" an object that holds data + methods and attributes needed to manipulate mixing ratio values """
def Initialize(self):
self.startdate = self.DaCycle['time.sample.start']
self.enddate = self.DaCycle['time.sample.end']
@@ -80,7 +80,7 @@ class CtObservations(Observation):
lons = ncf.GetVariable('lon').take(subselect, axis=0)
alts = ncf.GetVariable('alt').take(subselect, axis=0)
obs = ncf.GetVariable('obs').take(subselect, axis=0) * 1.e-6
-msg = "Converting observed values from ppm to mol/mol!!!!"; logging.info(msg)
+logging.info("Converting observed values from ppm to mol/mol!!!!")
species = ncf.GetVariable('species').take(subselect, axis=0)
species = [s.tostring().lower() for s in species]
species = map(strip, species)
@@ -314,14 +314,14 @@ class CtObservations(Observation):
self.SiteInfo = SiteInfo
-def write_obs_to_file(self):
+def write_obs_to_file(self, filenam="oldstyle"):
"""
Write selected information contained in the Observation object to a file.
"""
import da.tools.io4 as io
-outfile = os.path.join(self.DaCycle['dir.output'], 'sampleinfo_%s.nc' % self.DaCycle['time.sample.stamp'])
+outfile = os.path.join(self.DaCycle['dir.output'], 'sampleinfo_%s__%s.nc' % (self.DaCycle['time.sample.stamp'], filenam))
f = io.CT_CDF(outfile, method='create')
logging.debug('Creating new Sample output file for postprocessing (%s)' % outfile)
@@ -346,7 +346,7 @@ class CtObservations(Observation):
savedict['comment'] = "Unique index number within this dataset ranging from 0 to UNLIMITED."
f.AddData(savedict)
-data = [[d.year, d.month, d.day, d.hour, d.minute, d.second] for d in self.getvalues('xdate') ]
+data = [[d.year, d.month, d.day, d.hour, d.minute, d.second] for d in self.getvalues('xdate')]
savedict = io.std_savedict.copy()
savedict['dtype'] = "int"
...
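The new filenam argument only changes the name of the output file; the rest of write_obs_to_file is untouched. A hedged usage sketch (the 'optimized' tag is hypothetical; the default 'oldstyle' and the sampleinfo_<stamp>__<tag>.nc pattern come from the hunk above):

    # After the prior sweep of the observation operator:
    Samples.write_obs_to_file()                     # -> sampleinfo_<time.sample.stamp>__oldstyle.nc
    # ...and again after the optimization, with a distinguishing tag:
    Samples.write_obs_to_file(filenam="optimized")  # -> sampleinfo_<time.sample.stamp>__optimized.nc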
@@ -48,7 +48,6 @@ class CtOptimizer(Optimizer):
if not self.localization:
return
if self.localizetype == 'CT2007':
tvalue = 1.97591
if np.sqrt(self.R[n, n]) >= 1.5:
for r in range(self.nlag * self.nparams):
...
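This hunk only removes a blank line inside the CT2007 localization branch. For readers unfamiliar with the scheme, a rough sketch of the idea, not the project's exact implementation: the gain for a state-vector element is zeroed when its ensemble correlation with the current observation fails a t-test at the 1.97591 threshold shown above.

    import numpy as np

    def localize_ct2007(HX_prime, X_prime, KG, n, tvalue=1.97591):
        """Zero Kalman-gain entries whose ensemble correlation with observation n
        is statistically insignificant (illustrative sketch only)."""
        nmembers = HX_prime.shape[1]
        for r in range(X_prime.shape[0]):
            # correlation between parameter deviations and simulated-observation deviations
            corr = np.corrcoef(X_prime[r, :], HX_prime[n, :])[0, 1]
            t = corr * np.sqrt((nmembers - 2) / (1.0 - corr ** 2))   # t-statistic of that correlation
            if abs(t) < tvalue:
                KG[r, n] = 0.0
        return KG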
@@ -40,7 +40,7 @@ class CtStateVector(StateVector):
# Get the needed matrices from the specified covariance files
file_ocn_cov = self.DaCycle.DaSystem['ocn.covariance']
-file_bio_cov = self.DaCycle.DaSystem['bio.covariance']
+file_bio_cov = self.DaCycle.DaSystem['bio.covariance'] #LU the logic here is to go back through DaCycle so that the parameter is then fetched from the (CT) DaSystem
# replace YYYY.MM in the ocean covariance file string
...
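The context line below the change mentions substituting YYYY.MM in the ocean covariance file name. A minimal sketch of what that substitution could look like; the file-name pattern and the date are assumptions, not taken from this diff:

    import datetime

    file_ocn_cov = 'cov_ocean.YYYY.MM.nc'   # hypothetical pattern containing YYYY.MM
    date = datetime.datetime(2005, 3, 1)    # hypothetical cycle date
    file_ocn_cov = file_ocn_cov.replace('YYYY.MM', date.strftime('%Y.%m'))
    # -> 'cov_ocean.2005.03.nc'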
@@ -50,7 +50,7 @@ class TM5ObservationOperator(ObservationOperator):
[]> tm=TM5('/Users/peters/Modeling/TM5/tutorial.rc')
[]> tm.WriteRc()
[]> tm.WriteRunRc()
-[]> tm.Run()
+[]> tm.run()
To use this class inside a data assimilation cycle, a stand-alone method "Initialize()" is included which modifies the TM5
settings according to an external dictionary of values to overwrite, and then runs the TM5 model.
@@ -157,7 +157,7 @@ class TM5ObservationOperator(ObservationOperator):
else:
logging.info('Compilation successful, continuing')
-def PrepareRun(self):
+def prepare_run(self):
"""
Prepare a forward model TM5 run, this consists of:
@@ -173,15 +173,15 @@ class TM5ObservationOperator(ObservationOperator):
# Write a modified TM5 model rc-file in which run/break times are defined by our da system
NewItems = {
-'submit.options' : DaPlatForm.ReturnBlockingFlag() ,
-self.timestartkey : self.DaCycle['time.sample.start'] ,
-self.timefinalkey : self.DaCycle['time.sample.end'] ,
-'jobstep.timerange.start' : self.DaCycle['time.sample.start'] ,
-'jobstep.timerange.end' : self.DaCycle['time.sample.end'] ,
-'jobstep.length' : 'inf' ,
-'ct.params.input.dir' : self.DaCycle['dir.input'] ,
-'ct.params.input.file' : os.path.join(self.DaCycle['dir.input'], 'parameters') ,
-'output.flask.infile' : self.DaCycle['ObsOperator.inputfile']
+'submit.options': DaPlatForm.ReturnBlockingFlag(),
+self.timestartkey: self.DaCycle['time.sample.start'],
+self.timefinalkey: self.DaCycle['time.sample.end'],
+'jobstep.timerange.start': self.DaCycle['time.sample.start'],
+'jobstep.timerange.end': self.DaCycle['time.sample.end'],
+'jobstep.length': 'inf',
+'ct.params.input.dir': self.DaCycle['dir.input'],
+'ct.params.input.file': os.path.join(self.DaCycle['dir.input'], 'parameters'),
+'output.flask.infile': self.DaCycle['ObsOperator.inputfile']
}
if self.DaCycle['time.restart']: # If this is a restart from a previous cycle, the TM5 model should do a restart
@@ -190,6 +190,10 @@ class TM5ObservationOperator(ObservationOperator):
else:
NewItems[self.istartkey] = self.coldstartvalue # if not, start TM5 'cold'
logging.debug('Resetting TM5 to perform cold start')
+#LU the part below: simply, if this is a lag other than the first one
+#LU i.e.: we restart either when time.restart = true, which is the case in every subsequent cycle, or when this is not the first step within the sample window. only then the place we restart from differs... or does it not, since we do it after the advance?
+#LU on the other hand, time.sample.window is nothing other than the lag.
+#LU but: the new rc file is written at the end of the cycle, so time.sample.window will always be 0.
if self.DaCycle['time.sample.window'] != 0: # If this is a restart from a previous time step within the filter lag, the TM5 model should do a restart
NewItems[self.istartkey] = self.restartvalue
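The translated helper comments above question when TM5 should restart rather than cold-start. Reduced to its decision rule, the istart selection in this hunk amounts to the sketch below; coldstartvalue = 9 appears later in this diff, restartvalue is set in ValidateRc and not shown here, and the standalone function is purely illustrative:

    def choose_istart(time_restart, sample_window, restartvalue, coldstartvalue=9):
        # Restart from previously saved TM5 fields whenever this is a later cycle
        # (time.restart is True) OR a later step within the filter lag
        # (time.sample.window != 0); only the very first step of the first cycle is cold.
        if time_restart or sample_window != 0:
            return restartvalue
        return coldstartvalue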
@@ -200,7 +204,7 @@ class TM5ObservationOperator(ObservationOperator):
self.ModifyRC(NewItems)
self.WriteRc(self.RcFileName)
-def LoadRc(self, RcFileName):
+def LoadRc(self, RcFileName): #LU this used to say RcfIletype here, which means it was probably always 'pycasso', because otherwise it would have been an error.
"""
This method loads a TM5 rc-file with settings for this simulation
"""
@@ -209,11 +213,11 @@ class TM5ObservationOperator(ObservationOperator):
self.rcfile = rc.RcFile(RcFileName)
self.tm_settings = self.rcfile.values
self.RcFileName = RcFileName
-self.Tm5RcLoaded = True
+self.Tm5RcLoaded = True #LU this looks completely unnecessary
if 'my.source.dirs' in self.tm_settings.keys():
self.RcFileType = 'pycasso'
else:
-self.RcfIleType = 'pre-pycasso'
+self.RcFileType = 'pre-pycasso'
logging.debug('TM5 rc-file loaded successfully')
def ValidateRc(self):
@@ -247,12 +251,12 @@ class TM5ObservationOperator(ObservationOperator):
self.coldstartvalue = 9
needed_rc_items = [
-self.projectkey ,
-self.rundirkey ,
-self.outputdirkey ,
-self.savedirkey ,
-self.timestartkey ,
-self.timefinalkey ,
+self.projectkey,
+self.rundirkey,
+self.outputdirkey,
+self.savedirkey,
+self.timestartkey,
+self.timefinalkey,
self.timelengthkey,
self.istartkey
]
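needed_rc_items lists the keys that ValidateRc requires before a TM5 run can be configured. A minimal sketch of what such a check could look like; the helper and its messages are illustrative, not the repository's actual ValidateRc body:

    import logging

    def validate_rc(tm_settings, needed_rc_items):
        # Fail early if any required key is missing from the loaded rc settings.
        missing = [key for key in needed_rc_items if key not in tm_settings]
        if missing:
            logging.error("Missing required TM5 rc keys: %s" % ", ".join(missing))
            raise IOError
        logging.debug("TM5 rc-file passed validation")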
@@ -296,7 +300,7 @@ class TM5ObservationOperator(ObservationOperator):
self.tm_settings[k] = v
#replace all instances of old with new, but only if it concerns a name of a path!!!
if os.path.exists(str(v)):
-for k_old, v_old in self.tm_settings.iteritems():
+for k_old, v_old in self.tm_settings.iteritems(): #LU I don't think the above makes sense, because then v is the same as v_old
if not isinstance(v_old, str):
continue
if str(v_orig) in str(v_old):
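This hunk (and the doubting helper comment) concerns the part of ModifyRC that, whenever an overriding value is an existing path, also rewrites every other setting that still contains the old path. A rough standalone sketch of that behaviour, under the assumption that this is indeed what the method intends; the real code operates on self.tm_settings and is Python 2, hence iteritems:

    import os

    def modify_rc(tm_settings, new_items):
        # Apply the overrides and propagate path changes to dependent settings.
        for k, v in new_items.items():
            v_orig = tm_settings.get(k)      # value being replaced
            tm_settings[k] = v
            # If the new value is a path on disk, update other settings that still
            # reference the old path (e.g. run/output/save dirs derived from it).
            if v_orig is not None and os.path.exists(str(v)):
                for k_old, v_old in tm_settings.items():
                    if k_old != k and isinstance(v_old, str) and str(v_orig) in v_old:
                        tm_settings[k_old] = v_old.replace(str(v_orig), str(v))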
@@ -322,7 +326,8 @@ class TM5ObservationOperator(ObservationOperator):
"""
Make sure that parameter files are written to the TM5 inputdir, and that observation lists are present
"""
+#LU here we basically check whether the input is in order, i.e. whether all the parameter files exist as well as the observation file pointed to by dacycle[ObsOperator.inputfile]
+#LU and then we also check whether the right executable is there, though I don't really know where it comes from.
datadir = self.tm_settings['ct.params.input.dir']
if not os.path.exists(datadir):
msg = "The specified input directory for the TM5 model to read from does not exist (%s), exiting..." % datadir
@@ -360,7 +365,8 @@ class TM5ObservationOperator(ObservationOperator):
logging.error("Please compile the model with the specified rc-file and the regular TM5 scripts first")
raise IOError
+#LU change the definition in the documentation, because it talks about save.hdf while such a file no longer exists.
+#LU we copy the TM5 restart files, although we actually already copied them earlier
def get_initial_data(self):
""" This method places all initial data needed by an ObservationOperator in the proper folder for the model.
For TM5, this means copying the save_*.hdf* files to the dir.save directory from which TM5 will read initial
@@ -398,14 +404,14 @@ class TM5ObservationOperator(ObservationOperator):
-def Run(self):
+def run(self):
"""
Start the TM5 executable. A new log file is started for the TM5 model IO, and then a subprocess is
spawned with the tm5_mpi_wrapper and the tm5.x executable. The exit code of the model is caught and
only if successfull on all processors will execution of the shell continue.
"""
+#LU I have the impression that this status is of no use anyway.
cwd = os.getcwd()
# From here on, several options should be implemented.
@@ -496,7 +502,7 @@ class TM5ObservationOperator(ObservationOperator):
return code
+#LU I don't know whether this will work well with this code, because maybe there are better ways of checking the status...
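The docstring above describes spawning tm5.x (via tm5_mpi_wrapper) as a subprocess and catching its exit code, which is exactly what the translated comments are second-guessing. A generic sketch of that pattern; the command line and log name are placeholders, not the repository's actual call:

    import logging
    import subprocess

    def run_executable(cmd, logfile):
        # Redirect the model's stdout/stderr to its own log file and capture the exit code.
        with open(logfile, 'w') as log:
            code = subprocess.call(cmd, stdout=log, stderr=log)
        if code == 0:
            logging.info('Model executable finished successfully')
        else:
            logging.error('Model executable returned non-zero exit code %d' % code)
        return code

    # e.g. run_executable(['mpirun', '-np', '4', './tm5.x'], 'tm5.log')  # hypothetical command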
def TM5_With_N_tracers(self):
""" Method handles the case where one TM5 model instance with N tracers does the sampling of all ensemble members"""
import datetime
@@ -640,7 +646,7 @@ if __name__ == "__main__":
tm = TM5ObservationOperator()
tm.Setup()
tm.Initialize()
-tm.Run()
+tm.run()
tm.save_data()
...
@@ -106,15 +106,6 @@ class CycleControl(dict):
self.RestartFileList = [] # List of files needed for restart, to be extended later
self.OutputFileList = [] # List of files needed for output, to be extended later
-def __str__(self):
-msg = "==============================================================="
-msg += "DA Cycle rc-file is %s" % self.RcFileName
-msg += "DA Cycle run directory is %s" % self['dir.da_run']
-msg += "DA Cycle inverse system is %s" % self['da.system']
-msg += "DA Cycle obs operator is %s" % self['da.obsoperator']
-msg += "==============================================================="
-return msg
def LoadRc(self, RcFileName):
"""
@@ -143,7 +134,8 @@ class CycleControl(dict):
self[k] = True
if v in ['False', 'false', 'f', 'F', 'n', 'no']:
self[k] = False
-if 'date' in k : self[k] = ToDatetime(v)
+if 'date' in k :
+self[k] = ToDatetime(v)
if 'time.start' in k :
self[k] = ToDatetime(v)
if 'time.end' in k :
@@ -194,7 +186,7 @@ class CycleControl(dict):
logging.info("===============================================================")
logging.info("DA Cycle start date is %s" % startdate.strftime('%Y-%m-%d %H:%M'))
logging.info("DA Cycle end date is %s" % enddate.strftime('%Y-%m-%d %H:%M'))
logging.info("DA Cycle final date is %s" % finaldate.strftime('%Y-%m-%d %H:%M'))
logging.info("DA Cycle cycle length is %s" % cyclelength)
logging.info("DA Cycle restart is %s" % str(self['time.restart']))
@@ -404,7 +396,7 @@ class CycleControl(dict):
CreateDirs(os.path.join(self['dir.restart.oneago']))
logging.info('Succesfully created the file structure for the assimilation job')
+#LU I think a move of the restart data is missing here
def recover_run(self):
"""
Prepare a recovery from a crashed run. This consists of:
@@ -432,7 +424,7 @@ class CycleControl(dict):
logging.debug("Next cycle start date is %s" % self['time.start'])
# Copy randomseed.pickle file to exec dir
-source = os.path.join(self['dir.restart.current'], 'randomseed.pickle')
+source = os.path.join(self['dir.restart.current'], 'randomseed.pickle') #LU it seems to me that you don't have to give the file name in the destination; if it sees that the destination is a folder, the file just gets copied into it.
dest = os.path.join(self['dir.exec'], 'randomseed.pickle')
shutil.copy(source, dest)
@@ -564,7 +556,7 @@ class CycleControl(dict):
if io_option == 'store':
CreateDirs(os.path.join(targetdir), forceclean=True)
-logging.debug("Performing a %s of data" % (io_option))
+logging.debug("Performing a %s of data" % io_option)
logging.debug(" from directory: %s " % sourcedir)
logging.debug(" to directory: %s " % targetdir)
@@ -610,7 +602,7 @@ class CycleControl(dict):
# The rest is info needed for a system restart, so it modifies the current DaCycle object (self)
self['da.restart.fname'] = fname # needed for next job template
-self.RestartFileList.extend([fname]) # current restart list holds next rc file name
+self.RestartFileList.append(fname) # current restart list holds next rc file name
logging.debug('Added da_runtime.rc to the RestartFileList for later collection')
@@ -635,25 +627,23 @@ class CycleControl(dict):
"""
from string import join
-DaPlatForm = self.DaPlatForm
if self['time.end'] < self['time.finish']:
# file ID and names
jobid = self['time.end'].strftime('%Y%m%d')
targetdir = os.path.join(self['dir.exec'])
jobfile = os.path.join(targetdir, 'jb.%s.jb' % jobid)
-logfile = jobfile.replace('.jb', '.log')
+logfile = os.path.join(targetdir, 'jb.%s.log' % jobid)
+#LU these are the parameters that go at the top of the job file; not always strictly necessary.
# Template and commands for job
-jobparams = {'jobname':"j.%s" % jobid, 'jobtime':'24:00:00', 'logfile':logfile, 'errfile':logfile}
-template = DaPlatForm.get_job_template(jobparams)
+jobparams = {'jobname':"j.%s" % jobid, 'jobtime':'06:00:00', 'logfile': logfile, 'errfile': logfile}
+template = self.DaPlatForm.get_job_template(jobparams)
execcommand = os.path.join(self['dir.da_submit'], sys.argv[0])
-template += 'python %s rc=%s %s' % (execcommand, self['da.restart.fname'], join(self.opts, ''),)
+template += 'python %s rc=%s %s' % (execcommand, self['da.restart.fname'], join(self.opts, ''))
# write and submit
-DaPlatForm.write_job(jobfile, template, jobid)
-jobid = DaPlatForm.submit_job(jobfile, joblog=logfile)
+self.DaPlatForm.write_job(jobfile, template, jobid)
+jobid = self.DaPlatForm.submit_job(jobfile, joblog=logfile)
else:
logging.info('Final date reached, no new cycle started')
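A small worked example of the naming scheme after this change; the directory and date are illustrative:

    import os

    jobid = '20050501'                    # self['time.end'].strftime('%Y%m%d') for an illustrative date
    targetdir = '/scratch/da_run/exec'    # hypothetical dir.exec
    jobfile = os.path.join(targetdir, 'jb.%s.jb' % jobid)   # /scratch/da_run/exec/jb.20050501.jb
    logfile = os.path.join(targetdir, 'jb.%s.log' % jobid)  # /scratch/da_run/exec/jb.20050501.log
    # Both stdout and stderr of the next cycle go to the same log file; the requested
    # wall-clock time in jobparams is now '06:00:00' instead of '24:00:00'.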
@@ -706,7 +696,7 @@ def ParseOptions():
logging.root.setLevel(logging.DEBUG)
if opts:
-optslist = [item[0] for item in opts]
+optslist = [item[0] for item in opts] #LU so what, it will return only the dashes/option names?
else:
optslist = []
...
@@ -41,14 +41,14 @@ def EnsembleSmootherPipeline(DaCycle, PlatForm, DaSystem, Samples, StateVector,
def start_job(DaCycle, DaSystem, DaPlatForm, StateVector, Samples, ObsOperator):
""" Set up the job specific directory structure and create an expanded rc-file """
-DaSystem.Validate()
-DaCycle.DaSystem = DaSystem
-DaCycle.DaPlatForm = DaPlatForm
-DaCycle.Initialize()
-StateVector.DaCycle = DaCycle # also embed object in StateVector so it can access cycle information for I/O etc
-Samples.DaCycle = DaCycle # also embed object in Samples object so it can access cycle information for I/O etc
-ObsOperator.DaCycle = DaCycle # also embed object in ObsOperator object so it can access cycle information for I/O etc
-ObsOperator.Initialize() # Setup Observation Operator
+DaSystem.Validate() #LU only checks that the needed rc items are in the file
+DaCycle.DaSystem = DaSystem #LU assigns the parameter list to DaCycle
+DaCycle.DaPlatForm = DaPlatForm #LU assigns the platform (also a list of parameters) to the cycle
+DaCycle.Initialize() #LU then the cycle gets initialized... very logical
+StateVector.DaCycle = DaCycle # also embed object in StateVector so it can access cycle information for I/O etc #LU the cycle gets assigned to the state vector
+Samples.DaCycle = DaCycle # also embed object in Samples object so it can access cycle information for I/O etc #LU the cycle gets assigned to the samples
+ObsOperator.DaCycle = DaCycle # also embed object in ObsOperator object so it can access cycle information for I/O etc #LU the cycle gets assigned to the obs operator
+ObsOperator.Initialize() # Setup Observation Operator #LU and then the obs operator is initialized
def prepare_state(DaCycle, StateVector):
@@ -61,15 +61,16 @@ def prepare_state(DaCycle, StateVector):
logging.info(header + "starting prepare_state" + footer)
-StateVector.Initialize()
-if not DaCycle['time.restart']:
+StateVector.Initialize() #LU in prepare_state we initialize the state vector to which we have only just assigned values.
+#LU this depends on the cycle; a single cycle may or may not have time.restart.
+if not DaCycle['time.restart']: #LU i.e. if this is the first cycle
# Fill each week from n=1 to n=nlag with a new ensemble
-nlag = StateVector.nlag
+nlag = StateVector.nlag #LU for each n from zero to nlag we determine a date, find the covariance, and make a new ensemble.
for n in range(0, nlag):
-date = DaCycle['time.start'] + datetime.timedelta(days=(n + 0.5) * int(DaCycle['time.cycle']))
+date = DaCycle['time.start'] + datetime.timedelta(days=(n + 0.5) * int(DaCycle['time.cycle'])) #LU this date is only needed here to find the right covariance file.
cov = StateVector.get_covariance(date)
StateVector.make_new_ensemble(n + 1, cov)
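A small worked example of the date used to pick the covariance file in the loop above; the cycle length, start date and nlag are illustrative, the formula is the one in the hunk:

    import datetime

    time_start = datetime.datetime(2005, 1, 1)   # illustrative DaCycle['time.start']
    time_cycle = 7                               # illustrative DaCycle['time.cycle'] in days
    nlag = 3                                     # illustrative StateVector.nlag

    for n in range(0, nlag):
        date = time_start + datetime.timedelta(days=(n + 0.5) * int(time_cycle))
        print(date)
    # 2005-01-04 12:00:00, 2005-01-11 12:00:00, 2005-01-18 12:00:00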
@@ -77,9 +78,8 @@ def prepare_state(DaCycle, StateVector):
# Read the StateVector data from file
-savedir = DaCycle['dir.restart.current']
-filtertime = DaCycle['time.start'].strftime('%Y%m%d')
-filename = os.path.join(savedir, 'savestate.nc')
+filename = os.path.join(DaCycle['dir.restart.current'], 'savestate.nc') #LU now we read savestate.nc
StateVector.ReadFromFile(filename) # by default will read "opt"(imized) variables, and then propagate
@@ -89,8 +89,8 @@ def prepare_state(DaCycle, StateVector):
# Finally, also write the StateVector to a file so that we can always access the a-priori information
-savedir = DaCycle['dir.output']