Commit c0706f73 authored by Woude, Auke van der's avatar Woude, Auke van der
Browse files

write to py3

parent b4e4a62a
...@@ -44,14 +44,14 @@ class CO2DaSystem(DaSystem): ...@@ -44,14 +44,14 @@ class CO2DaSystem(DaSystem):
'run.obsflag'] 'run.obsflag']
for k, v in self.iteritems(): for k, v in self.items():
if v == 'True' : if v == 'True' :
self[k] = True self[k] = True
if v == 'False': if v == 'False':
self[k] = False self[k] = False
for key in needed_rc_items: for key in needed_rc_items:
if not self.has_key(key): if key not in self:
logging.warning('Missing a required value in rc-file : %s' % key) logging.warning('Missing a required value in rc-file : %s' % key)
logging.debug('DA System Info settings have been validated succesfully') logging.debug('DA System Info settings have been validated succesfully')
......
...@@ -314,7 +314,8 @@ class EmisModel(object): ...@@ -314,7 +314,8 @@ class EmisModel(object):
infile = os.path.join(self.emisdir, 'temporal_data_%03d.nc'%n) infile = os.path.join(self.emisdir, 'temporal_data_%03d.nc'%n)
tpr = io.ct_read(infile, method='read') tpr = io.ct_read(infile, method='read')
itimes = tpr.get_variable('Times') itimes = tpr.get_variable('Times')
times = array([dtm.datetime(int(''.join(d[:4])),int(''.join(d[5:7])),int(''.join(d[8:10])),int(''.join(d[11:13])),int(''.join(d[14:16]))) for d in itimes]) itimes = [b''.join(d) for d in itimes]
times = array([dtm.datetime.strptime(d.decode(), '%Y-%m-%d_%H:%M:%S') for d in itimes])
if psdo == 1: if psdo == 1:
startdum=dtm.datetime(self.startdate.year,self.startdate.month,self.startdate.day-1,1,0) startdum=dtm.datetime(self.startdate.year,self.startdate.month,self.startdate.day-1,1,0)
subselect = logical_and(times >= startdum, times <= self.enddate).nonzero()[0] subselect = logical_and(times >= startdum, times <= self.enddate).nonzero()[0]
......
...@@ -44,7 +44,7 @@ class RdamObservations(Observations): ...@@ -44,7 +44,7 @@ class RdamObservations(Observations):
if not os.path.exists(op_dir): if not os.path.exists(op_dir):
msg = 'Could not find the required ObsPack distribution (%s) ' % op_dir msg = 'Could not find the required ObsPack distribution (%s) ' % op_dir
logging.error(msg) logging.error(msg)
raise IOError, msg raise IOError(msg)
else: else:
self.obspack_dir = op_dir self.obspack_dir = op_dir
self.obspack_id = op_id self.obspack_id = op_id
...@@ -80,10 +80,10 @@ class RdamObservations(Observations): ...@@ -80,10 +80,10 @@ class RdamObservations(Observations):
infile = os.path.join(self.obspack_dir, ncfile + '.nc') infile = os.path.join(self.obspack_dir, ncfile + '.nc')
ncf = io.ct_read(infile, 'read') ncf = io.ct_read(infile, 'read')
idates = ncf.get_variable('Times') idates = ncf.get_variable('Times')
dates = array([dtm.datetime(int(''.join(d[:4])),int(''.join(d[5:7])),int(''.join(d[8:10])),int(''.join(d[11:13])),0) for d in idates]) idates = [b''.join(d) for d in idates]
dates = array([dtm.datetime.strptime(d.decode(), '%Y-%m-%d_%H:%M:%S') for d in idates])
subselect = logical_and(dates >= self.startdate , dates <= self.enddate).nonzero()[0] subselect = logical_and(dates >= self.startdate , dates <= self.enddate).nonzero()[0]
dates = dates.take(subselect, axis=0) dates = dates.take(subselect, axis=0)
datasetname = ncfile # use full name of dataset to propagate for clarity datasetname = ncfile # use full name of dataset to propagate for clarity
...@@ -111,7 +111,7 @@ class RdamObservations(Observations): ...@@ -111,7 +111,7 @@ class RdamObservations(Observations):
logging.error(msg) logging.error(msg)
logging.error("Did the sampling step succeed?") logging.error("Did the sampling step succeed?")
logging.error("...exiting") logging.error("...exiting")
raise IOError, msg raise IOError(msg)
ncf = io.ct_read(filename, method='read') ncf = io.ct_read(filename, method='read')
ids = ncf.get_variable('obs_num') ids = ncf.get_variable('obs_num')
...@@ -120,7 +120,7 @@ class RdamObservations(Observations): ...@@ -120,7 +120,7 @@ class RdamObservations(Observations):
logging.info("Successfully read data from model sample file (%s)" % filename) logging.info("Successfully read data from model sample file (%s)" % filename)
obs_ids = self.getvalues('id').tolist() obs_ids = self.getvalues('id').tolist()
ids = map(int, ids) ids = list(map(int, ids))
missing_samples = [] missing_samples = []
...@@ -209,13 +209,12 @@ class RdamObservations(Observations): ...@@ -209,13 +209,12 @@ class RdamObservations(Observations):
savedict['missing_value'] = -999.9 savedict['missing_value'] = -999.9
f.add_data(savedict) f.add_data(savedict)
data = np.array(self.tracer_list) data = np.array(self.getvalues('species'))#tracer_list)
savedict = io.std_savedict.copy() savedict = io.std_savedict.copy()
savedict['dtype'] = "int" savedict['dtype'] = "char"
savedict['name'] = "source_type" savedict['name'] = "source_type"
savedict['units'] = "NA" savedict['units'] = "NA"
savedict['dims'] = dimid savedict['dims'] = dimid + dim10char
savedict['values'] = data.tolist() savedict['values'] = data.tolist()
savedict['missing_value'] = -9 savedict['missing_value'] = -9
f.add_data(savedict) f.add_data(savedict)
...@@ -231,29 +230,29 @@ class RdamObservations(Observations): ...@@ -231,29 +230,29 @@ class RdamObservations(Observations):
savedict['missing_value'] = '!' savedict['missing_value'] = '!'
f.add_data(savedict) f.add_data(savedict)
data = self.getvalues('obs') data = self.getvalues('obs')
savedict = io.std_savedict.copy()
savedict['name'] = "observed"
savedict['long_name'] = "observedvalues"
savedict['units'] = "mol mol-1"
savedict['dims'] = dimid
savedict['values'] = data.tolist()
savedict['comment'] = 'Observations used in optimization'
f.add_data(savedict)
data = self.getvalues('mdm')
savedict = io.std_savedict.copy() savedict = io.std_savedict.copy()
savedict['name'] = "modeldatamismatch" savedict['name'] = "observed"
savedict['long_name'] = "modeldatamismatch" savedict['long_name'] = "observedvalues"
savedict['units'] = "[mol mol-1]" savedict['units'] = "mol mol-1"
savedict['dims'] = dimid savedict['dims'] = dimid
savedict['values'] = data.tolist() savedict['values'] = data.tolist()
savedict['comment'] = 'Standard deviation of mole fractions resulting from model-data mismatch' savedict['comment'] = 'Observations used in optimization'
f.add_data(savedict) f.add_data(savedict)
f.close()
data = self.getvalues('mdm')
savedict = io.std_savedict.copy()
savedict['name'] = "modeldatamismatch"
savedict['long_name'] = "modeldatamismatch"
savedict['units'] = "[mol mol-1]"
savedict['dims'] = dimid
savedict['values'] = data.tolist()
savedict['comment'] = 'Standard deviation of mole fractions resulting from model-data mismatch'
f.add_data(savedict)
f.close()
logging.debug("Successfully wrote data to obs file") logging.debug("Successfully wrote data to obs file")
logging.info("Sample input file for obs operator now in place [%s]" % obsinputfile) logging.info("Sample input file for obs operator now in place [%s]" % obsinputfile)
...@@ -271,7 +270,7 @@ class RdamObservations(Observations): ...@@ -271,7 +270,7 @@ class RdamObservations(Observations):
if not os.path.exists(filename): if not os.path.exists(filename):
msg = 'Could not find the required sites.rc input file (%s) ' % filename msg = 'Could not find the required sites.rc input file (%s) ' % filename
logging.error(msg) logging.error(msg)
raise IOError, msg raise IOError(msg)
else: else:
self.sites_file = filename self.sites_file = filename
...@@ -285,7 +284,7 @@ class RdamObservations(Observations): ...@@ -285,7 +284,7 @@ class RdamObservations(Observations):
logging.warning('Model-data mismatch scaling factor : %f ' % self.global_R_scaling) logging.warning('Model-data mismatch scaling factor : %f ' % self.global_R_scaling)
logging.debug('Model-data mismatch site categories : %d ' % self.n_site_categories) logging.debug('Model-data mismatch site categories : %d ' % self.n_site_categories)
cats = [k for k in sites_weights.keys() if 'site.category' in k] cats = [k for k in list(sites_weights.keys()) if 'site.category' in k]
site_categories = {} site_categories = {}
for key in cats: for key in cats:
...@@ -300,7 +299,7 @@ class RdamObservations(Observations): ...@@ -300,7 +299,7 @@ class RdamObservations(Observations):
site_move = {} site_move = {}
site_hourly = {} # option added to include only certain hours of the day (for e.g. PAL) IvdL site_hourly = {} # option added to include only certain hours of the day (for e.g. PAL) IvdL
site_incalt = {} # option to increase sampling altitude for sites specified in sites and weights file site_incalt = {} # option to increase sampling altitude for sites specified in sites and weights file
for key, value in sites_weights.iteritems(): for key, value in sites_weights.items():
if 'obsfile' in key: # to be fixed later, do not yet know how to parse valid keys from rc-files yet.... WP if 'obsfile' in key: # to be fixed later, do not yet know how to parse valid keys from rc-files yet.... WP
sitename, sitecategory = key, value sitename, sitecategory = key, value
sitename = sitename.strip() sitename = sitename.strip()
...@@ -325,8 +324,8 @@ class RdamObservations(Observations): ...@@ -325,8 +324,8 @@ class RdamObservations(Observations):
identifier = obs.code identifier = obs.code
# species, site, method, lab, datasetnr = identifier.split('_') # species, site, method, lab, datasetnr = identifier.split('_')
if site_info.has_key(identifier): if identifier in site_info:
if site_hourly.has_key(identifier): if identifier in site_hourly:
obs.samplingstrategy = 2 obs.samplingstrategy = 2
hourf, hourt = site_hourly[identifier] hourf, hourt = site_hourly[identifier]
if int(obs.xdate.hour) >= hourf and int(obs.xdate.hour) <= hourt: if int(obs.xdate.hour) >= hourf and int(obs.xdate.hour) <= hourt:
...@@ -351,7 +350,7 @@ class RdamObservations(Observations): ...@@ -351,7 +350,7 @@ class RdamObservations(Observations):
else: else:
logging.warning("Observation NOT found (%s, %d), please check sites.rc file (%s) !!!" % (identifier, obs.id, self.sites_file)) logging.warning("Observation NOT found (%s, %d), please check sites.rc file (%s) !!!" % (identifier, obs.id, self.sites_file))
if site_move.has_key(identifier): if identifier in site_move:
movelat, movelon = site_move[identifier] movelat, movelon = site_move[identifier]
obs.lat = obs.lat + movelat obs.lat = obs.lat + movelat
...@@ -359,7 +358,7 @@ class RdamObservations(Observations): ...@@ -359,7 +358,7 @@ class RdamObservations(Observations):
logging.warning("Observation location for (%s, %d), is moved by %3.2f degrees latitude and %3.2f degrees longitude" % (identifier, obs.id, movelat, movelon)) logging.warning("Observation location for (%s, %d), is moved by %3.2f degrees latitude and %3.2f degrees longitude" % (identifier, obs.id, movelat, movelon))
if site_incalt.has_key(identifier): if identifier in site_incalt:
incalt = site_incalt[identifier] incalt = site_incalt[identifier]
obs.height = obs.height + incalt obs.height = obs.height + incalt
...@@ -387,7 +386,7 @@ class RdamObservations(Observations): ...@@ -387,7 +386,7 @@ class RdamObservations(Observations):
logging.error(msg) logging.error(msg)
logging.error("Did the sampling step succeed?") logging.error("Did the sampling step succeed?")
logging.error("...exiting") logging.error("...exiting")
raise IOError, msg raise IOError(msg)
ncf = io.ct_read(filename, method='read') ncf = io.ct_read(filename, method='read')
ids = ncf.get_variable('obs_num') ids = ncf.get_variable('obs_num')
...@@ -407,7 +406,7 @@ class RdamObservations(Observations): ...@@ -407,7 +406,7 @@ class RdamObservations(Observations):
f.close() f.close()
#return outfile #return outfile
for key, value in self.site_move.iteritems(): for key, value in self.site_move.items():
msg = "Site is moved by %3.2f degrees latitude and %3.2f degrees longitude" % value msg = "Site is moved by %3.2f degrees latitude and %3.2f degrees longitude" % value
f.add_attribute(key, msg) f.add_attribute(key, msg)
......
...@@ -427,7 +427,7 @@ class STILTObservationOperator(object): ...@@ -427,7 +427,7 @@ class STILTObservationOperator(object):
if not os.path.exists(datadir): if not os.path.exists(datadir):
msg = "The specified input directory for the OPS model to read from does not exist (%s), exiting..." % datadir msg = "The specified input directory for the OPS model to read from does not exist (%s), exiting..." % datadir
logging.error(msg) logging.error(msg)
raise IOError, msg raise IOError(msg)
datafiles = os.listdir(datadir) datafiles = os.listdir(datadir)
...@@ -436,15 +436,15 @@ class STILTObservationOperator(object): ...@@ -436,15 +436,15 @@ class STILTObservationOperator(object):
if not os.path.exists(obsfile): if not os.path.exists(obsfile):
msg = "The specified obs input file for the OPS model to read from does not exist (%s), exiting..." % obsfile msg = "The specified obs input file for the OPS model to read from does not exist (%s), exiting..." % obsfile
logging.error(msg) logging.error(msg)
if not self.dacycle.has_key('forward.savestate.dir'): if 'forward.savestate.dir' not in self.dacycle:
raise IOError, msg raise IOError(msg)
for n in range(int(self.dacycle['da.optimizer.nmembers'])): for n in range(int(self.dacycle['da.optimizer.nmembers'])):
paramfile = 'parameters.%03d.nc' % n paramfile = 'parameters.%03d.nc' % n
if paramfile not in datafiles: if paramfile not in datafiles:
msg = "The specified parameter input file for the OPS model to read from does not exist (%s), exiting..." % paramfile msg = "The specified parameter input file for the OPS model to read from does not exist (%s), exiting..." % paramfile
logging.error(msg) logging.error(msg)
raise IOError, msg raise IOError(msg)
self.ops_exec = self.model_settings['run.opsexec'] self.ops_exec = self.model_settings['run.opsexec']
if not os.path.exists(self.ops_exec): if not os.path.exists(self.ops_exec):
...@@ -513,7 +513,7 @@ class STILTObservationOperator(object): ...@@ -513,7 +513,7 @@ class STILTObservationOperator(object):
elif psdo == 0: elif psdo == 0:
#varname = '%sbg_true'%(self.spname[s]) #varname = '%sbg_true'%(self.spname[s])
varname = '%sbg_prior'%(self.spname[s]) varname = '%sbg_prior'%(self.spname[s])
if varname in bf.variables.keys(): if varname in list(bf.variables.keys()):
bgc = bf.get_variable(varname)[idx1[0]:idx2[0]+1] bgc = bf.get_variable(varname)[idx1[0]:idx2[0]+1]
else: else:
bgc = np.zeros(idx2-idx1+1) bgc = np.zeros(idx2-idx1+1)
...@@ -604,8 +604,8 @@ class STILTObservationOperator(object): ...@@ -604,8 +604,8 @@ class STILTObservationOperator(object):
import da.tools.io4 as io import da.tools.io4 as io
f = io.CT_CDF(self.simulated_file, method='create') f = io.CT_CDF(self.simulated_file, method='create')
logging.debug('Creating new simulated observation file in ObservationOperator (%s)' % self.simulated_file) logging.debug('Creating new simulated observation file in ObservationOperator (%s)' % self.simulated_file)
dimid = f.createDimension('obs_num', size=None) dimid = f.createDimension('obs_num', size=None)
dimid = ('obs_num',) dimid = ('obs_num',)
savedict = io.std_savedict.copy() savedict = io.std_savedict.copy()
...@@ -632,7 +632,7 @@ class STILTObservationOperator(object): ...@@ -632,7 +632,7 @@ class STILTObservationOperator(object):
ids = f_in.get_variable('obs_num') ids = f_in.get_variable('obs_num')
for i,data in enumerate(zip(ids,self.mod)): for i,data in enumerate(zip(ids,self.mod)):
f.variables['obs_num'][i] = data[0] f.variables['obs_num'][i] = data[0]
f.variables['model'][i,:] = data[1] f.variables['model'][i,:] = data[1]
dum=f.variables['model'][:] dum=f.variables['model'][:]
......
...@@ -44,18 +44,18 @@ def forward_pipeline(dacycle, platform, dasystem, samples, statevector, obsopera ...@@ -44,18 +44,18 @@ def forward_pipeline(dacycle, platform, dasystem, samples, statevector, obsopera
logging.info(header + "Initializing current cycle" + footer) logging.info(header + "Initializing current cycle" + footer)
start_job(dacycle, dasystem, platform, statevector, samples, obsoperator) start_job(dacycle, dasystem, platform, statevector, samples, obsoperator)
if dacycle.has_key('forward.savestate.exceptsam'): if 'forward.savestate.exceptsam' in dacycle:
sam = (dacycle['forward.savestate.exceptsam'].upper() in ["TRUE","T","YES","Y"]) sam = (dacycle['forward.savestate.exceptsam'].upper() in ["TRUE","T","YES","Y"])
else: else:
sam = False sam = False
if dacycle.has_key('forward.savestate.dir'): if 'forward.savestate.dir' in dacycle:
fwddir = dacycle['forward.savestate.dir'] fwddir = dacycle['forward.savestate.dir']
else: else:
logging.debug("No forward.savestate.dir key found in rc-file, proceeding with self-constructed prior parameters") logging.debug("No forward.savestate.dir key found in rc-file, proceeding with self-constructed prior parameters")
fwddir = False fwddir = False
if dacycle.has_key('forward.savestate.legacy'): if 'forward.savestate.legacy' in dacycle:
legacy = (dacycle['forward.savestate.legacy'].upper() in ["TRUE","T","YES","Y"]) legacy = (dacycle['forward.savestate.legacy'].upper() in ["TRUE","T","YES","Y"])
else: else:
legacy = False legacy = False
...@@ -172,7 +172,7 @@ def analysis_pipeline(dacycle, platform, dasystem, samples, statevector): ...@@ -172,7 +172,7 @@ def analysis_pipeline(dacycle, platform, dasystem, samples, statevector):
def archive_pipeline(dacycle, platform, dasystem): def archive_pipeline(dacycle, platform, dasystem):
""" Main entry point for archiving of output from one disk/system to another """ """ Main entry point for archiving of output from one disk/system to another """
if not dacycle.has_key('task.rsync'): if 'task.rsync' not in dacycle:
logging.info('rsync task not found, not starting automatic backup...') logging.info('rsync task not found, not starting automatic backup...')
return return
else: else:
...@@ -374,7 +374,7 @@ def invert(dacycle, statevector, optimizer, obsoperator): ...@@ -374,7 +374,7 @@ def invert(dacycle, statevector, optimizer, obsoperator):
int(dacycle.dasystem['obs.spec.nr']), int(dacycle.dasystem['obs.spec.nr']),
dacycle.dasystem['datadir']) dacycle.dasystem['datadir'])
if not dacycle.dasystem.has_key('opt.algorithm'): if 'opt.algorithm' not in dacycle.dasystem:
logging.info("There was no minimum least squares algorithm specified in the DA System rc file (key : opt.algorithm)") logging.info("There was no minimum least squares algorithm specified in the DA System rc file (key : opt.algorithm)")
logging.info("...using serial algorithm as default...") logging.info("...using serial algorithm as default...")
optimizer.set_algorithm() optimizer.set_algorithm()
......
!!! Info for the CarbonTracker data assimilation system !!! Info for the CarbonTracker data assimilation system
datadir : /Storage/CO2/super004/STILT_model/Data datadir : /home/awoude/ffdas/test/Data
! list of all observation sites ! list of all observation sites
obs.input.id : obsfiles.csv obs.input.id : obsfiles.csv
......
...@@ -103,11 +103,9 @@ class STILTObservationOperator(ObservationOperator): ...@@ -103,11 +103,9 @@ class STILTObservationOperator(ObservationOperator):
obs_ids = [b''.join(obs_id).decode() for obs_id in obs_ids] obs_ids = [b''.join(obs_id).decode() for obs_id in obs_ids]
self.tracer, self.site, self.obs_id = [], [], [] self.tracer, self.site, self.obs_id = [], [], []
for obs_id in obs_ids: for obs_id in obs_ids:
tracer, site, *_ = obs_id.split('~')[1].split('_') tracer, site, *_ = obs_id.split('-')[0].split('_')
self.tracer.append(tracer) self.tracer.append(tracer)
self.site.append(site) self.site.append(site)
self.obs_id.append(obs_id.split('~')[-1])
# Times and latitude and longitudes are static; only need once
self.times = infile['date_components'][:] self.times = infile['date_components'][:]
self.lat = infile['latitude'][:] self.lat = infile['latitude'][:]
...@@ -357,7 +355,7 @@ class STILTObservationOperator(ObservationOperator): ...@@ -357,7 +355,7 @@ class STILTObservationOperator(ObservationOperator):
# Add the simulated concentrations # Add the simulated concentrations
dimmember = f.createDimension('nmembers', size=self.dacycle['da.optimizer.nmembers']) dimmember = f.createDimension('nmembers', size=self.dacycle['da.optimizer.nmembers'])
savedict = io.std_savedict.copy() savedict = io.std_savedict.copy()
savedict['name'] = "simulated" savedict['name'] = "flask"
savedict['dtype'] = "float" savedict['dtype'] = "float"
savedict['long_name'] = "Simulated_concentration" savedict['long_name'] = "Simulated_concentration"
savedict['units'] = "mol mol-1" savedict['units'] = "mol mol-1"
......
#!/usr/bin/env python
# control.py
"""
Author : peters
Revision History:
File created on 26 Aug 2010.
Adapted by super004 on 26 Jan 2017.
"""
import logging
################### Begin Class CO2DaSystem ###################
from da.baseclasses.dasystem import DaSystem
class CO2DaSystem(DaSystem):
    """Information on the data assimilation system used.

    This is normally backed by an rc-file with settings; the instance
    behaves as a mapping of rc-keys to values.
    """

    def validate(self):
        """Validate the contents of the rc-file against the required keys.

        Converts the textual booleans 'True'/'False' read from the rc-file
        into real Python booleans, then warns (without raising) about any
        required key that is missing, so the run continues with whatever
        settings are present.
        """
        needed_rc_items = ['obs.input.id',
                           'obs.input.nr',
                           'obs.spec.nr',
                           'obs.cat.nr',
                           'nparameters',
                           'random.seed',
                           'emis.pparam',
                           'ff.covariance',
                           'obs.bgswitch',
                           'obs.background',
                           'emis.input.spatial',
                           'emis.input.tempobs',
                           'emis.input.tempprior',
                           'emis.paramfile',
                           'emis.paramfile2',
                           'run.emisflag',
                           'run.emisflagens',
                           'run.obsflag']

        # py3 fix: dict.iteritems() no longer exists; use items().
        # Iterate over a snapshot (list) because we mutate self in the loop.
        for k, v in list(self.items()):
            if v == 'True':
                self[k] = True
            if v == 'False':
                self[k] = False

        # py3 fix: dict.has_key() was removed; use the `in` operator.
        for key in needed_rc_items:
            if key not in self:
                logging.warning('Missing a required value in rc-file : %s' % key)
        logging.debug('DA System Info settings have been validated successfully')
################### End Class CO2DaSystem ###################
# Module is intended for import only; no standalone behavior when executed.
if __name__ == "__main__":
    pass
#!/usr/bin/env python
# stilt_tools.py
"""
Author : I. Super
Revision History:
Newly developed code, September 2017
This module holds an emission model that prepares emission files used by the observation operator and
to create pseudo-data
"""
import shutil
import os
import logging
import datetime as dtm
import numpy as np
from numpy import array, logical_and
import da.tools.io4 as io
import math
import da.tools.rc as rc
from da.tools.general import create_dirs, to_datetime
identifier = 'EmissionModel ensemble '  # human-readable name used by the framework (e.g. in logging)
version = '1.0'  # module version string
################### Begin Class Emission model ###################
class EmisModel(object):
def __init__(self, dacycle=None):
if dacycle != None:
self.dacycle = dacycle
else:
self.dacycle = {}
def setup(self, dacycle):
self.dacycle = dacycle
self.startdate = self.dacycle['time.fxstart']
self.enddate = self.dacycle['time.finish']
self.emisdir = dacycle.dasystem['datadir']
self.proxyfile = dacycle.dasystem['emis.input.spatial']
self.tempfileo = dacycle.dasystem['emis.input.tempobs']
self.tempfilep = dacycle.dasystem['emis.input.tempprior']
self.btime = int(dacycle.dasystem['run.backtime'])
self.obsfile = dacycle.dasystem['obs.input.id']
self.nrspc = int(dacycle.dasystem['obs.spec.nr'])
self.nrcat = int(dacycle.dasystem['obs.cat.nr'])
self.nparams = int(dacycle.dasystem['nparameters'])
self.nmembers = int(dacycle['da.optimizer.nmembers'])
self.pparam = dacycle.dasystem['emis.pparam']
self.paramfile = dacycle.dasystem['emis.paramfile']
#self.paramfile2 = dacycle.dasystem['emis.paramfile2']
def get_emis(self, dacycle, psdo):