Commit 4face3de authored by karolina

minor changes

parent 56c61c3e
@@ -74,8 +74,10 @@ class DaSystem(dict):
         needed_rc_items = {}
         for k, v in self.iteritems():
-            if v == 'True' : self[k] = True
-            if v == 'False': self[k] = False
+            if v == 'True' :
+                self[k] = True
+            if v == 'False':
+                self[k] = False
         for key in needed_rc_items:
             if not self.has_key(key):
......
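For context (not part of the commit): the reshaped block above converts rc-file flag strings to Python booleans. A minimal standalone sketch of that behaviour, using a made-up settings dict and Python 3 dict methods:

# Sketch only: mimics the string-to-bool conversion in the hunk above.
settings = {'obs.rejection': 'True', 'writecycle': 'False', 'nlag': '3'}  # hypothetical rc entries
for k, v in settings.items():
    if v == 'True':
        settings[k] = True
    if v == 'False':
        settings[k] = False
# settings is now {'obs.rejection': True, 'writecycle': False, 'nlag': '3'}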
@@ -14,6 +14,10 @@ import os
 import sys
 import logging
 import datetime
+import numpy as np
+import numpy.linalg as la
+import da.tools.io4 as io
 identifier = 'Optimizer baseclass'
 version = '0.0'
@@ -43,7 +47,6 @@ class Optimizer(object):
     def create_matrices(self):
         """ Create Matrix space needed in optimization routine """
-        import numpy as np
         # mean state [X]
         self.x = np.zeros((self.nlag * self.nparams,), float)
@@ -63,7 +66,7 @@ class Optimizer(object):
             self.R = np.zeros((self.nobs,), float)
             self.HPHR = np.zeros((self.nobs,), float)
         else:
-            self.R = np.zeros((self.nobs,self.nobs,), float)
+            self.R = np.zeros((self.nobs, self.nobs,), float)
             self.HPHR = np.zeros((self.nobs, self.nobs,), float)
         # localization of obs
         self.may_localize = np.zeros(self.nobs, bool)
@@ -83,8 +86,6 @@ class Optimizer(object):
         self.KG = np.zeros((self.nlag * self.nparams, self.nobs,), float)
     def state_to_matrix(self, StateVector):
-        import numpy as np
         allsites = []  # collect all obs for n=1,..,nlag
         allobs = []  # collect all obs for n=1,..,nlag
         allmdm = []  # collect all mdm for n=1,..,nlag
@@ -101,7 +102,7 @@ class Optimizer(object):
             self.x[n * self.nparams:(n + 1) * self.nparams] = members[0].ParameterValues
             self.X_prime[n * self.nparams:(n + 1) * self.nparams, :] = np.transpose(np.array([m.ParameterValues for m in members]))
-            if Samples != None:
+            if Samples != None:  #LU equivalent to "if Samples:"
                 self.rejection_threshold = Samples.rejection_threshold
                 allreject.extend(Samples.getvalues('may_reject'))
@@ -115,7 +116,7 @@ class Optimizer(object):
                 simulatedensemble = Samples.getvalues('simulated')
-                if allsimulated == None :
+                if allsimulated == None :  #LU if allsimulated: np.concatenate; else: np.array
                     allsimulated = np.array(simulatedensemble)
                 else:
                     allsimulated = np.concatenate((allsimulated, np.array(simulatedensemble)), axis=0)
@@ -139,7 +140,7 @@ class Optimizer(object):
                 self.R[i] = mdm ** 2
         else:
             for i, mdm in enumerate(allmdm):
-                self.R[i,i] = mdm ** 2
+                self.R[i, i] = mdm ** 2
     def matrix_to_state(self, StateVector):
         for n in range(self.nlag):
@@ -164,8 +165,6 @@ class Optimizer(object):
            The type designation refers to the writing of prior or posterior data and is used in naming the variables"
         """
-        import da.tools.io4 as io
-        #import da.tools.io as io
         outdir = DaCycle['dir.diagnostics']
         filename = os.path.join(outdir, 'optimizer.%s.nc' % DaCycle['time.start'].strftime('%Y%m%d'))
@@ -309,8 +308,6 @@ class Optimizer(object):
     def serial_minimum_least_squares(self):
         """ Make minimum least squares solution by looping over obs"""
-        import numpy as np
         for n in range(self.nobs):
             # Screen for flagged observations (for instance site not found, or no sample written from model)
@@ -336,10 +333,10 @@ class Optimizer(object):
             self.KG[:, n] = PHt / self.HPHR[n]
             if self.may_localize[n]:
-                logging.debug('Trying to localize observation %s, %i' % (self.sitecode[n],self.obs_ids[n]))
+                logging.debug('Trying to localize observation %s, %i' % (self.sitecode[n], self.obs_ids[n]))
                 self.localize(n)
             else:
-                logging.debug('Not allowed to localize observation %s, %i' % (self.sitecode[n],self.obs_ids[n]))
+                logging.debug('Not allowed to localize observation %s, %i' % (self.sitecode[n], self.obs_ids[n]))
             alpha = np.double(1.0) / (np.double(1.0) + np.sqrt((self.R[n]) / self.HPHR[n]))
@@ -366,8 +363,7 @@ class Optimizer(object):
     def bulk_minimum_least_squares(self):
         """ Make minimum least squares solution by solving matrix equations"""
-        import numpy as np
-        import numpy.linalg as la
         # Create full solution, first calculate the mean of the posterior analysis
@@ -411,8 +407,7 @@ class Optimizer(object):
     def localize(self, n):
         """ localize the Kalman Gain matrix """
-        logging.debug('Not localized observation %s, %i' % (self.sitecode[n],self.obs_ids[n]))
-        pass
+        logging.debug('Not localized observation %s, %i' % (self.sitecode[n], self.obs_ids[n]))
     def set_algorithm(self):
         self.algorithm = 'Serial'
@@ -429,7 +424,6 @@ if __name__ == "__main__":
     from da.tools.initexit import start_logger
    from da.tools.initexit import CycleControl
     from da.ct.obs import CtObservations
-    import numpy as np
     opts = ['-v']
     args = {'rc':'../../da.rc', 'logfile':'da_initexit.log', 'jobrcfilename':'test.rc'}
......
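For context (not part of the commit): the serial_minimum_least_squares hunks above show the per-observation Kalman gain and the square-root-filter scaling factor. A minimal NumPy sketch of those two visible lines, with made-up scalar inputs:

import numpy as np

HPHR_n = 4.0                    # hypothetical scalar: HPH^T + R for observation n
R_n = 1.0                       # hypothetical observation error variance
PHt = np.array([0.8, -0.2])     # hypothetical state-observation covariance column

KG_n = PHt / HPHR_n                                                # Kalman gain column, as in the hunk above
alpha = np.double(1.0) / (np.double(1.0) + np.sqrt(R_n / HPHR_n))  # deviation scaling, as in the hunk above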
@@ -282,7 +282,7 @@ class StateVector(object):
         logging.debug('%d new ensemble members were added to the state vector # %d' % (self.nmembers, lag))
-    def propagate(self):
+    def propagate(self, startdate, cycle):
         """
         :rtype: None
@@ -301,7 +301,7 @@ class StateVector(object):
         self.EnsembleMembers.append([])
         # And now create a new time step of mean + members for n=nlag
-        date = self.DaCycle['time.start'] + timedelta(days=(self.nlag - 0.5) * int(self.DaCycle['time.cycle']))
+        date = startdate + timedelta(days=(self.nlag - 0.5) * int(cycle))
         cov = self.get_covariance(date)
         self.make_new_ensemble(self.nlag, cov)
@@ -401,7 +401,7 @@ class StateVector(object):
         logging.info('Successfully read the State Vector from file (%s) ' % filename)
-    def write_members_to_file(self, lag):
+    def write_members_to_file(self, lag, outdir):
         """
        :param: lag: Which lag step of the filter to write, must lie in range [1,...,nlag]
        :rtype: None
@@ -423,7 +423,6 @@ class StateVector(object):
         #import da.tools.io as io
         #import da.tools.io4 as io
-        outdir = self.DaCycle['dir.input']
         members = self.EnsembleMembers[lag - 1]
         for mem in members:
......
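For context (not part of the commit): with the new signatures above, the DaCycle lookups move to the callers. Hypothetical call sites, for illustration only (the real callers are not shown in this diff):

# Illustration only; StateVector and DaCycle come from the surrounding pipeline code.
StateVector.propagate(DaCycle['time.start'], DaCycle['time.cycle'])
StateVector.write_members_to_file(1, DaCycle['dir.input'])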
@@ -318,7 +318,7 @@ class ObsPackObservations(Observation):
             species, site, method, lab, nr = os.path.split(obs.fromfile)[-1].split('_')
-            identifier = "%s_%02d_%s" % (site, int(lab), method,)
+            identifier = "%s_%02d_%s" % (site, int(lab), method)
             identifier = name_convert(name="%s_%s_%s" % (site.lower(), method.lower(), lab.lower(),), to='GV')
......
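For context (not part of the commit): the % formatting above builds identifiers like the following; the values are made up for illustration.

# Illustration only, with hypothetical values.
site, lab, method = 'mlo', '1', 'insitu'
identifier = "%s_%02d_%s" % (site, int(lab), method)   # -> 'mlo_01_insitu'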
@@ -118,12 +118,12 @@ class CtStateVector(StateVector):
         for n in range(self.nlag):
             if qual == 'opt':
-                MeanState = f.get_variable('xac_%02d'%(n+1))
-                EnsembleMembers = f.get_variable('adX_%02d'%(n+1))
+                MeanState = f.get_variable('xac_%02d' % (n + 1))
+                EnsembleMembers = f.get_variable('adX_%02d' % (n + 1))
             elif qual == 'prior':
-                MeanState = f.get_variable('xpc_%02d'%(n+1))
-                EnsembleMembers = f.get_variable('pdX_%02d'%(n+1))
+                MeanState = f.get_variable('xpc_%02d' % (n + 1))
+                EnsembleMembers = f.get_variable('pdX_%02d' % (n + 1))
             if not self.EnsembleMembers[n] == []:
                 self.EnsembleMembers[n] = []
......
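For context (not part of the commit): the per-lag variable names read above use a zero-padded '%02d' suffix. A one-line illustration:

# Illustration only: the zero-padded lag suffix used in the hunk above.
names = ['xac_%02d' % (n + 1) for n in range(3)]   # -> ['xac_01', 'xac_02', 'xac_03']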
@@ -7,82 +7,70 @@
 import sys
 import os
 import logging
-dummy = sys.path.append(os.getcwd())
+sys.path.append(os.getcwd())
 #################################################################################################
 # Next, import the tools needed to initialize a data assimilation cycle
 #################################################################################################
-from da.tools.initexit import start_logger
-from da.tools.initexit import validate_opts_args
-from da.tools.initexit import parse_options
+from da.tools.initexit import start_logger, validate_opts_args, parse_options, CycleControl
+from da.tools.pipeline import ensemble_smoother_pipeline
+from da.platform.maunaloa import MaunaloaPlatForm
+from da.ct.dasystem import CtDaSystem
+from da.ct.statevector import CtStateVector
+#from da.ct.obspack import ObsPackObservations
+from da.ct.obs import CtObservations
+from da.tm5.observationoperator import TM5ObservationOperator
+from da.ct.optimizer import CtOptimizer
+from da.analysis.expand_fluxes import save_weekly_avg_1x1_data, save_weekly_avg_state_data, save_weekly_avg_tc_data, save_weekly_avg_ext_tc_data
+from da.analysis.expand_mixingratios import write_mixing_ratios
 #################################################################################################
 # Parse and validate the command line options, start logging
 #################################################################################################
-dummy = start_logger()
-opts, args = parse_options()
-opts,args = validate_opts_args(opts,args)
+start_logger()
+opts, args = validate_opts_args(parse_options())
 #################################################################################################
 # Create the Cycle Control object for this job
 #################################################################################################
-from da.tools.initexit import CycleControl
-DaCycle = CycleControl(opts,args)
-###########################################################################################
-### IMPORT THE APPLICATION SPECIFIC MODULES HERE, TO BE PASSED INTO THE MAIN PIPELINE!!! ##
-###########################################################################################
-from da.tools.pipeline import ensemble_smoother_pipeline
-from da.platform.maunaloa import MaunaloaPlatForm
-from da.ct.dasystem import CtDaSystem
-from da.ct.statevector import CtStateVector
-#from da.ct.obspack import ObsPackObservations
-from da.ct.obs import CtObservations
-from da.tm5.observationoperator import TM5ObservationOperator
-from da.ct.optimizer import CtOptimizer
-PlatForm = MaunaloaPlatForm()
-DaSystem = CtDaSystem(DaCycle['da.system.rc'])
+DaCycle = CycleControl(opts, args)
+PlatForm = MaunaloaPlatForm()
+DaSystem = CtDaSystem(DaCycle['da.system.rc'])
 ObsOperator = TM5ObservationOperator(DaCycle['da.obsoperator.rc'])
 #Samples = ObsPackObservations()
 Samples = CtObservations()
 StateVector = CtStateVector()
 Optimizer = CtOptimizer()
 ##########################################################################################
 ################### ENTER THE PIPELINE WITH THE OBJECTS PASSED BY THE USER ###############
 ##########################################################################################
-from da.tools.pipeline import header,footer
-msg = header+"Entering Pipeline "+footer ; logging.info(msg)
-ensemble_smoother_pipeline(DaCycle,PlatForm, DaSystem, Samples,StateVector,ObsOperator,Optimizer)
+from da.tools.pipeline import header, footer
+logging.info(header + "Entering Pipeline " + footer)
+ensemble_smoother_pipeline(DaCycle, PlatForm, DaSystem, Samples, StateVector, ObsOperator, Optimizer)
 ##########################################################################################
 ################### All done, extra stuff can be added next, such as analysis
 ##########################################################################################
-msg = header+"Starting analysis"+footer ; logging.info(msg)
-from da.analysis.expand_fluxes import save_weekly_avg_1x1_data
-from da.analysis.expand_fluxes import save_weekly_avg_state_data
-from da.analysis.expand_fluxes import save_weekly_avg_tc_data
-from da.analysis.expand_fluxes import save_weekly_avg_ext_tc_data
-from da.analysis.expand_mixingratios import write_mixing_ratios
-savedas = save_weekly_avg_1x1_data(DaCycle, StateVector)
-savedas = save_weekly_avg_state_data(DaCycle, StateVector)
-savedas = save_weekly_avg_tc_data(DaCycle, StateVector)
-savedas = save_weekly_avg_ext_tc_data(DaCycle)
-savedas = write_mixing_ratios(DaCycle)
+logging.info(header + "Starting analysis" + footer)
+save_weekly_avg_1x1_data(DaCycle, StateVector)
+save_weekly_avg_state_data(DaCycle, StateVector)
+save_weekly_avg_tc_data(DaCycle, StateVector)
+save_weekly_avg_ext_tc_data(DaCycle)
+write_mixing_ratios(DaCycle)
 sys.exit(0)
......
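For context (not part of the commit): the nested call opts, args = validate_opts_args(parse_options()) passes the whole (opts, args) tuple returned by parse_options as a single argument, so it only works if validate_opts_args accepts and returns that tuple. A minimal sketch of such a pair, with hypothetical bodies; the real functions live in da.tools.initexit and may differ:

# Sketch only, not the real da.tools.initexit implementation.
def parse_options():
    opts, args = ['-v'], {'rc': 'da.rc'}      # hypothetical parsed values
    return opts, args

def validate_opts_args(optsargs):
    opts, args = optsargs                     # unpack the (opts, args) tuple passed in
    return opts, args

opts, args = validate_opts_args(parse_options())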
@@ -7,80 +7,68 @@
 import sys
 import os
 import logging
-dummy = sys.path.append(os.getcwd())
+sys.path.append(os.getcwd())
 #################################################################################################
 # Next, import the tools needed to initialize a data assimilation cycle
 #################################################################################################
-from da.tools.initexit import start_logger
-from da.tools.initexit import validate_opts_args
-from da.tools.initexit import parse_options
+from da.tools.initexit import start_logger, parse_options, validate_opts_args, CycleControl
+from da.tools.pipeline import ensemble_smoother_pipeline
+from da.platform.maunaloa import MaunaloaPlatForm
+from da.ctgridded.dasystem import CtGriddedDaSystem
+from da.ctgridded.statevector import CtGriddedStateVector
+from da.ct.obs import CtObservations
+from da.tm5.observationoperator import TM5ObservationOperator
+from da.ct.optimizer import CtOptimizer
+from da.tools.pipeline import header, footer
+from da.analysis.expand_fluxes import save_weekly_avg_1x1_data, save_weekly_avg_state_data, save_weekly_avg_tc_data, save_weekly_avg_ext_tc_data
+from da.analysis.expand_mixingratios import write_mixing_ratios
 #################################################################################################
 # Parse and validate the command line options, start logging
 #################################################################################################
-dummy = start_logger()
-opts, args = parse_options()
-opts,args = validate_opts_args(opts,args)
+start_logger()
+opts, args = validate_opts_args(parse_options())
 #################################################################################################
 # Create the Cycle Control object for this job
 #################################################################################################
-from da.tools.initexit import CycleControl
-DaCycle = CycleControl(opts,args)
-###########################################################################################
-### IMPORT THE APPLICATION SPECIFIC MODULES HERE, TO BE PASSED INTO THE MAIN PIPELINE!!! ##
-###########################################################################################
-from da.tools.pipeline import ensemble_smoother_pipeline
-from da.platform.maunaloa import MaunaloaPlatForm
-from da.ctgridded.dasystem import CtGriddedDaSystem
-from da.ctgridded.statevector import CtGriddedStateVector
-from da.ct.obs import CtObservations
-from da.tm5.observationoperator import TM5ObservationOperator
-from da.ct.optimizer import CtOptimizer
-PlatForm = MaunaloaPlatForm()
-DaSystem = CtGriddedDaSystem(DaCycle['da.system.rc'])
+DaCycle = CycleControl(opts, args)
+PlatForm = MaunaloaPlatForm()
+DaSystem = CtGriddedDaSystem(DaCycle['da.system.rc'])
 ObsOperator = TM5ObservationOperator(DaCycle['da.obsoperator.rc'])
 Samples = CtObservations()
 StateVector = CtGriddedStateVector()
 Optimizer = CtOptimizer()
 ##########################################################################################
 ################### ENTER THE PIPELINE WITH THE OBJECTS PASSED BY THE USER ###############
 ##########################################################################################
-from da.tools.pipeline import header,footer
-msg = header+"Entering Pipeline "+footer ; logging.info(msg)
-ensemble_smoother_pipeline(DaCycle,PlatForm, DaSystem, Samples,StateVector,ObsOperator,Optimizer)
+logging.info(header + "Entering Pipeline " + footer)
+ensemble_smoother_pipeline(DaCycle, PlatForm, DaSystem, Samples, StateVector, ObsOperator, Optimizer)
 ##########################################################################################
 ################### All done, extra stuff can be added next, such as analysis
 ##########################################################################################
-msg = header+"Starting analysis"+footer ; logging.info(msg)
-from da.analysis.expand_fluxes import save_weekly_avg_1x1_data
-from da.analysis.expand_fluxes import save_weekly_avg_state_data
-from da.analysis.expand_fluxes import save_weekly_avg_tc_data
-from da.analysis.expand_fluxes import save_weekly_avg_ext_tc_data
-from da.analysis.expand_mixingratios import write_mixing_ratios
-savedas = save_weekly_avg_1x1_data(DaCycle, StateVector)
-savedas = save_weekly_avg_state_data(DaCycle, StateVector)
-savedas = save_weekly_avg_tc_data(DaCycle, StateVector)
-savedas = save_weekly_avg_ext_tc_data(DaCycle)
-savedas = write_mixing_ratios(DaCycle)
+logging.info(header + "Starting analysis" + footer)
+save_weekly_avg_1x1_data(DaCycle, StateVector)
+save_weekly_avg_state_data(DaCycle, StateVector)
+save_weekly_avg_tc_data(DaCycle, StateVector)
+save_weekly_avg_ext_tc_data(DaCycle)
+write_mixing_ratios(DaCycle)
 sys.exit(0)
......
@@ -7,60 +7,51 @@
 import sys
 import os
 import logging
-dummy = sys.path.append(os.getcwd())
+sys.path.append(os.getcwd())
 #################################################################################################
 # Next, import the tools needed to initialize a data assimilation cycle
 #################################################################################################
-from da.tools.initexit import start_logger
-from da.tools.initexit import validate_opts_args
-from da.tools.initexit import parse_options
+from da.tools.initexit import start_logger, validate_opts_args, parse_options, CycleControl
+from da.tools.pipeline import ensemble_smoother_pipeline
+from da.platform.jet import JetPlatForm
+from da.ct.dasystem import CtDaSystem
+from da.ct.statevector import CtStateVector
+from da.ct.obs import CtObservations
+from da.tm5.observationoperator import TM5ObservationOperator
+from da.ct.optimizer import CtOptimizer
+from da.tools.pipeline import header, footer
 #################################################################################################
 # Parse and validate the command line options, start logging
 #################################################################################################