Commit d82f30c8 authored by Florentie, Liesbeth's avatar Florentie, Liesbeth
Browse files

added option for latitude-dependent mdm scaling for column observations

parent 83d93d63
......@@ -91,11 +91,11 @@ class TotalColumnObservations(Observations):
self.enddate = dacycle['time.sample.end']
# Path to the input data (daily files)
sat_files = dacycle.dasystem['obs.column.ncfile'].split(',')
sat_dirs = dacycle.dasystem['obs.column.input.dir'].split(',')
sat_files = dacycle.dasystem['obs.column.ncfile'].split(',')
sat_dirs = dacycle.dasystem['obs.column.input.dir'].split(',')
self.sat_dirs = []
self.sat_files = []
self.sat_dirs = []
self.sat_files = []
for i in range(len(sat_dirs)):
if not os.path.exists(sat_dirs[i].strip()):
msg = 'Could not find the required satellite input directory (%s) ' % sat_dirs[i]
......@@ -127,13 +127,20 @@ class TotalColumnObservations(Observations):
# Model data mismatch approach
self.mdm_calculation = dacycle.dasystem.get('obs.column.mdm.calculation')
logging.debug('mdm.calculation = %s' %self.mdm_calculation)
if not self.mdm_calculation in ['parametrization','empirical']:
logging.warning('No valid model data mismatch method found. Valid options are \'parametrization\' and \'empirical\'. ' + \
'Using a constant estimate for the model uncertainty of 1ppm everywhere.')
else:
logging.info('Model data mismatch approach = %s' %self.mdm_calculation)
self.mdm_weight = float(dacycle.dasystem['obs.column.mdm.weight'])
self.mdm_weight = float(dacycle.dasystem['obs.column.mdm.weight'])
# Scaling function for mdm to tweak relative weight obs w.r.t. surface obs: none, gaussian (gauss) or inverse gaussian (inv_gauss)
self.mdm_scaling_fnc = dacycle.dasystem.get('obs.column.mdm.scaling.function')
self.mdm_scaling_peak = float(dacycle.dasystem.get('obs.column.mdm.scaling.peak'))
self.mdm_scaling_mean = float(dacycle.dasystem.get('obs.column.mdm.scaling.mean'))
self.mdm_scaling_std = float(dacycle.dasystem.get('obs.column.mdm.scaling.std'))
logging.info('MDM uniform weight = %s, MDM scaling function = %s with parameters %s, %s, %s' %(str(self.mdm_weight), \
self.mdm_scaling_fnc,str(self.mdm_scaling_peak),str(self.mdm_scaling_mean),str(self.mdm_scaling_std)))
# Path to file with observation error settings for column observations
# Currently the same settings for all assimilated retrieval products: should this be one file per product?
......@@ -246,13 +253,23 @@ class TotalColumnObservations(Observations):
def gauss_correction(self, lat):
    """Return a latitude-dependent scaling factor for the model-data mismatch.

    The factor is 1 + peak * Gaussian(lat; mean, std): it peaks at
    (1 + mdm_scaling_peak) when lat equals mdm_scaling_mean and decays
    towards 1 far away from the mean latitude.
    """
    offset = lat - self.mdm_scaling_mean
    bump = np.exp(-offset**2 / (2.0 * self.mdm_scaling_std**2))
    return 1.0 + self.mdm_scaling_peak * bump
def inv_gauss_correction(self, lat):
    """Return an inverted-Gaussian latitude-dependent mdm scaling factor.

    Opposite profile of gauss_correction: the factor equals 1 exactly at
    lat == mdm_scaling_mean and approaches mdm_scaling_peak far from the
    mean latitude.
    """
    offset = lat - self.mdm_scaling_mean
    dip = np.exp(-offset**2 / (2.0 * self.mdm_scaling_std**2))
    return self.mdm_scaling_peak - (self.mdm_scaling_peak - 1.0) * dip
def add_model_data_mismatch(self, filename=None, advance=False):
""" This function is empty: model data mismatch calculation is done during sampling in observation operator (TM5) to enhance computational efficiency
(i.e. to prevent reading all soundings twice and writing large additional files)
"""
obs_weights = rc.read(self.obs_file)
self.rejection_threshold = int(obs_weights['obs.rejection.threshold'])
obs_data = rc.read(self.obs_file)
self.rejection_threshold = int(obs_data['obs.rejection.threshold'])
# At this point mdm is set to the measurement uncertainty only, added in the add_observations function.
# Here this value is used to set the combined mdm by adding an estimate for the model uncertainty as a sum of squares.
......@@ -261,11 +278,20 @@ class TotalColumnObservations(Observations):
# parametrization as used by Frederic Chevallier
if self.mdm_calculation == 'parametrization':
obs.mdm = self.mdm_weight * ( obs.mdm*obs.mdm + (0.8*np.exp((90.0+obs.lat)/300.0))**2 )**0.5
elif self.mdm_calculation == 'none':
obs.mdm = self.mdm_weight * obs.mdm
# empirical approach of Andy Jacobson, TO BE IMPLEMENTED
# elif self.mdm_calculation == 'empirical':
# obs.mdm = ...
else: # assume general model uncertainty of 1 ppm (arbitrary value)
obs.mdm = self.mdm_weight * ( obs.mdm*obs.mdm + 1**2 )**0.5
# latitude-dependent scaling of mdm to account for varying number of surface observations
# when combining column obs with flask obs
if self.mdm_scaling_fnc is 'gauss':
obs.mdm = obs.mdm * self.gauss_correction(obs.lat)
elif self.mdm_scaling_fnc is 'inv_gauss':
obs.mdm = obs.mdm * self.inv_gauss_correction(obs.lat)
del obs
meanmdm = np.average(np.array( [obs.mdm for obs in self.datalist] ))
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.