Commit f96c45ff authored by brunner's avatar brunner
Browse files

no bias correction - the whole Hx gets updated and the bias correction removes too much data

parent 8ffdbf8c
......@@ -23,6 +23,7 @@ File created on 28 Jul 2010.
"""
import logging
import sys
import numpy as np
import numpy.linalg as la
import da.tools.io4 as io
......@@ -313,10 +314,13 @@ class Optimizer(object):
def serial_minimum_least_squares(self):
""" Make minimum least squares solution by looping over obs"""
# Corrected for bias over all stations
bias = np.mean(self.obs) - np.mean(self.Hx)
for n in range(self.nobs):
# DON'T # Corrected for bias over all stations
# obs_clean = self.obs[self.obs>0]
# Hx_clean = self.Hx[self.obs>0]
# bias = np.mean(obs_clean) - np.mean(Hx_clean)
# logging.debug('Corrected for bias %f' % (bias))
for n in range(self.nobs):
# Screen for flagged observations (for instance site not found, or no sample written from model)
if self.flags[n] != 0:
......@@ -325,14 +329,17 @@ class Optimizer(object):
# Screen for outliers greater than 3x model-data mismatch, only apply if obs may be rejected
# Calculate residual for rejecting the observations (corrected for bias) - res_rej
res_rej = self.obs[n] - self.Hx[n] - bias
# res_rej = self.obs[n] - self.Hx[n] - bias
res = self.obs[n] - self.Hx[n]
if self.may_reject[n]:
threshold = self.rejection_threshold * np.sqrt(self.R[n])
#if np.abs(res) > threshold + abs(bias):
if np.abs(res_rej) > threshold:
if np.abs(res) > threshold:
# if np.abs(res) > threshold + abs(bias):
# if np.abs(res_rej) > threshold:
logging.debug('Rejecting observation (%s,%i) because residual (%f) exceeds threshold (%f)' % (self.sitecode[n], self.obs_ids[n], res, threshold + abs(bias)))
# logging.debug('Rejecting observation (%s,%i) because residual (%f) exceeds threshold (%f)' % (self.sitecode[n], self.obs_ids[n], res_rej, threshold))
self.flags[n] = 2
continue
......
......@@ -128,28 +128,28 @@ class ObservationOperator(object):
logging.info('Starting COSMO')
os.system('python run_chain.py '+self.dacycle['run.name']+' '+abs_start_time_ch+' '+str(starth+lag*168)+' '+str(endh+lag*168)+' -j meteo icbc int2lm post_int2lm oae octe online_vprm cosmo')
# os.system('python run_chain.py '+self.dacycle['run.name']+' '+abs_start_time_ch+' '+str(starth+lag*168)+' '+str(endh+lag*168)+' -j meteo icbc int2lm post_int2lm oae octe online_vprm cosmo -f')
logging.info('COSMO done!')
# Here the extraction of COSMO output starts
dicts = self.read_csv(dacycle)
rlat, rlon, dicts, path_in = self.get_hhl_data(dacycle, lag, 'lffd'+abs_start_time+'c.nc', dicts, starth, endh)
# rlat, rlon, dicts, path_in = self.get_hhl_data(dacycle, lag, 'lffd'+abs_start_time+'c.nc', dicts, starth, endh)
logging.info('Starting parallel extraction \m/')
args = [
(dacycle, dacycle['time.sample.start']+timedelta(hours = 24*n), dicts, rlat, rlon, path_in)
for n in range(self.days)
]
# args = [
# (dacycle, dacycle['time.sample.start']+timedelta(hours = 24*n), dicts, rlat, rlon, path_in)
# for n in range(self.days)
# ]
with Pool(self.days) as pool:
pool.starmap(self.get_cosmo_data, args)
# with Pool(self.days) as pool:
# pool.starmap(self.get_cosmo_data, args)
logging.info('Finished parallel extraction \m/')
self.cat_cosmo_data(advance, dacycle)
# self.cat_cosmo_data(advance, dacycle)
for i in range(0,self.forecast_nmembers):
for i in range(self.forecast_nmembers):
idx = str(i+1).zfill(3)
cosmo_file = os.path.join(self.dacycle['dir.ct_save'], 'Hx_'+idx+'_%s.nc' % dacycle['time.sample.stamp'])
ifile = Dataset(cosmo_file, mode='r')
......@@ -157,6 +157,9 @@ class ObservationOperator(object):
ifile.close()
for j,data in enumerate(zip(ids,obs,mdm)):
print('j', j)
print('data', data)
print('model data', model_data[0,j])
f.variables['obs_num'][j] = data[0]
f.variables['flask'][j,:] = model_data[:,j]
f.close()
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment