NearRealTimeCTDAS / CTDAS / Commits

Commit 8ffdbf8c, authored Aug 29, 2019 by brunner

no cdo afternoon mean

parent 640e5e92
Changes: 8 files
config.rc

@@ -29,17 +29,16 @@
 ! The time for which to start and end the data assimilation experiment in format YYYY-MM-DD HH:MM:SS
 ! the following 3 lines are for initial start
-time.start : 2019-01-01 00:00:00
-time.finish : 2019-01-07 23:00:00
-time.end : 2019-01-07 23:00:00
-abs.time.start : 2019-03-01 00:00:00
+time.start : 2019-03-01 00:00:00
+time.finish : 2019-03-07 23:00:00
+time.end : 2019-03-07 23:00:00
+abs.time.start : 2019-01-01 00:00:00
 ! Whether to restart the CTDAS system from a previous cycle, or to start the sequence fresh. Valid entries are T/F/True/False/TRUE/FALSE
 time.restart : F
-!da.restart.tstamp : 2013-01-08 00:00:00
-da.restart.tstamp : 2013-01-01 00:00:00
+!da.restart.tstamp : 2019-03-08 00:00:00
+da.restart.tstamp : 2019-03-01 00:00:00
 ! The length of a cycle is given in days, such that the integer 7 denotes the typically used weekly cycle. Valid entries are integers > 1

@@ -54,6 +53,7 @@ time.nlag : 2
 run.name : real
 dir.da_run : /scratch/snx3000/parsenov/${run.name}
+dir.ct_save : /store/empa/em05/parsenov/ct_data/${run.name}/
 restartmap.dir : ${dir.da_run}/input
 ! The resources used to complete the data assimilation experiment. This depends on your computing platform.

@@ -86,6 +86,7 @@ da.system : CarbonTracker
 ! The specific settings for your system are read from a separate rc-file, which points to the data directories, observations, etc
 da.system.rc : da/rc/carbontracker_cosmo.rc
+locations : /store/empa/em05/parsenov/ct_data/locations.csv
 ! This flag should probably be moved to the da.system.rc file. It denotes which type of filtering to use in the optimizer
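Note on the new dates: the cycle window follows directly from time.start and the cycle length. A small sketch, not part of this commit (key names are taken from config.rc above; the time.cycle value of 7 is assumed from the weekly-cycle comment):

    # Sketch: deriving the first cycle window from the rc keys above (assumed time.cycle = 7)
    from datetime import datetime, timedelta

    rc = {'time.start': '2019-03-01 00:00:00', 'time.cycle': 7}

    start = datetime.strptime(rc['time.start'], '%Y-%m-%d %H:%M:%S')
    finish = start + timedelta(days=int(rc['time.cycle'])) - timedelta(hours=1)
    print(start, '->', finish)   # 2019-03-01 00:00:00 -> 2019-03-07 23:00:00, matching time.finish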
cosmo.py

@@ -85,5 +85,3 @@ save_weekly_avg_ext_tc_data(dacycle)
 write_mole_fractions(dacycle)

 sys.exit(0)
da/cosmo/base_optimizer.py

@@ -313,6 +313,8 @@ class Optimizer(object):
     def serial_minimum_least_squares(self):
         """ Make minimum least squares solution by looping over obs"""
+        # Corrected for bias over all stations
+        bias = np.mean(self.obs) - np.mean(self.Hx)
         for n in range(self.nobs):
             # Screen for flagged observations (for instance site not found, or no sample written from model)

@@ -322,13 +324,15 @@ class Optimizer(object):
                 continue
             # Screen for outliers greather than 3x model-data mismatch, only apply if obs may be rejected
+            # Calculate residual for rejecting the observations (corrected for bias) - res_rej
+            res_rej = self.obs[n] - self.Hx[n] - bias
             res = self.obs[n] - self.Hx[n]
             if self.may_reject[n]:
                 threshold = self.rejection_threshold * np.sqrt(self.R[n])
-                if np.abs(res) > threshold:
-                    logging.debug('Rejecting observation (%s,%i) because residual (%f) exceeds threshold (%f)' % (self.sitecode[n], self.obs_ids[n], res, threshold))
+                #if np.abs(res) > threshold + abs(bias):
+                if np.abs(res_rej) > threshold:
+                    logging.debug('Rejecting observation (%s,%i) because residual (%f) exceeds threshold (%f)' % (self.sitecode[n], self.obs_ids[n], res, threshold + abs(bias)))
                     self.flags[n] = 2
                     continue
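The new res_rej screens outliers against a residual corrected for the mean model-data offset over all stations, so a uniform bias no longer rejects every observation. A standalone sketch with toy numbers (not CTDAS data):

    # Sketch of the bias-corrected rejection introduced above (illustrative arrays only)
    import numpy as np

    obs = np.array([410.5, 411.0, 410.2, 409.8, 418.0])   # hypothetical observations [ppm]
    Hx  = np.array([400.3, 400.9, 400.1, 399.7, 400.0])   # hypothetical simulated values [ppm]
    R   = np.ones(5)                                       # model-data-mismatch variance
    rejection_threshold = 3.0

    bias = np.mean(obs) - np.mean(Hx)                      # common offset, here ~11.7 ppm
    for n in range(obs.size):
        res_rej = obs[n] - Hx[n] - bias                    # residual after removing the offset
        threshold = rejection_threshold * np.sqrt(R[n])
        if np.abs(res_rej) > threshold:
            print('reject obs', n)                         # only n = 4 is flagged

Without the bias term every raw residual (about +10 ppm here) would exceed the 3-sigma threshold and all observations would be rejected.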
da/cosmo/covariances.py

@@ -64,17 +64,29 @@ class CO2StateVector(StateVector):
         fullcov = np.zeros(shape=(90,90))

-        partcov = np.array([ \
-        (0.64, 0.36, 0.16, 0.16, 0.16, 0.16, 0.04, 0.04, 0.04, 0.01), \
-        (0.36, 0.64, 0.16, 0.16, 0.16, 0.16, 0.04, 0.04, 0.04, 0.01), \
-        (0.16, 0.16, 0.64, 0.36, 0.16, 0.16, 0.04, 0.04, 0.04, 0.01), \
-        (0.16, 0.16, 0.36, 0.64, 0.16, 0.16, 0.04, 0.04, 0.04, 0.01), \
-        (0.16, 0.16, 0.16, 0.16, 0.64, 0.36, 0.04, 0.04, 0.04, 0.01), \
-        (0.16, 0.16, 0.16, 0.16, 0.36, 0.64, 0.04, 0.04, 0.04, 0.01), \
-        (0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.64, 0.16, 0.16, 0.16), \
-        (0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.16, 0.64, 0.16, 0.16), \
-        (0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.16, 0.16, 0.64, 0.16), \
-        (0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.16, 0.16, 0.16, 0.64) ])
+# partcov = np.array([ \
+# (0.64, 0.36, 0.16, 0.16, 0.16, 0.16, 0.04, 0.04, 0.04, 0.01), \
+# (0.36, 0.64, 0.16, 0.16, 0.16, 0.16, 0.04, 0.04, 0.04, 0.01), \
+# (0.16, 0.16, 0.64, 0.36, 0.16, 0.16, 0.04, 0.04, 0.04, 0.01), \
+# (0.16, 0.16, 0.36, 0.64, 0.16, 0.16, 0.04, 0.04, 0.04, 0.01), \
+# (0.16, 0.16, 0.16, 0.16, 0.64, 0.36, 0.04, 0.04, 0.04, 0.01), \
+# (0.16, 0.16, 0.16, 0.16, 0.36, 0.64, 0.04, 0.04, 0.04, 0.01), \
+# (0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.64, 0.16, 0.16, 0.16), \
+#(0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.16, 0.64, 0.16, 0.16), \
+# (0.04, 0.04, 0.04, 0.04, 0.04, 0.04, 0.16, 0.16, 0.64, 0.16), \
+# (0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.16, 0.16, 0.16, 0.64) ])
+        partcov = np.array([ \
+        (0.1089, 0.0900, 0.0900, 0.0900, 0.0900, 0.0900, 0.0900, 0.0900, 0.0900, 0.0900), \
+        (0.0900, 0.1089, 0.0900, 0.0900, 0.0900, 0.0900, 0.0900, 0.0900, 0.0900, 0.0900), \
+        (0.0900, 0.0900, 0.1089, 0.0900, 0.0900, 0.0900, 0.0900, 0.0900, 0.0900, 0.0900), \
+        (0.0900, 0.0900, 0.0900, 0.1089, 0.0900, 0.0900, 0.0900, 0.0900, 0.0900, 0.0900), \
+        (0.0900, 0.0900, 0.0900, 0.0900, 0.1089, 0.0900, 0.0900, 0.0900, 0.0900, 0.0900), \
+        (0.0900, 0.0900, 0.0900, 0.0900, 0.0900, 0.1089, 0.0900, 0.0900, 0.0900, 0.0900), \
+        (0.0900, 0.0900, 0.0900, 0.0900, 0.0900, 0.0900, 0.1089, 0.0900, 0.0900, 0.0900), \
+        (0.0900, 0.0900, 0.0900, 0.0900, 0.0900, 0.0900, 0.0900, 0.1089, 0.0900, 0.0900), \
+        (0.0900, 0.0900, 0.0900, 0.0900, 0.0900, 0.0900, 0.0900, 0.0900, 0.1089, 0.0900), \
+        (0.0900, 0.0900, 0.0900, 0.0900, 0.0900, 0.0900, 0.0900, 0.0900, 0.0900, 0.1089) ])

         # L = 300 km
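The new partcov block is simply 0.1089 (0.33 squared) on the diagonal and 0.0900 (0.30 squared) everywhere else. An equivalent, more compact way to build it, shown only as a sketch and not as code from this commit:

    # Sketch: the new 10x10 block as a constant matrix with a modified diagonal
    import numpy as np

    partcov = np.full((10, 10), 0.09)        # 0.0900 = 0.30**2 off the diagonal
    np.fill_diagonal(partcov, 0.1089)        # 0.1089 = 0.33**2 on the diagonal

    assert np.allclose(np.diag(partcov), 0.33**2)
    print(partcov[0, :3])                    # [0.1089 0.09   0.09  ]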
da/cosmo/observationoperator_cosmo.py

@@ -5,23 +5,22 @@ import logging
 import os
 import sys
 import subprocess
+import csv
 import da.cosmo.io4 as io
 import numpy as np
 from netCDF4 import Dataset
+from scipy import interpolate
 from datetime import datetime, timedelta
 from dateutil import rrule
-from cdo import *
 from . import site_height
-from da.cosmo.icbc4ctdas import ct
 from itertools import repeat
 from multiprocessing import Pool
 from da.tools.general import to_datetime
+import amrs.misc.transform as transform

 identifier = 'ObservationOperator'
 version = '10'

-#cdo = Cdo()
-cdo = Cdo(logging=True, logFile='cdo_commands.log')

 ################### Begin Class ObservationOperator ###################

 class ObservationOperator(object):
@@ -57,15 +56,19 @@ class ObservationOperator(object):
         self.simulated_file = os.path.join(self.outputdir, 'samples_simulated.%s.nc' % self.dacycle['time.sample.stamp'])
         self.forecast_nmembers = int(self.dacycle['da.optimizer.nmembers'])

     def run(self, lag, dacycle, statevector, advance=False):
         members = statevector.ensemble_members[lag]
         self.forecast_nmembers = int(self.dacycle['da.optimizer.nmembers'])
         self.nparams = int(self.dacycle['nparameters'])
-        absolute_start_time = str((to_datetime(dacycle['abs.time.start'])).strftime('%Y%m%d%H'))
-        absolute_start_time_ch = str((to_datetime(dacycle['abs.time.start'])).strftime('%Y-%m-%d'))
+        self.days = int(dacycle['time.cycle'])
+        abs_start_time = str((to_datetime(dacycle['abs.time.start'])).strftime('%Y%m%d%H'))
+        abs_start_time_ch = str((to_datetime(dacycle['abs.time.start'])).strftime('%Y-%m-%d'))
         starth = abs((to_datetime(dacycle['abs.time.start'])-dacycle['time.start']).days)*24
         endh = abs((to_datetime(dacycle['abs.time.start'])-dacycle['time.finish']).days)*24
+        start = dacycle['time.start']
+        end = dacycle['time.finish']

         f = io.CT_CDF(self.simulated_file, method='create')
         logging.debug('Creating new simulated observation file in ObservationOperator (%s)' % self.simulated_file)
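For reference, starth and endh are whole-day offsets in hours between abs.time.start and the current cycle window. With the first-cycle dates from config.rc in this commit the arithmetic works out as below (a standalone sketch, not repository code):

    # Sketch of the starth/endh arithmetic above, using the dates from config.rc
    from datetime import datetime

    abs_start = datetime(2019, 1, 1)            # abs.time.start
    cycle_start = datetime(2019, 3, 1)          # time.start (first cycle)
    cycle_finish = datetime(2019, 3, 7, 23)     # time.finish (first cycle)

    starth = abs((abs_start - cycle_start).days) * 24     # 1416
    endh = abs((abs_start - cycle_finish).days) * 24      # 1584 (timedelta.days rounds toward -inf)
    print(starth, endh)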
@@ -116,36 +119,41 @@ class ObservationOperator(object):
         for m in range(0, self.forecast_nmembers):
             co2[m,:] = members[m].param_values
+        # co2[co2<0] = 0.
         l[:] = co2
         ofile.close()
         os.system('cp '+self.lambda_file+' '+dacycle['da.vprm']+'/lambdas.nc')

         os.chdir(dacycle['da.obsoperator.home'])

-        if os.path.exists(dacycle['dir.da_run']+'/'+absolute_start_time+"_"+str(starth+lag*168)+"_"+str(endh+lag*168)+"/cosmo/output/"):
-            if os.path.exists(dacycle['dir.da_run']+"/non_opt_"+absolute_start_time+"_"+str(starth+lag*168)+"_"+str(endh+lag*168)+"/cosmo/output/"):
-                os.rename(dacycle['dir.da_run']+"/"+absolute_start_time+"_"+str(starth+lag*168)+"_"+str(endh+lag*168), dacycle['dir.da_run']+"/old_non_opt_"+dacycle['time.start'].strftime('%Y%m%d%H')+"_"+str(starth+lag*168)+"_"+str(endh+lag*168))
-            else:
-                os.rename(dacycle['dir.da_run']+"/"+absolute_start_time+"_"+str(starth+lag*168)+"_"+str(endh+lag*168), dacycle['dir.da_run']+"/non_opt_"+dacycle['time.start'].strftime('%Y%m%d%H')+"_"+str(starth+lag*168)+"_"+str(endh+lag*168))
-        os.system('python run_chain.py '+self.dacycle['run.name']+' '+absolute_start_time_ch+' '+str(starth+lag*168)+' '+str(endh+lag*168)+' -j meteo icbc int2lm post_int2lm oae octe online_vprm cosmo')
+        logging.info('Starting COSMO')
+        os.system('python run_chain.py '+self.dacycle['run.name']+' '+abs_start_time_ch+' '+str(starth+lag*168)+' '+str(endh+lag*168)+' -j meteo icbc int2lm post_int2lm oae octe online_vprm cosmo')
         logging.info('COSMO done!')
+        os.chdir(dacycle['dir.da_run'])

+        # Here the extraction of COSMO output starts
+        dicts = self.read_csv(dacycle)
+        rlat, rlon, dicts, path_in = self.get_hhl_data(dacycle, lag, 'lffd'+abs_start_time+'c.nc', dicts, starth, endh)
+        logging.info('Starting parallel extraction \m/')

-        args = [(dacycle, starth+168*lag, endh+168*lag-1, n) for n in range(1, self.forecast_nmembers+1)]
-        with Pool(self.forecast_nmembers) as pool:
-            pool.starmap(self.extract_model_data, args)
+        args = [(dacycle, dacycle['time.sample.start']+timedelta(hours=24*n), dicts, rlat, rlon, path_in) for n in range(self.days)]
+        with Pool(self.days) as pool:
+            pool.starmap(self.get_cosmo_data, args)
+        logging.info('Finished parallel extraction \m/')
+
+        self.cat_cosmo_data(advance, dacycle)

         for i in range(0, self.forecast_nmembers):
             idx = str(i+1).zfill(3)
-            cosmo_file = os.path.join('/store/empa/em05/parsenov/cosmo_data/model_'+idx+'_%s.nc' % dacycle['time.sample.stamp'])
+            cosmo_file = os.path.join(self.dacycle['dir.ct_save'], 'Hx_'+idx+'_%s.nc' % dacycle['time.sample.stamp'])
             ifile = Dataset(cosmo_file, mode='r')
-            model_data[i,:] = (np.squeeze(ifile.variables['CO2'][:])*29./44.01)*1E6 # in ppm
+            model_data[i,:] = np.squeeze(ifile.variables['CO2'][:])
             ifile.close()

         for j,data in enumerate(zip(ids, obs, mdm)):
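The old read-back line converted the extracted model CO2 from a mass mixing ratio to ppm on the fly; after this commit the Hx files written by get_cosmo_data are already in ppm, so a plain np.squeeze is enough. A minimal sketch of the dropped conversion, with an illustrative input value:

    # Sketch of the old kg/kg -> ppm conversion (29/44.01 is the air-to-CO2 molar mass ratio)
    import numpy as np

    co2_kgkg = np.array([6.2e-4])                 # hypothetical CO2 mass mixing ratio [kg/kg]
    co2_ppm = co2_kgkg * 29. / 44.01 * 1e6        # ~408 ppm
    print(co2_ppm)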
@@ -159,195 +167,261 @@ class ObservationOperator(object):
         self.prepare_run()
         self.run(lag, dacycle, statevector, advance)

-    def extract_model_data(self, dacycle, hstart, hstop, ensnum):
-
-        time_stamp = dacycle['time.sample.stamp']
-        abs_start_time = str((to_datetime(dacycle['abs.time.start'])).strftime('%Y%m%d%H'))
-        cosmo_out = dacycle['dir.da_run']+"/"+abs_start_time+"_"+str(hstart)+"_"+str(hstop+1)+"/cosmo/output/"
-        hhl_cosmo_out = dacycle['dir.da_run']+"/"+abs_start_time+"_0_168/cosmo/output/"
-        cosmo_save = "/store/empa/em05/parsenov/cosmo_data/"
-        hhl_fn = hhl_cosmo_out+'lffd'+abs_start_time+'c.nc'
-        ens = str(ensnum).zfill(3)
-
-        files2cat_albs = []
-        files2cat_bntg = []
-        files2cat_brm = []
-        files2cat_chri = []
-        files2cat_due1 = []
-        files2cat_esmo = []
-        files2cat_frob = []
-        files2cat_gimm = []
-        files2cat_hae = []
-        files2cat_laeg = []
-        files2cat_magn = []
-        files2cat_payn = []
-        files2cat_reck = []
-        files2cat_rig = []
-        files2cat_save = []
-        files2cat_semp = []
-        files2cat_sott = []
-        files2cat_ssal = []
-        files2cat_taen = []
-        files2cat_zhbr = []
-        files2cat_zsch = []
-        files2cat_zue = []
-
-        if ens == "001":
-            cdo.selname("HHL", input = hhl_fn, output = cosmo_out+"hhl.nc")
-            cdo.remapnn("lon=8.51_lat=47.31,", input = cosmo_out+"hhl.nc", output = cosmo_out+"hhl_albs.nc")
-            cdo.remapnn("lon=7.53_lat=46.98,", input = cosmo_out+"hhl.nc", output = cosmo_out+"hhl_bntg.nc")
-            cdo.remapnn("lon=8.18_lat=47.19,", input = cosmo_out+"hhl.nc", output = cosmo_out+"hhl_brm.nc")
-            cdo.remapnn("lon=7.69_lat=47.57,", input = cosmo_out+"hhl.nc", output = cosmo_out+"hhl_chri.nc")
-            cdo.remapnn("lon=8.61_lat=47.40,", input = cosmo_out+"hhl.nc", output = cosmo_out+"hhl_due1.nc")
-            cdo.remapnn("lon=8.57_lat=47.52,", input = cosmo_out+"hhl.nc", output = cosmo_out+"hhl_esmo.nc")
-            cdo.remapnn("lon=7.90_lat=47.38,", input = cosmo_out+"hhl.nc", output = cosmo_out+"hhl_frob.nc")
-            cdo.remapnn("lon=7.25_lat=47.05,", input = cosmo_out+"hhl.nc", output = cosmo_out+"hhl_gimm.nc")
-            cdo.remapnn("lon=7.82_lat=47.31,", input = cosmo_out+"hhl.nc", output = cosmo_out+"hhl_hae.nc")
-            cdo.remapnn("lon=8.40_lat=47.48,", input = cosmo_out+"hhl.nc", output = cosmo_out+"hhl_laeg.nc")
-            cdo.remapnn("lon=8.93_lat=46.16,", input = cosmo_out+"hhl.nc", output = cosmo_out+"hhl_magn.nc")
-            cdo.remapnn("lon=6.94_lat=46.81,", input = cosmo_out+"hhl.nc", output = cosmo_out+"hhl_payn.nc")
-            cdo.remapnn("lon=8.52_lat=47.43,", input = cosmo_out+"hhl.nc", output = cosmo_out+"hhl_reck.nc")
-            cdo.remapnn("lon=8.46_lat=47.07,", input = cosmo_out+"hhl.nc", output = cosmo_out+"hhl_rig.nc")
-            cdo.remapnn("lon=7.36_lat=46.24,", input = cosmo_out+"hhl.nc", output = cosmo_out+"hhl_save.nc")
-            cdo.remapnn("lon=8.21_lat=47.12,", input = cosmo_out+"hhl.nc", output = cosmo_out+"hhl_semp.nc")
-            cdo.remapnn("lon=6.74_lat=46.66,", input = cosmo_out+"hhl.nc", output = cosmo_out+"hhl_sott.nc")
-            cdo.remapnn("lon=8.95_lat=45.98,", input = cosmo_out+"hhl.nc", output = cosmo_out+"hhl_ssal.nc")
-            cdo.remapnn("lon=8.90_lat=47.48,", input = cosmo_out+"hhl.nc", output = cosmo_out+"hhl_taen.nc")
-            cdo.remapnn("lon=8.57_lat=47.38,", input = cosmo_out+"hhl.nc", output = cosmo_out+"hhl_zhbr.nc")
-            cdo.remapnn("lon=8.52_lat=47.37,", input = cosmo_out+"hhl.nc", output = cosmo_out+"hhl_zsch.nc")
-            cdo.remapnn("lon=8.53_lat=47.38,", input = cosmo_out+"hhl.nc", output = cosmo_out+"hhl_zue.nc")
-
-        for dt in rrule.rrule(rrule.HOURLY, dtstart=to_datetime(dacycle['abs.time.start'])+timedelta(hours=hstart), until=to_datetime(dacycle['abs.time.start'])+timedelta(hours=hstop)):
-            dt = dt.strftime('%Y%m%d%H')
-            if ens == "001":
-                logging.info('Extracting output for time %s' % (str(dt)))
-            co2_in_fn = cosmo_out+'lffd'+dt+'.nc'
-            co2_out_albs = cosmo_out+'CO2_albs_'+ens+'_'+dt+'.nc'
-            co2_out_bntg = cosmo_out+'CO2_bntg_'+ens+'_'+dt+'.nc'
-            co2_out_brm = cosmo_out+'CO2_brm_'+ens+'_'+dt+'.nc'
-            co2_out_chri = cosmo_out+'CO2_chri_'+ens+'_'+dt+'.nc'
-            co2_out_due1 = cosmo_out+'CO2_due1_'+ens+'_'+dt+'.nc'
-            co2_out_esmo = cosmo_out+'CO2_esmo_'+ens+'_'+dt+'.nc'
-            co2_out_frob = cosmo_out+'CO2_frob_'+ens+'_'+dt+'.nc'
-            co2_out_gimm = cosmo_out+'CO2_gimm_'+ens+'_'+dt+'.nc'
-            co2_out_hae = cosmo_out+'CO2_hae_'+ens+'_'+dt+'.nc'
-            co2_out_laeg = cosmo_out+'CO2_laeg_'+ens+'_'+dt+'.nc'
-            co2_out_magn = cosmo_out+'CO2_magn_'+ens+'_'+dt+'.nc'
-            co2_out_payn = cosmo_out+'CO2_payn_'+ens+'_'+dt+'.nc'
-            co2_out_reck = cosmo_out+'CO2_reck_'+ens+'_'+dt+'.nc'
-            co2_out_rig = cosmo_out+'CO2_rig_'+ens+'_'+dt+'.nc'
-            co2_out_save = cosmo_out+'CO2_save_'+ens+'_'+dt+'.nc'
-            co2_out_semp = cosmo_out+'CO2_semp_'+ens+'_'+dt+'.nc'
-            co2_out_sott = cosmo_out+'CO2_sott_'+ens+'_'+dt+'.nc'
-            co2_out_ssal = cosmo_out+'CO2_ssal_'+ens+'_'+dt+'.nc'
-            co2_out_taen = cosmo_out+'CO2_taen_'+ens+'_'+dt+'.nc'
-            co2_out_zhbr = cosmo_out+'CO2_zhbr_'+ens+'_'+dt+'.nc'
-            co2_out_zsch = cosmo_out+'CO2_zsch_'+ens+'_'+dt+'.nc'
-            co2_out_zue = cosmo_out+'CO2_zue_'+ens+'_'+dt+'.nc'
-            cdo.expr("'CO2=(CO2_BG"+ens+"-CO2_GPP"+ens+"+CO2_RA"+ens+"+CO2_A)/(1.-QV)'", input = "-remapnn,lon=8.51_lat=47.31 -selname,QV,CO2_BG"+ens+",CO2_GPP"+ens+",CO2_RA"+ens+",CO2_A "+co2_in_fn, output = co2_out_albs)
-            cdo.expr("'CO2=(CO2_BG"+ens+"-CO2_GPP"+ens+"+CO2_RA"+ens+"+CO2_A)/(1.-QV)'", input = "-remapnn,lon=7.53_lat=46.98 -selname,QV,CO2_BG"+ens+",CO2_GPP"+ens+",CO2_RA"+ens+",CO2_A "+co2_in_fn, output = co2_out_bntg)
-            cdo.expr("'CO2=(CO2_BG"+ens+"-CO2_GPP"+ens+"+CO2_RA"+ens+"+CO2_A)/(1.-QV)'", input = "-remapnn,lon=8.18_lat=47.19 -selname,QV,CO2_BG"+ens+",CO2_GPP"+ens+",CO2_RA"+ens+",CO2_A "+co2_in_fn, output = co2_out_brm)
-            cdo.expr("'CO2=(CO2_BG"+ens+"-CO2_GPP"+ens+"+CO2_RA"+ens+"+CO2_A)/(1.-QV)'", input = "-remapnn,lon=7.69_lat=47.57 -selname,QV,CO2_BG"+ens+",CO2_GPP"+ens+",CO2_RA"+ens+",CO2_A "+co2_in_fn, output = co2_out_chri)
-            cdo.expr("'CO2=(CO2_BG"+ens+"-CO2_GPP"+ens+"+CO2_RA"+ens+"+CO2_A)/(1.-QV)'", input = "-remapnn,lon=8.61_lat=47.40 -selname,QV,CO2_BG"+ens+",CO2_GPP"+ens+",CO2_RA"+ens+",CO2_A "+co2_in_fn, output = co2_out_due1)
-            cdo.expr("'CO2=(CO2_BG"+ens+"-CO2_GPP"+ens+"+CO2_RA"+ens+"+CO2_A)/(1.-QV)'", input = "-remapnn,lon=8.57_lat=47.52 -selname,QV,CO2_BG"+ens+",CO2_GPP"+ens+",CO2_RA"+ens+",CO2_A "+co2_in_fn, output = co2_out_esmo)
-            cdo.expr("'CO2=(CO2_BG"+ens+"-CO2_GPP"+ens+"+CO2_RA"+ens+"+CO2_A)/(1.-QV)'", input = "-remapnn,lon=7.90_lat=47.38 -selname,QV,CO2_BG"+ens+",CO2_GPP"+ens+",CO2_RA"+ens+",CO2_A "+co2_in_fn, output = co2_out_frob)
-            cdo.expr("'CO2=(CO2_BG"+ens+"-CO2_GPP"+ens+"+CO2_RA"+ens+"+CO2_A)/(1.-QV)'", input = "-remapnn,lon=7.25_lat=47.05 -selname,QV,CO2_BG"+ens+",CO2_GPP"+ens+",CO2_RA"+ens+",CO2_A "+co2_in_fn, output = co2_out_gimm)
-            cdo.expr("'CO2=(CO2_BG"+ens+"-CO2_GPP"+ens+"+CO2_RA"+ens+"+CO2_A)/(1.-QV)'", input = "-remapnn,lon=7.82_lat=47.31 -selname,QV,CO2_BG"+ens+",CO2_GPP"+ens+",CO2_RA"+ens+",CO2_A "+co2_in_fn, output = co2_out_hae)
-            cdo.expr("'CO2=(CO2_BG"+ens+"-CO2_GPP"+ens+"+CO2_RA"+ens+"+CO2_A)/(1.-QV)'", input = "-remapnn,lon=8.40_lat=47.48 -selname,QV,CO2_BG"+ens+",CO2_GPP"+ens+",CO2_RA"+ens+",CO2_A "+co2_in_fn, output = co2_out_laeg)
-            cdo.expr("'CO2=(CO2_BG"+ens+"-CO2_GPP"+ens+"+CO2_RA"+ens+"+CO2_A)/(1.-QV)'", input = "-remapnn,lon=8.93_lat=46.16 -selname,QV,CO2_BG"+ens+",CO2_GPP"+ens+",CO2_RA"+ens+",CO2_A "+co2_in_fn, output = co2_out_magn)
-            cdo.expr("'CO2=(CO2_BG"+ens+"-CO2_GPP"+ens+"+CO2_RA"+ens+"+CO2_A)/(1.-QV)'", input = "-remapnn,lon=6.94_lat=46.81 -selname,QV,CO2_BG"+ens+",CO2_GPP"+ens+",CO2_RA"+ens+",CO2_A "+co2_in_fn, output = co2_out_payn)
-            cdo.expr("'CO2=(CO2_BG"+ens+"-CO2_GPP"+ens+"+CO2_RA"+ens+"+CO2_A)/(1.-QV)'", input = "-remapnn,lon=8.52_lat=47.43 -selname,QV,CO2_BG"+ens+",CO2_GPP"+ens+",CO2_RA"+ens+",CO2_A "+co2_in_fn, output = co2_out_reck)
-            cdo.expr("'CO2=(CO2_BG"+ens+"-CO2_GPP"+ens+"+CO2_RA"+ens+"+CO2_A)/(1.-QV)'", input = "-remapnn,lon=8.46_lat=47.07 -selname,QV,CO2_BG"+ens+",CO2_GPP"+ens+",CO2_RA"+ens+",CO2_A "+co2_in_fn, output = co2_out_rig)
-            cdo.expr("'CO2=(CO2_BG"+ens+"-CO2_GPP"+ens+"+CO2_RA"+ens+"+CO2_A)/(1.-QV)'", input = "-remapnn,lon=7.36_lat=46.24 -selname,QV,CO2_BG"+ens+",CO2_GPP"+ens+",CO2_RA"+ens+",CO2_A "+co2_in_fn, output = co2_out_save)
-            cdo.expr("'CO2=(CO2_BG"+ens+"-CO2_GPP"+ens+"+CO2_RA"+ens+"+CO2_A)/(1.-QV)'", input = "-remapnn,lon=8.21_lat=47.12 -selname,QV,CO2_BG"+ens+",CO2_GPP"+ens+",CO2_RA"+ens+",CO2_A "+co2_in_fn, output = co2_out_semp)
-            cdo.expr("'CO2=(CO2_BG"+ens+"-CO2_GPP"+ens+"+CO2_RA"+ens+"+CO2_A)/(1.-QV)'", input = "-remapnn,lon=6.74_lat=46.66 -selname,QV,CO2_BG"+ens+",CO2_GPP"+ens+",CO2_RA"+ens+",CO2_A "+co2_in_fn, output = co2_out_sott)
-            cdo.expr("'CO2=(CO2_BG"+ens+"-CO2_GPP"+ens+"+CO2_RA"+ens+"+CO2_A)/(1.-QV)'", input = "-remapnn,lon=8.95_lat=45.98 -selname,QV,CO2_BG"+ens+",CO2_GPP"+ens+",CO2_RA"+ens+",CO2_A "+co2_in_fn, output = co2_out_ssal)
-            cdo.expr("'CO2=(CO2_BG"+ens+"-CO2_GPP"+ens+"+CO2_RA"+ens+"+CO2_A)/(1.-QV)'", input = "-remapnn,lon=8.90_lat=47.48 -selname,QV,CO2_BG"+ens+",CO2_GPP"+ens+",CO2_RA"+ens+",CO2_A "+co2_in_fn, output = co2_out_taen)
-            cdo.expr("'CO2=(CO2_BG"+ens+"-CO2_GPP"+ens+"+CO2_RA"+ens+"+CO2_A)/(1.-QV)'", input = "-remapnn,lon=8.57_lat=47.38 -selname,QV,CO2_BG"+ens+",CO2_GPP"+ens+",CO2_RA"+ens+",CO2_A "+co2_in_fn, output = co2_out_zhbr)
-            cdo.expr("'CO2=(CO2_BG"+ens+"-CO2_GPP"+ens+"+CO2_RA"+ens+"+CO2_A)/(1.-QV)'", input = "-remapnn,lon=8.52_lat=47.37 -selname,QV,CO2_BG"+ens+",CO2_GPP"+ens+",CO2_RA"+ens+",CO2_A "+co2_in_fn, output = co2_out_zsch)
-            cdo.expr("'CO2=(CO2_BG"+ens+"-CO2_GPP"+ens+"+CO2_RA"+ens+"+CO2_A)/(1.-QV)'", input = "-remapnn,lon=8.53_lat=47.38 -selname,QV,CO2_BG"+ens+",CO2_GPP"+ens+",CO2_RA"+ens+",CO2_A "+co2_in_fn, output = co2_out_zue)
-            files2cat_albs.append(co2_out_albs)
+    def read_csv(self, dacycle):
+        """Reads csv file where information about stations is written"""
+        self.dacycle = dacycle
+        ensnum = int(dacycle['da.optimizer.nmembers'])
+        csvfile = dacycle['locations']
+        dicts = []
+        with open(csvfile) as csv_file:
+            csv_reader = csv.reader(csv_file, delimiter=',')
+            for row in csv_reader:
+                for e in range(ensnum):
+                    e = str(e+1).zfill(3)
+                    dicts = np.append(dicts, {'ensnum':e, 'name':row[1], 'lon':row[2], 'lat':row[3], \
+                                              'rlon':None, 'rlat':None, 'h1':None, 'h2':None, \
+                                              'hidx1':None, 'hidx2':None, \
+                                              'alt':float(row[4])+float(row[5]), 'time':[], 'co2':[], \
+                                              'co2_bg':[], 'co2_gpp':[], 'co2_ra':[], 'co2_a':[]})
+        return dicts
+
+    def get_hhl_data(self, dacycle, lag, ncc, dicts, starth, endh):
+        abs_start_time = str((to_datetime(dacycle['abs.time.start'])).strftime('%Y%m%d%H'))
+        path_in = os.path.join(dacycle['dir.da_run'], abs_start_time+'_'+str(starth+lag*168)+'_'+str(endh+lag*168), "cosmo/output/")
+        hhl = np.empty(shape=(60))
+        hhl60 = np.empty(shape=(60, 300, 450))
+        with Dataset(path_in+ncc) as nc1:
+            rotpole = nc1.variables['rotated_pole']
+            pollon = rotpole.getncattr('grid_north_pole_longitude')
+            pollat = rotpole.getncattr('grid_north_pole_latitude')
+            rlat = nc1.variables['rlat'][:]
+            rlon = nc1.variables['rlon'][:]
+            hhl_3d = np.squeeze(nc1.variables['HHL'][:])
+
+        for h in range(0, 60):
+            hhl60[h,:,:] = (hhl_3d[h,:,:]+hhl_3d[h+1,:,:])/2.
+
+        for station in dicts:
+            myrlon, myrlat = transform.rotpole2wgs(float(station['lon']), \
+                                                   float(station['lat']), \
+                                                   pollon, pollat, inverse=True)
+            for h in range(0, 60):
+                hhl[h] = interpolate.interpn((rlat, rlon), hhl60[h,:,:], [myrlat, myrlon], method='linear')
+            if float(station['alt']) < hhl[59]:
+                station['h1'] = hhl[59]
+                station['h2'] = hhl[59]
+                station['hidx1'] = 59
+                station['hidx2'] = 59
+                station['alt'] = hhl[59]
+            else:
+                for l, ll in enumerate(hhl):
+                    if float(station['alt']) < ll:
+                        station['h1'] = hhl[l]
+                        station['h2'] = hhl[l+1]
+                        station['hidx1'] = l
+                        station['hidx2'] = l+1
+                        # The following line: we interpolate on the middle of alt - lower level
+                        station['alt'] = float(station['alt'])-(float(station['alt'])-hhl[l+1])/2.
+            station['rlon'] = myrlon
+            station['rlat'] = myrlat
+
+        return rlat, rlon, dicts, path_in
+
+    def get_cosmo_data(self, dacycle, date_begin, dicts, rlat, rlon, path_in):
+        hours = ['12', '13', '14', '15']
+        qv_int = np.empty(shape=(60))
+        co2_bg_int = np.empty(shape=(60))
+        co2_gpp_int = np.empty(shape=(60))
+        co2_ra_int = np.empty(shape=(60))
+        co2_a_int = np.empty(shape=(60))
+        co2_bg_daily = []
+        co2_gpp_daily = []
+        co2_ra_daily = []
+        co2_a_daily = []
+        co2_daily = []
+
+        for hrs in hours:
+            with Dataset(path_in+'lffd'+date_begin.strftime("%Y%m%d")+hrs+'.nc') as nc2:
+                qv = np.squeeze(nc2.variables['QV'][:])
+                co2_a = np.squeeze(nc2.variables['CO2_A'][:])
+                for station in dicts:
+                    myrlat = station['rlat']
+                    myrlon = station['rlon']
+                    e = station['ensnum']
+                    h1 = station['h1']
+                    h2 = station['h2']
+                    i1 = station['hidx1']
+                    i2 = station['hidx2']
+                    co2_bg = np.squeeze(nc2.variables['CO2_BG'+e][:])
+                    co2_gpp = np.squeeze(nc2.variables['CO2_GPP'+e][:])
+                    co2_ra = np.squeeze(nc2.variables['CO2_RA'+e][:])
+
+                    for h in range(60):
+                        qv_int[h] = interpolate.interpn((rlat, rlon), qv[h,:,:], [myrlat, myrlon], method='linear')
+                        co2_bg_int[h] = interpolate.interpn((rlat, rlon), co2_bg[h,:,:], [myrlat, myrlon], method='linear')
+                        co2_gpp_int[h] = interpolate.interpn((rlat, rlon), co2_gpp[h,:,:], [myrlat, myrlon], method='linear')
+                        co2_ra_int[h] = interpolate.interpn((rlat, rlon), co2_ra[h,:,:], [myrlat, myrlon], method='linear')
+                        co2_a_int[h] = interpolate.interpn((rlat, rlon), co2_a[h,:,:], [myrlat, myrlon], method='linear')
+
+                    co2_bg1 = co2_bg_int[i1]
+                    co2_bg2 = co2_bg_int[i2]
+                    co2_gpp1 = co2_gpp_int[i1]
+                    co2_gpp2 = co2_gpp_int[i2]
+                    co2_ra1 = co2_ra_int[i1]
+                    co2_ra2 = co2_ra_int[i2]
+                    co2_a1 = co2_a_int[i1]
+                    co2_a2 = co2_a_int[i2]
+                    qv1 = qv_int[i1]
+                    qv2 = qv_int[i2]
+
+                    if h1 == h2:
+                        co2_bg_final = co2_bg1
+                        co2_gpp_final = co2_gpp1
+                        co2_ra_final = co2_ra1
+                        co2_a_final = co2_a1
+                        qv_final = qv1
+                    else:
+                        co2_bg_final = co2_bg1+(float(station['alt'])-h1)*(co2_bg2-co2_bg1)/(h2-h1)
+                        co2_gpp_final = co2_gpp1+(float(station['alt'])-h1)*(co2_gpp2-co2_gpp1)/(h2-h1)
+                        co2_ra_final = co2_ra1+(float(station['alt'])-h1)*(co2_ra2-co2_ra1)/(h2-h1)
+                        co2_a_final = co2_a1+(float(station['alt'])-h1)*(co2_a2-co2_a1)/(h2-h1)
+                        qv_final = qv1+(float(station['alt'])-h1)*(qv2-qv1)/(h2-h1)
+
+                    kgkg2ppm = 658941.149738696/(1-qv_final)
+                    co2_bg_final = kgkg2ppm*co2_bg_final
+                    co2_gpp_final = kgkg2ppm*co2_gpp_final
+                    co2_ra_final = kgkg2ppm*co2_ra_final
+                    co2_a_final = kgkg2ppm*co2_a_final
+                    co2_final = co2_bg_final-co2_gpp_final+co2_ra_final+co2_a_final
+
+                    station['co2_bg'].append(co2_bg_final)
+                    station['co2_gpp'].append(co2_gpp_final)
+                    station['co2_ra'].append(co2_ra_final)
+                    station['co2_a'].append(co2_a_final)
+                    station['co2'].append(co2_final)
+
+        for station in dicts:
+            station['co2_bg'] = np.mean(np.asarray(station['co2_bg']))
+            station['co2_gpp'] = np.mean(np.asarray(station['co2_gpp']))
+            station['co2_ra'] = np.mean(np.asarray(station['co2_ra']))
+            station['co2_a'] = np.mean(np.asarray(station['co2_a']))
+            station['co2'] = np.mean(np.asarray(station['co2']))
+            station['time'].append((date_begin-datetime(1970,1,1)).total_seconds())
+
+        self.write_cosmo_data(dacycle, dicts, date_begin)
+
+    def write_cosmo_data(self, dacycle, dicts, date):
+        co2_all = []
+        time_all = []
+        date = date.strftime("%Y%m%d")
+        for station in dicts:
+            e = station['ensnum']
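What replaces the cdo-based extraction is a plain linear interpolation in height between the two half-levels that bracket each station, followed by the kg/kg to ppm factor 658941.149738696/(1-QV), i.e. 1e6*29/44.01 referred to dry air. A toy sketch of that step (numbers are illustrative; h1/h2 mirror station['h1']/station['h2'] above):

    # Sketch of the vertical interpolation done per station in get_cosmo_data
    h1, h2 = 520.0, 480.0          # heights of the bracketing half-levels [m]
    c1, c2 = 6.10e-4, 6.25e-4      # CO2 mass mixing ratio on those levels [kg/kg]
    alt = 500.0                    # (adjusted) station altitude [m]
    qv = 5.0e-3                    # specific humidity [kg/kg]

    co2 = c1 + (alt - h1) * (c2 - c1) / (h2 - h1)    # linear interpolation in height
    kgkg2ppm = 658941.149738696 / (1 - qv)           # 1e6 * 29/44.01, corrected to dry air
    print(co2 * kgkg2ppm)                            # ~409 ppm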