NearRealTimeCTDAS / CTDAS

Commit d6b4742c, authored 6 years ago by brunner
Commit message: octe ok
Parent: 449d1ffb
Changes: 4 changed files, with 161 additions and 12 deletions

  da/cosmo/observationoperator.py       +8   −4
  da/cosmo/observationoperator_octe.py  +7   −7
  octe.rc                               +144 −0
  template.py                           +2   −1
da/cosmo/observationoperator.py  (+8 −4)
...
@@ -127,12 +127,16 @@ class ObservationOperator(object):
         # for ncfile in ncfilelist:
         #     infile = os.path.join(ncfile + '.nc')
-        self.lambda_file = os.path.join(self.outputdir, 'lambda.%s.nc' % self.dacycle['time.sample.stamp'])
+        self.lambda_file = os.path.join('/store/empa/em05/parsenov/cosmo_input/online_vprm', 'lambdas.nc')
+        #self.lambda_file = os.path.join(self.outputdir, 'lambda.%s.nc' % self.dacycle['time.sample.stamp'])
         ofile = Dataset(self.lambda_file, mode='w')
-        opar = ofile.createDimension('param', self.nparams)
-        omem = ofile.createDimension('member', self.forecast_nmembers)    #len(members.nmembers))
+        opar = ofile.createDimension('nparam', self.nparams)    #opar = ofile.createDimension('param', self.nparams)
+        omem = ofile.createDimension('nensembles', self.forecast_nmembers)    #len(members.nmembers))
+        #omem = ofile.createDimension('member', self.forecast_nmembers)    #len(members.nmembers))
-        l = ofile.createVariable('lambda', np.float32, ('member','param'), fill_value=-999.99)
+        #l = ofile.createVariable('lambda', np.float32, ('member','param'), fill_value=-999.99)
+        l = ofile.createVariable('lambda', np.float32, ('nensembles','nparam'), fill_value=-999.99)
         co2 = np.empty(shape=(self.forecast_nmembers, self.nparams))
         for m in range(0, 20):
...
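For context: the hunk above points the lambda file at a fixed online_vprm path and renames the netCDF dimensions from ('member', 'param') to ('nensembles', 'nparam'). The following is a minimal, self-contained sketch of that write step, assuming netCDF4 and numpy are available; the member and parameter counts are hypothetical defaults taken from octe.rc, and this is illustrative rather than the committed code.

# Illustrative sketch, not the committed code: write an ensemble of scaling
# factors ("lambdas") to a netCDF file using the renamed dimensions above.
import numpy as np
from netCDF4 import Dataset

def write_lambda_file(lambda_file, nmembers=21, nparams=181):
    # nmembers/nparams are hypothetical defaults mirroring octe.rc
    ofile = Dataset(lambda_file, mode='w')
    ofile.createDimension('nparam', nparams)
    ofile.createDimension('nensembles', nmembers)
    lam = ofile.createVariable('lambda', np.float32,
                               ('nensembles', 'nparam'), fill_value=-999.99)
    # placeholder values; the real operator fills this array from the statevector
    lam[:, :] = np.ones((nmembers, nparams), dtype=np.float32)
    ofile.close()

write_lambda_file('lambdas.nc')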
da/cosmo/observationoperator_octe.py  (+7 −7)
...
@@ -93,7 +93,7 @@ class ObservationOperator(object):
         # Open file with x,y,z,t of model samples that need to be sampled
-        f_in = io.ct_read(self.dacycle['ObsOperator.inputfile'], method='read')
+        f_in = io.ct_read(self.dacycle['ObsOperator.inputfile'], method='read')
         # Get simulated values and ID
...
@@ -133,7 +133,7 @@ class ObservationOperator(object):
-        args = [(dacycle, starth+168*lag, endh+168*lag-1, n) for n in range(0, self.forecast_nmembers)]
+        args = [(dacycle, starth+168*lag, endh+168*lag-1, n) for n in range(1, self.forecast_nmembers+1)]
         with Pool(self.forecast_nmembers) as pool:
...
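The only change in this hunk is the ensemble index range: members are now numbered 1 to forecast_nmembers instead of 0 to forecast_nmembers-1, presumably so the index lines up with the three-digit member suffixes ("001", "002", ...) used for the tracer fields further down. A small illustrative sketch of the difference, with a hypothetical member count:

# Illustrative only: how the argument list changes with the new 1-based range.
forecast_nmembers = 21   # hypothetical value, mirroring da.optimizer.nmembers in octe.rc
old_members = list(range(0, forecast_nmembers))       # 0, 1, ..., 20
new_members = list(range(1, forecast_nmembers + 1))   # 1, 2, ..., 21
# downstream the member id appears as a three-digit string, presumably via:
print("%03d" % new_members[0])                        # 001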
@@ -174,7 +174,7 @@ class ObservationOperator(object):
         files2cat_brm = []
         files2cat_ssl = []
-        if ens == "000":
+        if ens == "001":
             cdo.selname("HHL", input=hhl_fn, output=cosmo_out+"hhl.nc")
             cdo.remapnn("lon=7.99_lat=46.54,", input=cosmo_out+"hhl.nc", output=cosmo_out+"hhl_jfj.nc")
             cdo.remapnn("lon=8.40_lat=47.48,", input=cosmo_out+"hhl.nc", output=cosmo_out+"hhl_lhw.nc")
...
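The block above extracts the model level heights (HHL) once, for the first ensemble member only (now "001" after the 1-based renumbering), and remaps them to the station locations. As an illustration of the same pattern with less repetition, here is a sketch using the python-cdo bindings and the station coordinates visible in this diff (jfj and lhw above, brm and ssl in the CO2 hunk below); the input file name and output directory are assumptions, not taken from the code.

# Illustrative sketch, not the committed code: remap HHL to each station once.
from cdo import Cdo

cdo = Cdo()
stations = {                       # lon/lat pairs as they appear in the diff
    'jfj': 'lon=7.99_lat=46.54',
    'lhw': 'lon=8.40_lat=47.48',
    'brm': 'lon=8.18_lat=47.19',
    'ssl': 'lon=7.92_lat=47.92',
}
hhl_fn = 'laf_with_hhl.nc'         # hypothetical input file holding HHL
cosmo_out = './'                   # hypothetical output directory
cdo.selname('HHL', input=hhl_fn, output=cosmo_out + 'hhl.nc')
for name, coord in stations.items():
    cdo.remapnn(coord + ',', input=cosmo_out + 'hhl.nc',
                output=cosmo_out + 'hhl_' + name + '.nc')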
@@ -189,10 +189,10 @@ class ObservationOperator(object):
         co2_out_lhw = cosmo_out+'CO2_lhw_'+ens+'_'+dt+'.nc'
         co2_out_brm = cosmo_out+'CO2_brm_'+ens+'_'+dt+'.nc'
         co2_out_ssl = cosmo_out+'CO2_ssl_'+ens+'_'+dt+'.nc'
-        cdo.expr("'CO2=(BG_"+ens+"-GPP_"+ens+"+RESP_"+ens+"+CO2_A_CH+CO2_A)/(1.-QV)'", input="-remapnn,lon=7.99_lat=46.54 -selname,QV,BG_"+ens+",GPP_"+ens+",RESP_"+ens+",CO2_A_CH,CO2_A "+co2_in_fn, output=co2_out_jfj)
-        cdo.expr("'CO2=(BG_"+ens+"-GPP_"+ens+"+RESP_"+ens+"+CO2_A_CH+CO2_A)/(1.-QV)'", input="-remapnn,lon=8.40_lat=47.48 -selname,QV,BG_"+ens+",GPP_"+ens+",RESP_"+ens+",CO2_A_CH,CO2_A "+co2_in_fn, output=co2_out_lhw)
-        cdo.expr("'CO2=(BG_"+ens+"-GPP_"+ens+"+RESP_"+ens+"+CO2_A_CH+CO2_A)/(1.-QV)'", input="-remapnn,lon=8.18_lat=47.19 -selname,QV,BG_"+ens+",GPP_"+ens+",RESP_"+ens+",CO2_A_CH,CO2_A "+co2_in_fn, output=co2_out_brm)
-        cdo.expr("'CO2=(BG_"+ens+"-GPP_"+ens+"+RESP_"+ens+"+CO2_A_CH+CO2_A)/(1.-QV)'", input="-remapnn,lon=7.92_lat=47.92 -selname,QV,BG_"+ens+",GPP_"+ens+",RESP_"+ens+",CO2_A_CH,CO2_A "+co2_in_fn, output=co2_out_ssl)
+        cdo.expr("'CO2=(CO2_BG"+ens+"-CO2_GPP"+ens+"+CO2_RA"+ens+"+CO2_A_CH+CO2_A)/(1.-QV)'", input="-remapnn,lon=7.99_lat=46.54 -selname,QV,CO2_BG"+ens+",CO2_GPP"+ens+",CO2_RA"+ens+",CO2_A_CH,CO2_A "+co2_in_fn, output=co2_out_jfj)
+        cdo.expr("'CO2=(CO2_BG"+ens+"-CO2_GPP"+ens+"+CO2_RA"+ens+"+CO2_A_CH+CO2_A)/(1.-QV)'", input="-remapnn,lon=8.40_lat=47.48 -selname,QV,CO2_BG"+ens+",CO2_GPP"+ens+",CO2_RA"+ens+",CO2_A_CH,CO2_A "+co2_in_fn, output=co2_out_lhw)
+        cdo.expr("'CO2=(CO2_BG"+ens+"-CO2_GPP"+ens+"+CO2_RA"+ens+"+CO2_A_CH+CO2_A)/(1.-QV)'", input="-remapnn,lon=8.18_lat=47.19 -selname,QV,CO2_BG"+ens+",CO2_GPP"+ens+",CO2_RA"+ens+",CO2_A_CH,CO2_A "+co2_in_fn, output=co2_out_brm)
+        cdo.expr("'CO2=(CO2_BG"+ens+"-CO2_GPP"+ens+"+CO2_RA"+ens+"+CO2_A_CH+CO2_A)/(1.-QV)'", input="-remapnn,lon=7.92_lat=47.92 -selname,QV,CO2_BG"+ens+",CO2_GPP"+ens+",CO2_RA"+ens+",CO2_A_CH,CO2_A "+co2_in_fn, output=co2_out_ssl)
         files2cat_jfj.append(co2_out_jfj)
         files2cat_lhw.append(co2_out_lhw)
         files2cat_brm.append(co2_out_brm)
...
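The four cdo.expr calls differ only in the station coordinates and the output file, and this commit renames the per-member tracer fields from BG_/GPP_/RESP_ to CO2_BG/CO2_GPP/CO2_RA. A sketch of how the expression and selname strings come together for one member and one station; everything apart from the variable names and coordinates shown in the diff (the member id and input file name) is an assumption for illustration.

# Illustrative only: build the cdo expression for one ensemble member and station.
ens = '001'                               # three-digit member id, as used in the diff
co2_in_fn = 'lffd2013040100.nc'           # hypothetical COSMO output file
expr = ("'CO2=(CO2_BG" + ens + "-CO2_GPP" + ens + "+CO2_RA" + ens +
        "+CO2_A_CH+CO2_A)/(1.-QV)'")
selname = ("-selname,QV,CO2_BG" + ens + ",CO2_GPP" + ens + ",CO2_RA" + ens +
           ",CO2_A_CH,CO2_A")
jfj_input = "-remapnn,lon=7.99_lat=46.54 " + selname + " " + co2_in_fn
# cdo.expr(expr, input=jfj_input, output=co2_out_jfj) would then reproduce the
# first call in the hunk above.
print(expr)
print(jfj_input)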
octe.rc  (new file, mode 100644)  (+144 −0)
! CarbonTracker Data Assimilation Shell (CTDAS) Copyright (C) 2017 Wouter Peters.
! Users are recommended to contact the developers (wouter.peters@wur.nl) to receive
! updates of the code. See also: http://www.carbontracker.eu.
!
! This program is free software: you can redistribute it and/or modify it under the
! terms of the GNU General Public License as published by the Free Software Foundation,
! version 3. This program is distributed in the hope that it will be useful, but
! WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
! FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
!
! You should have received a copy of the GNU General Public License along with this
! program. If not, see <http://www.gnu.org/licenses/>.
! author: Wouter Peters
!
! This is a blueprint for an rc-file used in CTDAS. Feel free to modify it, and please go to the main webpage for further documentation.
!
! Note that rc-files have the convention that commented lines start with an exclamation mark (!), while special lines start with a hashtag (#).
!
! When running the script start_ctdas.sh, this rc-file will be copied to your run directory, and some items will be replaced for you.
! The result will be a nearly ready-to-go rc-file for your assimilation job. The entries and their meaning are explained by the comments below.
!
!
! HISTORY:
!
! Created on August 20th, 2013 by Wouter Peters
!
!
! The time for which to start and end the data assimilation experiment in format YYYY-MM-DD HH:MM:SS
! the following 3 lines are for initial start
time.start : 2013-04-01 00:00:00
time.finish : 2013-04-07 23:00:00
time.end : 2013-04-07 23:00:00
abs.time.start : 2013-04-01 00:00:00
! Whether to restart the CTDAS system from a previous cycle, or to start the sequence fresh. Valid entries are T/F/True/False/TRUE/FALSE
time.restart : F
da.restart.tstamp : 2013-01-01 00:00:00
! The length of a cycle is given in days, such that the integer 7 denotes the typically used weekly cycle. Valid entries are integers > 1
time.cycle : 7
! The number of cycles of lag to use for a smoother version of CTDAS. CarbonTracker CO2 typically uses 5 weeks of lag. Valid entries are integers > 0
time.nlag : 2
! The directory under which the code, input, and output will be stored. This is the base directory for a run. The word
! '/' will be replaced through the start_ctdas.sh script by a user-specified folder name. DO NOT REPLACE
run.name : octe
dir.da_run : /scratch/snx3000/parsenov/${run.name}
restartmap.dir : ${dir.da_run}/input
! The resources used to complete the data assimilation experiment. This depends on your computing platform.
! The number of cycles per job denotes how many cycles should be completed before starting a new process or job, this
! allows you to complete many cycles before resubmitting a job to the queue and having to wait again for resources.
! Valid entries are integers > 0
da.resources.ncycles_per_job : 1
! The ntasks specifies the number of threads to use for the MPI part of the code, if relevant. Note that the CTDAS code
! itself is not parallelized and the python code underlying CTDAS does not use multiple processors. The chosen observation
! operator though might use many processors, like TM5. Valid entries are integers > 0
da.resources.ntasks : 1
! This specifies the amount of wall-clock time to request for each job. Its value depends on your computing platform and might take
! any form appropriate for your system. Typically, HPC queueing systems allow you a certain number of hours of usage before
! your job is killed, and you are expected to finalize and submit a next job before that time. Valid entries are strings.
da.resources.ntime : 44:00:00
! The resource settings above will cause the creation of a job file in which 2 cycles will be run, and 30 threads
! are asked for a duration of 4 hours
!
! Info on the DA system used, this depends on your application of CTDAS and might refer to for instance CO2, or CH4 optimizations.
!
da.system : CarbonTracker
! The specific settings for your system are read from a separate rc-file, which points to the data directories, observations, etc
da.system.rc : da/rc/carbontracker_cosmo.rc
! This flag should probably be moved to the da.system.rc file. It denotes which type of filtering to use in the optimizer
da.system.localization : CT2007
! Info on the observation operator to be used, these keys help to identify the settings for the transport model in this case
da.obsoperator : cosmo
!
! The TM5 transport model is controlled by an rc-file as well. The value below refers to the configuration of the TM5 model to
! be used as observation operator in this experiment.
!
!da.obsoperator.home : /store/empa/em05/parsenov/cosmo_my_prc_chain
da.obsoperator.home : /store/empa/em05/parsenov/cosmo_processing_chain
da.bio.input : /store/empa/em05/parsenov/cosmo_input/vprm/processed
da.bg.input : /store/empa/em05/parsenov/cosmo_input/icbc/processed
da.obsoperator.rc : ${da.obsoperator.home}/tm5-ctdas-ei-zoom.rc
!forward.savestate.exceptsam : TRUE
!
! The number of ensemble members used in the experiment. Valid entries are integers > 2
!
da.optimizer.nmembers : 21
nparameters : 181
! Finally, info on the archive task, if any. Archive tasks are run after each cycle to ensure that the results of each cycle are
! preserved, even if you run on scratch space or a temporary disk. Since an experiment can take multiple weeks to complete, moving
! your results out of the way, or backing them up, is usually a good idea. Note that the tasks are commented and need to be uncommented
! to use this feature.
! The following key identifies that two archive tasks will be executed, one called 'alldata' and the other 'resultsonly'.
!task.rsync : alldata onlyresults
! The specifics for the first task.
! 1> Which source directories to back up. Valid entry is a list of folders separated by spaces
! 2> Which destination directory to use. Valid entries are a folder name, or server and folder name in rsync format as below
! 3> Which flags to add to the rsync command
! The settings below will result in an rsync command that looks like:
!
! rsync -auv -e ssh ${dir.da_run} you@yourserver.com:/yourfolder/
!
!task.rsync.alldata.sourcedirs : ${dir.da_run}
!task.rsync.alldata.destinationdir : you@yourserver.com:/yourfolder/
!task.rsync.alldata.flags g -auv -e ssh
! Repeated for rsync task 2, note that we only back up the analysis and output dirs here
!task.rsync.onlyresults.sourcedirs : ${dir.da_run}/analysis ${dir.da_run}/output
!task.rsync.onlyresults.destinationdir : you@yourserver.com:/yourfolder/
!task.rsync.onlyresults.flags : -auv -e ssh
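octe.rc follows the usual CTDAS rc conventions described in its header: lines starting with '!' are comments, keys and values are separated by ':', and ${...} refers to another key (e.g. ${run.name} inside dir.da_run). CTDAS ships its own rc reader, so the following is only a rough sketch of those conventions, not the project's parser.

# Minimal sketch of the rc conventions described above; not the CTDAS rc reader.
import re

def read_rc(path):
    keys = {}
    with open(path) as f:
        for line in f:
            line = line.strip()
            if not line or line.startswith('!'):
                continue                      # '!' lines are comments
            if ':' not in line:
                continue                      # skip malformed/special lines
            key, value = line.split(':', 1)
            keys[key.strip()] = value.strip()
    # resolve simple ${key} references such as ${run.name} or ${dir.da_run}
    for key, value in keys.items():
        for ref in re.findall(r'\$\{([^}]+)\}', value):
            if ref in keys:
                value = value.replace('${' + ref + '}', keys[ref])
        keys[key] = value
    return keys

# e.g. read_rc('octe.rc')['da.optimizer.nmembers'] would give '21'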
template.py  (+2 −1)
...
@@ -35,7 +35,8 @@ from da.cosmo.obspack_globalviewplus2 import ObsPackObservations
 #from da.cosmo.obs import Obs
 from da.cosmo.optimizer import CO2Optimizer
 #from da.cosmo.observationoperator_parallel import ObservationOperator # does not fully work
-from da.cosmo.observationoperator import ObservationOperator
+from da.cosmo.observationoperator_octe import ObservationOperator
+#from da.cosmo.observationoperator import ObservationOperator
 #from da.cosmo.expand_fluxes import save_weekly_avg_1x1_data, save_weekly_avg_state_data, save_weekly_avg_tc_data, save_weekly_avg_ext_tc_data
 #from da.analysis.expand_molefractions import write_mole_fractions
...
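The template change simply swaps the hard-coded import so that the OCTE observation operator is used. As an aside, and not part of this commit, one way to avoid editing the import by hand would be to select the class from a configuration value such as the da.obsoperator key in octe.rc; the mapping below is an assumption for illustration.

# Hedged sketch only: pick the observation operator from a config value instead
# of editing the import line in template.py. The mapping values are assumptions.
import importlib

def load_obsoperator(kind):
    modules = {
        'cosmo': 'da.cosmo.observationoperator',
        'cosmo_octe': 'da.cosmo.observationoperator_octe',
    }
    module = importlib.import_module(modules[kind])
    return module.ObservationOperator

# ObservationOperator = load_obsoperator('cosmo_octe')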