NearRealTimeCTDAS / CTDAS / Commits / eb723e28

Commit eb723e28, authored Jul 17, 2020 by Woude, Auke van der

Revert "Revert "Merge branch 'master' into 'master'""

This reverts commit 53c24cb8.
Parent: 53c24cb8

Changes: 5 files
da/baseclasses/statevector.py

@@ -34,6 +34,7 @@ your own baseclass StateVector we refer to :ref:`tut_chapter5`.
"""
import os
import sys
import logging
import numpy as np
from datetime import timedelta
@@ -162,46 +163,14 @@ class StateVector(object):
            self.ensemble_members[n] = []

        # This specifies the file to read with the gridded mask at 1x1 degrees. Each gridbox holds a number that specifies the parametermember
        # that maps onto it. From this map, a dictionary is created that allows a reverse look-up so that we can map parameters to a grid.
        mapfile = os.path.join(dacycle.dasystem['regionsfile'])
        ncf = io.ct_read(mapfile, 'read')
        self.gridmap = ncf.get_variable('regions')
        self.tcmap = ncf.get_variable('transcom_regions')
        ncf.close()
        logging.debug("A TransCom map on 1x1 degree was read from file %s" % dacycle.dasystem['regionsfile'])
        logging.debug("A parameter map on 1x1 degree was read from file %s" % dacycle.dasystem['regionsfile'])

        # Create a dictionary for state <-> gridded map conversions
        nparams = self.gridmap.max()
        self.gridmap = np.random.randint(low=1, high=self.nparams + 1, size=(180, 360,))
        self.griddict = {}
        for r in range(1, int(nparams) + 1):
            sel = np.nonzero(self.gridmap.flat == r)
        for r in range(1, self.nparams + 1):
            sel = np.nonzero(self.gridmap.flat == r)
            if len(sel[0]) > 0:
                self.griddict[r] = sel
        logging.debug("A dictionary to map grids to states and vice versa was created")

        # Create a matrix for state <-> TransCom conversions
        self.tcmatrix = np.zeros((self.nparams, 23), 'float')
        for r in range(1, self.nparams + 1):
            sel = np.nonzero(self.gridmap.flat == r)
            if len(sel[0]) < 1:
                continue
            else:
                n_tc = set(self.tcmap.flatten().take(sel[0]))
                if len(n_tc) > 1:
                    logging.error("Parameter %d seems to map to multiple TransCom regions (%s), I do not know how to handle this" % (r, n_tc))
                    raise ValueError
                self.tcmatrix[r - 1, n_tc.pop() - 1] = 1.0
        logging.debug("A matrix to map states to TransCom regions and vice versa was created")

        # Create a mask for species/unknowns
        self.make_species_mask()
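The comment in this hunk describes a reverse look-up from parameters to grid cells. As a standalone illustration of that idea (the array sizes, the random map, and the variable names below are assumptions, not taken from the class), the dictionary can be built and applied like this:

import numpy as np

# Illustrative stand-ins for self.nparams and self.gridmap in the hunk above.
nparams = 100
gridmap = np.random.randint(low=1, high=nparams + 1, size=(180, 360))

# Reverse look-up: parameter index -> flat indices of the 1x1 degree cells it covers.
griddict = {}
for r in range(1, nparams + 1):
    sel = np.nonzero(gridmap.ravel() == r)
    if len(sel[0]) > 0:
        griddict[r] = sel

# Spread a vector of per-parameter scaling factors back onto the grid.
statevalues = np.random.uniform(0.9, 1.1, nparams)
gridded = np.zeros(gridmap.size)
for r, sel in griddict.items():
    gridded[sel] = statevalues[r - 1]
gridded = gridded.reshape(gridmap.shape)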
@@ -230,7 +199,7 @@ class StateVector(object):
            logging.debug(" -> %s" % k)

    def make_new_ensemble(self, lag, covariancematrix=None):
    def make_new_ensemble(self, lag, covariancematrix=[None]):
        """
        :param lag: an integer indicating the time step in the lag order
        :param covariancematrix: a matrix to draw random values from
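The signature change above (covariancematrix=None versus [None]) only concerns how the covariance input is passed in; the body of make_new_ensemble is not part of this hunk. A generic sketch of drawing ensemble deviations from a covariance matrix, assuming a Cholesky factorization and an identity fallback (both assumptions for illustration, not the baseclass implementation):

import numpy as np

def make_new_ensemble_sketch(nparams, nmembers, covariancematrix=None):
    # Fall back to an identity covariance when none is supplied (assumption).
    if covariancematrix is None:
        covariancematrix = np.identity(nparams)

    # Cholesky factor C such that C @ C.T reproduces the covariance.
    C = np.linalg.cholesky(covariancematrix)

    # Member 0 carries the mean state; the others add correlated noise.
    mean = np.ones(nparams)
    members = [mean.copy()]
    for _ in range(1, nmembers):
        members.append(mean + C @ np.random.standard_normal(nparams))
    return members

# e.g. members = make_new_ensemble_sketch(nparams=100, nmembers=150)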
da/rc/carbontracker_random.rc

@@ -11,15 +11,7 @@
! You should have received a copy of the GNU General Public License along with this
! program. If not, see <http://www.gnu.org/licenses/>.
datadir : /Storage/CO2/carbontracker/input/ctdas_2016/
ocn.covariance : ${datadir}/oceans/oif/cov_ocean.2000.01.nc
deltaco2.prefix : oif_p3_era40.dpco2
bio.cov.dir : ${datadir}/covariances/gridded_NH/
bio.cov.prefix : cov_ecoregion
regtype : gridded_oif30
nparameters : 9835
nparameters : 100
random.seed : 4385
regionsfile : ${datadir}/covariances/gridded_NH/griddedNHparameters.nc
obs.sites.rc : Random
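The rc file above is a plain list of 'key : value' settings with ${...} substitution and '!' comment lines; nparameters appears twice because the diff shows both the old (9835) and the new (100) value. A simplified parser sketch (the rc reader CTDAS actually uses may behave differently):

import re

def read_rc_sketch(path):
    # Parse 'key : value' lines, skip '!' comments, expand ${var} references.
    settings = {}
    with open(path) as f:
        for line in f:
            line = line.strip()
            if not line or line.startswith('!'):
                continue
            key, _, value = line.partition(':')
            key, value = key.strip(), value.strip()
            value = re.sub(r'\$\{(\w[\w.]*)\}',
                           lambda m: settings.get(m.group(1), m.group(0)),
                           value)
            settings[key] = value   # a repeated key keeps the last value read
    return settings

# e.g. read_rc_sketch('da/rc/carbontracker_random.rc')['nparameters'] -> '100'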
da/tm5/observationoperator.py

@@ -38,7 +38,6 @@ import logging
import shutil
import datetime
import subprocess
from string import join
import glob

sys.path.append(os.getcwd())
sys.path.append("../../")
@@ -192,7 +191,7 @@ class TM5ObservationOperator(ObservationOperator):
        else:
            cmd = ['python', 'setup_tm5', '--%s' % self.dacycle.daplatform.give_queue_type(), rcfilename]

        logging.info('Starting the external TM5 setup script')
        logging.info('using command ... %s' % join(cmd))
        logging.info('using command ... %s' % (''.join(cmd)))

        retcode = subprocess.call(cmd)
        os.chdir(self.dacycle['dir.da_submit'])
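The change above swaps the Python 2 string.join(cmd) for ''.join(cmd) when logging the command before subprocess.call; note that ''.join concatenates the arguments without separators, so the sketch below uses a space-separated join purely for readability (an illustrative choice, not part of the diff):

import logging
import subprocess

def run_and_log(cmd):
    # Log the external command, run it, and return its exit code.
    logging.info('using command ... %s' % ' '.join(cmd))
    return subprocess.call(cmd)

# Hypothetical invocation mirroring the hunk above; the queue flag is made up.
retcode = run_and_log(['python', 'setup_tm5', '--queue', 'tm5.rc'])
if retcode != 0:
    logging.error('external setup script returned exit code %d' % retcode)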
@@ -254,8 +253,9 @@ class TM5ObservationOperator(ObservationOperator):
        self.write_rc(self.rc_filename)

        # Define the name of the file that will contain the modeled output of each observation
        self.simulated_file = os.path.join(self.outputdir, 'flask_output.%s.nc' % self.dacycle['time.sample.stamp'])
        #temp = 'flask_output.%s.nc'%(self.dacycle['time.sample.stamp'])
        #self.simulated_file = os.path.join(self.outputdir, temp)
        self.simulated_file = os.path.join(self.outputdir, 'flask_output.%s.nc' % (self.dacycle['time.sample.stamp']))

    def load_rc(self, name):
        """
@@ -346,7 +346,7 @@ class TM5ObservationOperator(ObservationOperator):
        """
        for k, v in list(newvalues.items()):
            if key in self.tm_settings:
            if k in self.tm_settings:
                # keep previous value
                v_orig = self.tm_settings[k]
                #replace with new
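The hunk above corrects the loop variable (key versus k) used when overwriting TM5 rc settings while keeping the previous value. A standalone sketch of that update pattern (the function name and the warning branch are illustrative additions):

import logging

def modify_settings_sketch(tm_settings, newvalues):
    for k, v in list(newvalues.items()):
        if k in tm_settings:
            v_orig = tm_settings[k]     # keep previous value
            tm_settings[k] = v          # replace with new
            logging.debug('Replaced setting %s: %s -> %s' % (k, v_orig, v))
        else:
            logging.warning('Setting %s not present, skipping' % k)
    return tm_settings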
@@ -629,6 +629,3 @@ class TM5ObservationOperator(ObservationOperator):
if __name__ == "__main__":
    pass
da/tools/initexit.py
@@ -332,25 +332,20 @@ class CycleControl(dict):
            strippedname = os.path.split(self['jobrcfilename'])[-1]
            self['jobrcfilename'] = os.path.join(self['dir.exec'], strippedname)
        # shutil.copy(os.path.join(self.dasystem['regionsfile']),os.path.join(self['dir.exec'],'da','analysis','copied_regions.nc'))
        #logging.info('Copied regions file to the analysis directory: %s'%os.path.join(self.dasystem['regionsfile']))
        if 'extendedregionsfile' in self.dasystem:
            # shutil.copy(os.path.join(self.dasystem['extendedregionsfile']),os.path.join(self['dir.exec'],'da','analysis','copied_regions_extended.nc'))
            logging.info('Copied extended regions file to the analysis directory: %s' % os.path.join(self.dasystem['extendedregionsfile']))
        else:
            # shutil.copy(os.path.join(self['dir.exec'],'da','analysis','olson_extended.nc'),os.path.join(self['dir.exec'],'da','analysis','copied_regions_extended.nc'))
            logging.info('Copied extended regions within the analysis directory: %s' % os.path.join(self['dir.exec'], 'da', 'analysis', 'olson_extended.nc'))
        for filename in glob.glob(os.path.join(self['dir.exec'], 'da', 'analysis', '*.pickle')):
            logging.info('Deleting pickle file %s to make sure the correct regions are used' % os.path.split(filename)[1])
            os.remove(filename)
        for filename in glob.glob(os.path.join(self['dir.exec'], '*.pickle')):
            logging.info('Deleting pickle file %s to make sure the correct regions are used' % os.path.split(filename)[1])
            os.remove(filename)
        #for filename in glob.glob(os.path.join(self['dir.exec'],'da','analysis','*.pickle')):
        #    logging.info('Deleting pickle file %s to make sure the correct regions are used'%os.path.split(filename)[1])
        #    os.remove(filename)
        #for filename in glob.glob(os.path.join(self['dir.exec'],'*.pickle')):
        #    logging.info('Deleting pickle file %s to make sure the correct regions are used'%os.path.split(filename)[1])
        #    os.remove(filename)
        if 'random.seed.init' in self.dasystem:
            self.read_random_seed(True)
        self.parse_times()
        #self.write_rc(self['jobrcfilename'])

    def setup_file_structure(self):
        """
templates/template.py

@@ -29,7 +29,7 @@ from da.tools.initexit import start_logger, validate_opts_args, parse_options, C
from da.stilt.pipeline import forward_pipeline, header, footer
from da.platform.cartesius import CartesiusPlatform
from da.baseclasses.dasystem import DaSystem
from da.stilt.statevector import CO2GriddedStateVector
from da.baseclasses.statevector import StateVector
from da.baseclasses.obs import Observations
from da.baseclasses.optimizer import Optimizer
from da.stilt.observationoperator import STILTObservationOperator
@@ -54,7 +54,7 @@ platform = CartesiusPlatform()
dasystem = DaSystem(dacycle['da.system.rc'])
obsoperator = STILTObservationOperator(dacycle['da.obsoperator.rc'])
samples = Observations()
statevector = CO2GriddedStateVector()
statevector = StateVector()
optimizer = Optimizer()
##########################################################################################
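The diff ends before the pipeline call itself; in a template like this the components built above are typically handed on to forward_pipeline. The line below is a sketch with an assumed argument order, not taken from da/stilt/pipeline.py:

# Assumed wiring of the components into the forward run (argument order is a guess).
forward_pipeline(dacycle, platform, dasystem, samples, statevector, obsoperator)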