Tsurata, Aki / CTDAS / Commits

Commit 915eb8de authored Dec 19, 2019 by Peters, Wouter

Merge branch 'py3-statevector' into 'master'

Py3 statevector

See merge request ctdas/CTDAS!15

parents 42c6f431 b6d0761b

Changes 4
da/baseclasses/statevector.py
...
...
@@ -34,6 +34,7 @@ your own baseclass StateVector we refer to :ref:`tut_chapter5`.
"""
import
os
import
sys
import
logging
import
numpy
as
np
from
datetime
import
timedelta
...
...
@@ -162,46 +163,14 @@ class StateVector(object):
            self.ensemble_members[n] = []

        # This specifies the file to read with the gridded mask at 1x1 degrees. Each gridbox holds a number that specifies the parametermember
        # that maps onto it. From this map, a dictionary is created that allows a reverse look-up so that we can map parameters to a grid.

        mapfile = os.path.join(dacycle.dasystem['regionsfile'])
        ncf = io.ct_read(mapfile, 'read')
        self.gridmap = ncf.get_variable('regions')
        self.tcmap = ncf.get_variable('transcom_regions')
        ncf.close()

        logging.debug("A TransCom map on 1x1 degree was read from file %s" % dacycle.dasystem['regionsfile'])
        logging.debug("A parameter map on 1x1 degree was read from file %s" % dacycle.dasystem['regionsfile'])

        # Create a dictionary for state <-> gridded map conversions

        nparams = self.gridmap.max()
        self.gridmap = np.random.randint(low=1, high=self.nparams + 1, size=(180, 360,))
        self.griddict = {}
        for r in range(1, int(nparams) + 1):
            sel = np.nonzero(self.gridmap.flat == r)
        for r in range(1, self.nparams + 1):
            sel = np.nonzero(self.gridmap.flat == r)
            if len(sel[0]) > 0:
                self.griddict[r] = sel

        logging.debug("A dictionary to map grids to states and vice versa was created")

        # Create a matrix for state <-> TransCom conversions

        self.tcmatrix = np.zeros((self.nparams, 23), 'float')

        for r in range(1, self.nparams + 1):
            sel = np.nonzero(self.gridmap.flat == r)
            if len(sel[0]) < 1:
                continue
            else:
                n_tc = set(self.tcmap.flatten().take(sel[0]))
                if len(n_tc) > 1:
                    logging.error("Parameter %d seems to map to multiple TransCom regions (%s), I do not know how to handle this" % (r, n_tc))
                    raise ValueError
                self.tcmatrix[r - 1, n_tc.pop() - 1] = 1.0

        logging.debug("A matrix to map states to TransCom regions and vice versa was created")

        # Create a mask for species/unknowns

        self.make_species_mask()
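The loops above build self.griddict (parameter number -> flat grid indices from np.nonzero) and self.tcmatrix (an nparams x 23 projection onto TransCom regions). A minimal sketch of how such lookups can be applied, using hypothetical standalone helpers rather than the methods the StateVector class actually provides:

import numpy as np

def vector_to_grid(values, griddict, shape=(180, 360)):
    # Hypothetical helper: scatter one value per parameter onto the 1x1 degree grid,
    # using the reverse-lookup dictionary (parameter number -> np.nonzero result).
    grid = np.zeros(shape, dtype=float)
    for r, sel in griddict.items():
        grid.flat[sel[0]] = values[r - 1]   # parameters are numbered from 1
    return grid

def vector_to_transcom(values, tcmatrix):
    # Hypothetical helper: aggregate the parameter vector onto the 23 TransCom
    # regions through the (nparams x 23) indicator matrix built above.
    return np.dot(values, tcmatrix)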
...
...
@@ -230,7 +199,7 @@ class StateVector(object):
        logging.debug(" -> %s" % k)

    def make_new_ensemble(self, lag, covariancematrix=None):
    def make_new_ensemble(self, lag, covariancematrix=[None]):
        """
        :param lag: an integer indicating the time step in the lag order
        :param covariancematrix: a matrix to draw random values from
...
...
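The hunk above changes the default of covariancematrix in make_new_ensemble from None to [None], i.e. a list of matrices. As an aside, a common way to turn a covariance matrix into correlated ensemble deviations is through its Cholesky factor; the sketch below is illustrative only and is not the routine's actual implementation:

import numpy as np

def draw_ensemble_deviations(covariancematrix, nmembers, seed=None):
    # Illustrative sketch (not the CTDAS routine): draw correlated parameter
    # deviations from a covariance matrix using its Cholesky factor.
    rng = np.random.default_rng(seed)
    cholesky = np.linalg.cholesky(covariancematrix)      # lower-triangular L with L @ L.T == cov
    nparams = covariancematrix.shape[0]
    deviations = rng.standard_normal((nmembers, nparams)) @ cholesky.T
    deviations[0, :] = 0.0                               # keep member 0 as the unperturbed mean
    return deviations

# e.g. deviations = draw_ensemble_deviations(np.eye(100), nmembers=150)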
da/rc/carbontracker_random.rc
...
...
@@ -11,15 +11,7 @@
! You should have received a copy of the GNU General Public License along with this
! program. If not, see <http://www.gnu.org/licenses/>.
datadir : /Storage/CO2/carbontracker/input/ctdas_2016/
ocn.covariance : ${datadir}/oceans/oif/cov_ocean.2000.01.nc
deltaco2.prefix : oif_p3_era40.dpco2
bio.cov.dir : ${datadir}/covariances/gridded_NH/
bio.cov.prefix : cov_ecoregion
regtype : gridded_oif30
nparameters : 9835
nparameters : 100
random.seed : 4385
regionsfile : ${datadir}/covariances/gridded_NH/griddedNHparameters.nc
obs.sites.rc : Random
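For reference, a CTDAS rc file like the one above is a flat list of 'key : value' pairs with '!' comments and ${...} substitution. A simplified, self-contained sketch of reading such a file (the project's own rc parser is more complete and may behave differently):

import os
import re

def parse_rc_sketch(path):
    # Simplified sketch of a CTDAS-style rc reader: strip '!' comments, split on the
    # first ':', and expand ${var} against earlier keys or the environment.
    settings = {}
    with open(path) as f:
        for line in f:
            line = line.split('!')[0].strip()
            if not line or ':' not in line:
                continue
            key, value = [s.strip() for s in line.split(':', 1)]
            value = re.sub(r'\$\{(\w+)\}',
                           lambda m: settings.get(m.group(1), os.environ.get(m.group(1), m.group(0))),
                           value)
            settings[key] = value
    return settings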
da/tools/initexit.py
...
...
@@ -332,25 +332,20 @@ class CycleControl(dict):
            strippedname = os.path.split(self['jobrcfilename'])[-1]
            self['jobrcfilename'] = os.path.join(self['dir.exec'], strippedname)

        # shutil.copy(os.path.join(self.dasystem['regionsfile']),os.path.join(self['dir.exec'],'da','analysis','copied_regions.nc'))
        logging.info('Copied regions file to the analysis directory: %s' % os.path.join(self.dasystem['regionsfile']))

        if 'extendedregionsfile' in self.dasystem:
            # shutil.copy(os.path.join(self.dasystem['extendedregionsfile']),os.path.join(self['dir.exec'],'da','analysis','copied_regions_extended.nc'))
            logging.info('Copied extended regions file to the analysis directory: %s' % os.path.join(self.dasystem['extendedregionsfile']))
        else:
            # shutil.copy(os.path.join(self['dir.exec'],'da','analysis','olson_extended.nc'),os.path.join(self['dir.exec'],'da','analysis','copied_regions_extended.nc'))
            logging.info('Copied extended regions within the analysis directory: %s' % os.path.join(self['dir.exec'], 'da', 'analysis', 'olson_extended.nc'))

        for filename in glob.glob(os.path.join(self['dir.exec'], 'da', 'analysis', '*.pickle')):
            logging.info('Deleting pickle file %s to make sure the correct regions are used' % os.path.split(filename)[1])
            os.remove(filename)
        for filename in glob.glob(os.path.join(self['dir.exec'], '*.pickle')):
            logging.info('Deleting pickle file %s to make sure the correct regions are used' % os.path.split(filename)[1])
            os.remove(filename)

        #for filename in glob.glob(os.path.join(self['dir.exec'],'da','analysis','*.pickle')):
        #    logging.info('Deleting pickle file %s to make sure the correct regions are used'%os.path.split(filename)[1])
        #    os.remove(filename)
        #for filename in glob.glob(os.path.join(self['dir.exec'],'*.pickle')):
        #    logging.info('Deleting pickle file %s to make sure the correct regions are used'%os.path.split(filename)[1])
        #    os.remove(filename)

        if 'random.seed.init' in self.dasystem:
            self.read_random_seed(True)

        self.parse_times()
        #self.write_rc(self['jobrcfilename'])

    def setup_file_structure(self):
        """
...
...
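When 'random.seed.init' is present in the da-system settings, the cycle restores a saved random state via self.read_random_seed(True). A hypothetical reconstruction of what such a routine could look like, assuming the state is kept as a pickled numpy random state; the actual CycleControl method and file locations may differ:

import os
import pickle
import logging
import numpy as np

def read_random_seed_sketch(dacycle, init=False):
    # Hypothetical sketch, not CycleControl.read_random_seed itself: restore numpy's
    # global random state so a restarted cycle reproduces the same perturbations.
    if init:
        filename = dacycle.dasystem['random.seed.init']                       # seed file named in the rc settings (assumed)
    else:
        filename = os.path.join(dacycle['dir.restart'], 'randomseed.pickle')  # assumed restart location
    with open(filename, 'rb') as f:
        np.random.set_state(pickle.load(f))
    logging.info('Random state restored from %s' % filename)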
templates/template.py
...
...
@@ -29,7 +29,7 @@ from da.tools.initexit import start_logger, validate_opts_args, parse_options, C
from da.tools.pipeline import ensemble_smoother_pipeline, header, footer
from da.platform.capegrim import CapeGrimPlatform
from da.baseclasses.dasystem import DaSystem
from da.co2gridded.statevector import CO2GriddedStateVector
from da.baseclasses.statevector import StateVector
from da.baseclasses.obs import Observations
from da.baseclasses.optimizer import Optimizer
from da.baseclasses.observationoperator import RandomizerObservationOperator
...
...
@@ -54,7 +54,7 @@ platform = CapeGrimPlatform()
dasystem = DaSystem(dacycle['da.system.rc'])
obsoperator = RandomizerObservationOperator(dacycle['da.obsoperator.rc'])
samples = Observations()
statevector = CO2GriddedStateVector()
statevector = StateVector()
optimizer = Optimizer()
##########################################################################################
...
...
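The remainder of the template (outside this diff) hands the objects constructed above to the smoother pipeline. A sketch of that wiring, based only on the imports shown here; the exact call and argument order in this revision are assumptions:

# Sketch of the typical closing lines of a CTDAS template script (assumed, not part of this diff):
logging.info(header + "Starting CTDAS inversion cycle" + footer)
ensemble_smoother_pipeline(dacycle, platform, dasystem, samples, statevector, obsoperator, optimizer)
logging.info(header + "CTDAS inversion cycle finished" + footer)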