Smith, Naomi / CTDAS

Commit 37ba0d6f
Authored Jun 08, 2011 by Peters, Wouter
Commit message: project ready to go
Parent: d1c3556b
Changes: 78 files
da/__init__.py (new file, mode 100644)

da/analysis/NamingScheme.wp_Mar2011.rc (new file, mode 100644)
! output naming scheme for CarbonTracker savestate files, version May 07 by Wouter Peters
! assimilated quantities, observation details
assimilated.co2_mixingratio.observed : co2_obs_fcast
assimilated.latitude.observed : lat_obs_fcast
assimilated.longitude.observed : lon_obs_fcast
assimilated.height.observed : height_obs_fcast
assimilated.code.observed : stationnames_obs_fcast
assimilated.time.observed : itau_obs_fcast
assimilated.eventnumber.observed : eventnumber_obs_fcast
! assimilated quantities, simulation details
assimilated.co2_mixingratio.simulated : co2_sim_fcast
assimilated.flag.simulated : flag_sim_fcast
assimilated.hphr.simulated : hqhr_sim_fcast
assimilated.modeldatamismatch.simulated : error_sim_fcast
assimilated.co2_mixingratio.ensemble.simulated : dF
! analysis quantities, sampled after each optimization and thus not necessarily final
analyzed.co2_mixingratio.simulated : co2_sim_ana
! same for quantities sampled from optimized/final results
final.co2_mixingratio.observed : co2_obs_final
final.latitude.observed : lat_obs_final
final.longitude.observed : lon_obs_final
final.height.observed : height_obs_final
final.code.observed : stationnames_obs_final
final.time.observed : itau_obs_final
final.eventnumber.observed : eventnumber_obs_final
! final optimized quantities, simulation details
final.co2_mixingratio.simulated : co2_sim_final
final.co2_bg_mixingratio.simulated : co2_bg_sim_final
final.co2_fossil_mixingratio.simulated : co2_ff_sim_final
final.co2_fires_mixingratio.simulated : co2_fires_sim_final
final.co2_bio_mixingratio.simulated : co2_bio_sim_final
final.co2_ocean_mixingratio.simulated : co2_ocean_sim_final
final.co2_mixingratio.ensemble.simulated : dF_f
! background fluxes
background.co2.fossil.flux : flux_ff_prior_mean
background.co2.fires.flux : flux_fires_prior_mean
background.co2.bio.flux : flux_bio_prior_mean
background.co2.ocean.flux : flux_ocean_prior_mean
background.co2.res.flux : flux_res_prior_mean
background.co2.gpp.flux : flux_gpp_prior_mean
! optimized fluxes
final.co2.fossil.flux : flux_ff_post_mean
final.co2.fires.flux : flux_fires_post_mean
final.co2.bio.flux : flux_bio_post_mean
final.co2.ocean.flux : flux_ocean_post_mean
final.co2.res.flux : flux_res_post_mean
final.co2.gpp.flux : flux_gpp_post_mean
! background parameters
background.param.mean : xpc
background.param.ensemble : pdX
! optimized parameters
final.param.mean : xac
final.param.ensemble : adX
final.param.mean.1x1 : flux_multiplier_m
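
The keys on the left are dotted internal names (presumably the entries looked up through rundat.namedict in expand_fluxes.py below), and the values on the right are the corresponding variable names in the CarbonTracker savestate files. As an illustration only (this parser is not part of the commit, and the helper name read_naming_scheme is hypothetical), a "key : value" rc file of this form could be loaded into such a dictionary with a few lines of Python:

def read_naming_scheme(filename):
    """ Sketch: read a 'key : value' naming scheme rc file into a dictionary. """
    namedict = {}
    for line in open(filename):
        line = line.strip()
        if not line or line.startswith('!'):
            continue  # skip blank lines and '!' comment lines
        key, value = line.split(':', 1)
        namedict[key.strip()] = value.strip()
    return namedict

# Hypothetical usage:
# namedict = read_naming_scheme('da/analysis/NamingScheme.wp_Mar2011.rc')
# namedict['final.param.mean.1x1']  ->  'flux_multiplier_m'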
da/analysis/__init__.py (new file, mode 100644)

da/analysis/expand_fluxes.py (new file, mode 100755)
#!/usr/bin/env python
# expand_fluxes.py

import os
import sys
import getopt

from pylab import array, arange, transpose, date2num
from runinfo import *
from datetime import datetime
from da.tools.io4 import CT_CDF, std_savedict, GetVariable
from da.tools.general import CreateDirs

"""
Author: Wouter Peters (Wouter.Peters@noaa.gov)

Revision History:
File created on 21 October 2008.
"""

import sys

sys.path.append('../../')


def proceed_dialog(txt, yes=['y', 'yes'], all=['a', 'all', 'yes-to-all']):
    """ function to ask whether to proceed or not """

    response = raw_input(txt)
    if response.lower() in yes:
        return 1
    if response.lower() in all:
        return 2
    return 0

def SaveWeeklyAvg1x1Data(rundat):
    """ Function SaveWeeklyAvg1x1Data saves weekly surface flux data to NetCDF files

        *** Inputs ***
        rundat : a RunInfo object

        *** Outputs ***
        NetCDF file containing weekly global surface fluxes at 1x1 degrees

        *** Example ***
        ./expand_savestate project=enkf_release sd=20000101 ed=20010101 """

    import da.tools.io4 as io
    #
    dirname = 'data_flux1x1_weekly'
    #
    dirname = CreateDirs(os.path.join(rundat.outputdir, dirname))
    #
    # Some help variables
    #
    dectime0 = date2num(datetime(2000, 1, 1))
    dt = rundat.dt
    #
    # Create or open NetCDF output file
    #
    saveas = os.path.join(dirname, 'flux_1x1.nc')
    ncf = io.CT_CDF(saveas, 'write')
    #
    # Create dimensions and lat/lon grid
    #
    dimgrid = ncf.AddLatLonDim()
    dimdate = ncf.AddDateDim()
    #
    # set title and tell GMT that we are using "pixel registration"
    #
    setattr(ncf, 'Title', 'CarbonTracker fluxes')
    setattr(ncf, 'node_offset', 1)
    #
    # write data from each week
    #
    sys.stdout.write('[')
    sys.stdout.flush()

    for week in rundat.weeks:
        # setup time interval
        #
        # skip dataset if already in file
        #
        skip = ncf.has_date(date2num(week + dt / 2) - dectime0)
        if skip:
            sys.stdout.write('#')
            sys.stdout.flush()
            continue
        #
        # if not, process this week. Start by getting input data from TM5
        #
        filename = os.path.join(rundat.inputdir, '%s' % week.strftime('%Y%m%d'),
                                'flux1x1_%s_%s.nc' % (week.strftime('%Y%m%d%H'), (week + dt).strftime('%Y%m%d%H'),))

        file = io.CT_CDF(filename, 'read')
        bio = array(file.GetVariable(rundat.namedict['background.co2.bio.flux']))
        ocean = array(file.GetVariable(rundat.namedict['background.co2.ocean.flux']))
        fire = array(file.GetVariable(rundat.namedict['background.co2.fires.flux']))
        fossil = array(file.GetVariable(rundat.namedict['background.co2.fossil.flux']))
        mapped_parameters = array(file.GetVariable(rundat.namedict['final.param.mean.1x1']))
        file.close()

        next = ncf.inq_unlimlen()[0]

        # Start adding datasets from here on, both prior and posterior datasets for bio and ocn
        for prior in [False, True]:
            #
            # if prior, do not multiply fluxes with parameters, otherwise do
            #
            if prior:
                qual_short = 'prior'
                biomapped = bio
                oceanmapped = ocean
            else:
                qual_short = 'opt'
                biomapped = bio * mapped_parameters
                oceanmapped = ocean * mapped_parameters
            #
            # For each dataset, get the standard definitions from the module mysettings, add values, dimensions, and unlimited count, then write
            #
            savedict = ncf.StandardVar(varname='bio_flux_' + qual_short)
            savedict['values'] = biomapped.tolist()
            savedict['dims'] = dimdate + dimgrid
            savedict['count'] = next
            savedict['nsets'] = 1
            ncf.AddData(savedict)
            #
            savedict = ncf.StandardVar(varname='ocn_flux_' + qual_short)
            savedict['name'] = 'ocn_flux_' + qual_short
            savedict['values'] = oceanmapped.tolist()
            savedict['dims'] = dimdate + dimgrid
            savedict['count'] = next
            ncf.AddData(savedict)
        #
        savedict = ncf.StandardVar(varname='fire_flux_imp')
        savedict['values'] = fire.tolist()
        savedict['dims'] = dimdate + dimgrid
        savedict['count'] = next
        ncf.AddData(savedict)
        #
        savedict = ncf.StandardVar(varname='fossil_flux_imp')
        savedict['values'] = fossil.tolist()
        savedict['dims'] = dimdate + dimgrid
        savedict['count'] = next
        ncf.AddData(savedict)
        #
        savedict = ncf.StandardVar(varname='date')
        savedict['values'] = date2num(week + dt / 2) - dectime0
        savedict['dims'] = dimdate
        savedict['count'] = next
        ncf.AddData(savedict)

        sys.stdout.write('.')
        sys.stdout.flush()
    #
    # Done, close the new NetCDF file
    #
    ncf.close()
    sys.stdout.write(']')
    sys.stdout.flush()
    print ''
    #
    # Return the full name of the NetCDF file so it can be processed by the next routine
    #
    return saveas
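
# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original file; the RunInfo()
# constructor call is hypothetical). The Save* routines in this module are
# driven by a RunInfo object from runinfo.py, and each returns the path of
# the NetCDF file it wrote, so the steps can be chained:
#
#     rundat = RunInfo()
#     file1x1 = SaveWeeklyAvg1x1Data(rundat)   # writes flux_1x1.nc
#     fileeco = SaveWeeklyAvgEcoData(rundat)   # writes ecofluxes.nc
#     filetc  = SaveWeeklyAvgTCData(rundat)    # aggregates flux_1x1.nc to tcfluxes.nc
# ---------------------------------------------------------------------------
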
def SaveWeeklyAvgEcoData(rundat):
    """ Function SaveWeeklyAvgEcoData saves weekly surface flux data to NetCDF files

        *** Inputs ***
        rundat : a RunInfo object
    """
    from da.analysis.tools_regions import globarea
    from numpy import dot

    dirname = 'data_eco_weekly'
    dirname = CreateDirs(os.path.join(rundat.outputdir, dirname))

    # Create NetCDF output file
    #
    saveas = os.path.join(dirname, 'ecofluxes.nc')
    ncf = CT_CDF(saveas, 'write')
    dimdate = ncf.AddDateDim()
    dimidateformat = ncf.AddDateDimFormat()
    dimregs = ncf.AddRegionDim(type='eco')
    #
    dectime0 = date2num(datetime(2000, 1, 1))
    area = globarea()
    dt = rundat.dt

    print '['
    for week, savefile in zip(rundat.weeks, rundat.inputfiles):

        skip = ncf.has_date(date2num(week + dt / 2) - dectime0)
        if skip:
            sys.stdout.write('#')
            sys.stdout.flush()
            continue

        try:
            hdf = HDF.SD(savefile)
        except:
            print 'An error occurred opening the file: %s' % savefile
            print 'Exiting...'
            sys.exit(2)

        bio = GetVariable(hdf, rundat.namedict['background.co2.bio.flux'])
        oce = GetVariable(hdf, rundat.namedict['background.co2.ocean.flux'])
        ff = GetVariable(hdf, rundat.namedict['background.co2.fossil.flux'])
        fire = GetVariable(hdf, rundat.namedict['background.co2.fires.flux'])
        priorensemble = GetVariable(hdf, rundat.namedict['background.param.ensemble'] + '_05')
        postparams = GetVariable(hdf, rundat.namedict['final.param.mean'] + '_01')
        postensemble = GetVariable(hdf, rundat.namedict['final.param.ensemble'] + '_01')
        hdf.end()

        next = ncf.inq_unlimlen()[0]
        print next

        data = map_to_regions(rundat, bio * area)  # units of mole region-1 s-1
        savedict = ncf.StandardVar(varname='bio_flux_prior')
        savedict['values'] = data.tolist()
        savedict['dims'] = dimdate + dimregs
        savedict['units'] = 'mol region-1 s-1'
        savedict['count'] = next
        ncf.AddData(savedict)

        data = postparams * map_to_regions(rundat, bio * area)  # units of mole region-1 s-1
        savedict = ncf.StandardVar(varname='bio_flux_opt')
        savedict['values'] = data.tolist()
        savedict['dims'] = dimdate + dimregs
        savedict['units'] = 'mol region-1 s-1'
        savedict['count'] = next
        ncf.AddData(savedict)

        data = map_to_regions(rundat, bio * area)  # units of mole region-1 s-1
        priorfluxens = data * priorensemble[1:, :]
        covdata = dot(transpose(priorfluxens), priorfluxens) / (rundat.nmembers - 1)
        savedict = ncf.StandardVar(varname='bio_flux_prior_cov')
        savedict['values'] = covdata.tolist()
        savedict['dims'] = dimdate + dimregs + dimregs
        savedict['count'] = next
        ncf.AddData(savedict)

        data = map_to_regions(rundat, bio * area)  # units of mole region-1 s-1
        postfluxens = data * postensemble[1:, :]
        covdata = dot(transpose(postfluxens), postfluxens) / (rundat.nmembers - 1)
        savedict = ncf.StandardVar(varname='bio_flux_opt_cov')
        savedict['values'] = covdata.tolist()
        savedict['dims'] = dimdate + dimregs + dimregs
        savedict['count'] = next
        ncf.AddData(savedict)

        data = map_to_regions(rundat, oce * area)  # units of mole region-1 s-1
        savedict = ncf.StandardVar(varname='ocn_flux_prior')
        savedict['values'] = data.tolist()
        savedict['dims'] = dimdate + dimregs
        savedict['units'] = 'mol region-1 s-1'
        savedict['count'] = next
        ncf.AddData(savedict)

        data = postparams * map_to_regions(rundat, oce * area)  # units of mole region-1 s-1
        savedict = ncf.StandardVar(varname='ocn_flux_opt')
        savedict['values'] = data.tolist()
        savedict['dims'] = dimdate + dimregs
        savedict['units'] = 'mol region-1 s-1'
        savedict['count'] = next
        ncf.AddData(savedict)

        data = map_to_regions(rundat, oce * area)  # units of mole region-1 s-1
        priorfluxens = data * priorensemble[1:, :]
        covdata = dot(transpose(priorfluxens), priorfluxens) / (rundat.nmembers - 1)
        savedict = ncf.StandardVar(varname='ocn_flux_prior_cov')
        savedict['values'] = covdata.tolist()
        savedict['dims'] = dimdate + dimregs + dimregs
        savedict['units'] = 'mol2 region-2 s-2'
        savedict['count'] = next
        ncf.AddData(savedict)

        data = map_to_regions(rundat, oce * area)  # units of mole region-1 s-1
        postfluxens = data * postensemble[1:, :]
        covdata = dot(transpose(postfluxens), postfluxens) / (rundat.nmembers - 1)
        savedict = ncf.StandardVar(varname='ocn_flux_opt_cov')
        savedict['values'] = covdata.tolist()
        savedict['dims'] = dimdate + dimregs + dimregs
        savedict['units'] = 'mol2 region-2 s-2'
        savedict['count'] = next
        ncf.AddData(savedict)

        data = map_to_regions(rundat, fire * area)  # units of mole region-1 s-1
        savedict = ncf.StandardVar(varname='fire_flux_imp')
        savedict['values'] = data.tolist()
        savedict['dims'] = dimdate + dimregs
        savedict['units'] = 'mol region-1 s-1'
        savedict['count'] = next
        ncf.AddData(savedict)

        data = map_to_regions(rundat, ff * area)  # units of mole region-1 s-1
        savedict = ncf.StandardVar(varname='fossil_flux_imp')
        savedict['values'] = data.tolist()
        savedict['dims'] = dimdate + dimregs
        savedict['units'] = 'mol region-1 s-1'
        savedict['count'] = next
        ncf.AddData(savedict)

        date = (week + rundat.dt / 2)
        savedict = ncf.StandardVar(varname='date')
        savedict['values'] = date2num(date) - dectime0
        savedict['dims'] = dimdate
        savedict['count'] = next
        ncf.AddData(savedict)

        sys.stdout.write('.')
        sys.stdout.flush()

    ncf.close()
    print ']'

    return saveas
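
# ---------------------------------------------------------------------------
# Illustrative note (not part of the original file): the *_cov variables
# written above are ensemble sample covariances of the regional fluxes. With
# fluxens holding the flux ensemble (members from index 1 onward) as rows and
# the regions as columns, the stored matrix is
#
#     cov = dot(transpose(fluxens), fluxens) / (nmembers - 1)
#
# an (nregions x nregions) matrix in units of mol2 region-2 s-2.
# ---------------------------------------------------------------------------
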
def SaveWeeklyAvgTCData(rundat):
    """ Function SaveWeeklyAvgTCData saves weekly surface flux data, aggregated to TransCom regions, to NetCDF files

        *** Inputs ***
        rundat : a RunInfo object
    """
    from da.analysis.tools_regions import globarea, StateToGrid
    from da.analysis.tools_transcom import StateToTranscom, StateCovToTranscom, transcommask
    import da.tools.io4 as io
    from numpy import dot

    dirname = 'data_tc_weekly'
    dirname = CreateDirs(os.path.join(rundat.outputdir, dirname))

    # Create NetCDF output file
    #
    saveas = os.path.join(dirname, 'tcfluxes.nc')
    ncf = io.CT_CDF(saveas, 'create')
    dimdate = ncf.AddDateDim()
    dimidateformat = ncf.AddDateDimFormat()
    dimregs = ncf.AddRegionDim(type='tc')
    #
    dectime0 = date2num(datetime(2000, 1, 1))
    area = globarea()
    dt = rundat.dt

    infile = os.path.join(rundat.outputdir, 'data_flux1x1_weekly', 'flux_1x1.nc')
    if not os.path.exists(infile):
        print "Needed input file (%s) does not exist yet, please create weekly eco flux files first, returning..." % infile
        return None

    ncf_in = io.CT_CDF(infile)
    vardict = ncf_in.variables
    for vname, vprop in vardict.iteritems():

        data = ncf_in.GetVariable(vname)[:]

        if vname == 'latitude':
            continue
        elif vname == 'longitude':
            continue
        elif vname == 'date':
            dims = dimdate
        elif vname == 'idate':
            dims = dimdate + dimidateformat

        if vname not in ['date', 'idate']:

            if 'cov' in vname:
                alldata = []
                for dd in data:
                    dd = StateCovToGrid(dd * area, transcommask, reverse=True)
                    alldata.append(dd)
                data = array(alldata)
                dims = dimdate + dimregs + dimregs
            else:
                alldata = []
                for dd in data:
                    dd = StateToGrid(dd * area, transcommask, reverse=True)
                    alldata.append(dd)
                data = array(alldata)
                dims = dimdate + dimregs

        print vname, data.shape

        savedict = ncf.StandardVar(varname=vname)
        savedict['values'] = data.tolist()
        savedict['dims'] = dims
        savedict['units'] = 'mol/region/s'
        savedict['count'] = 0
        ncf.AddData(savedict, nsets=data.shape[0])

    ncf.close()

    return saveas
def SaveTCDataExt(rundat):
    """ Function SaveTCDataExt saves surface flux data to NetCDF files for extended TransCom regions

        *** Inputs ***
        rundat : a RunInfo object

        *** Outputs ***
        NetCDF file containing n-hourly global surface fluxes per TransCom region

        *** Example ***
        ./expand_savestate project=enkf_release sd=20000101 ed=20010101 """

    from da.analysis.tools_transcom import *
    import da.tools.io4 as io

    infile = os.path.join(rundat.outputdir, 'data_tc_weekly', 'tcfluxes.nc')
    if not os.path.exists(infile):
        print "Needed input file (%s) does not exist yet, please create weekly tc flux files first, returning..." % infile
        return None

    # Create NetCDF output file
    #
    saveas = os.path.join(rundat.outputdir, 'data_tc_weekly', 'tc_extfluxes.nc')
    ncf = io.CT_CDF(saveas, 'create')
    dimdate = ncf.AddDateDim()
    dimidateformat = ncf.AddDateDimFormat()
    dimregs = ncf.AddRegionDim(type='tc_ext')

    ncf_in = io.CT_CDF(infile, 'read')
    vardict = ncf_in.variables
    for vname, vprop in vardict.iteritems():

        data = ncf_in.GetVariable(vname)[:]

        if vname == 'latitude':
            continue
        elif vname == 'longitude':
            continue
        elif vname == 'date':
            dims = dimdate
        elif vname == 'idate':
            dims = dimdate + dimidateformat

        if vname not in ['date', 'idate']:

            if 'cov' in vname:
                alldata = []
                for dd in data:
                    dd = StateCovToGrid(dd * area, transcommask, reverse=True)
                    alldata.append(dd)
                data = array(alldata)
                dims = dimdate + dimregs + dimregs
            else:
                data = ExtendedTCRegions(data)
                dims = dimdate + dimregs

        print vname, data.shape

        savedict = ncf.StandardVar(varname=vname)
        savedict['values'] = data.tolist()
        savedict['dims'] = dims
        savedict['units'] = 'mol/region/s'
        savedict['count'] = 0
        ncf.AddData(savedict, nsets=data.shape[0])

    ncf.close()

    return saveas
def SaveEcoDataExt(rundat):
    """ Function SaveEcoDataExt saves surface flux data to NetCDF files for extended ecoregions

        *** Inputs ***
        rundat : a RunInfo object

        *** Outputs ***
        NetCDF file containing n-hourly global surface fluxes per TransCom region

        *** Example ***
        ./expand_savestate project=enkf_release sd=20000101 ed=20010101 """

    from ecotools import xte
    from da.analysis.tools_transcom import LookUpName
    import pycdf as CDF

    infile = os.path.join(rundat.outputdir, 'data_eco_weekly', 'ecofluxes.nc')
    if not os.path.exists(infile):
        print "Needed input file (%s) does not exist yet, please create weekly ecoflux files first, returning..." % infile
        return None

    # Create NetCDF output file
    #
    saveas = os.path.join(rundat.outputdir, 'data_eco_weekly', 'eco_extfluxes.nc')
    ncf = CT_CDF(saveas, 'create')
    dimdate = ncf.AddDateDim()
    dimidateformat = ncf.AddDateDimFormat()
    dimregs = ncf.AddRegionDim(type='tc_ext')

    ncf_in = CDF.CDF(infile)
    vardict = ncf_in.variables()
    for vname, vprop in vardict.iteritems():
        dims = ()

        data = array(ncf_in.var(vname)[:])
        atts = ncf_in.var(vname).attributes()

        if vname not in ['date', 'idate']:
            if 'cov' in vname:
                data = xte(data[:, 0:rundat.n_land, 0:rundat.n_land], cov=True)
                dims = dimdate + dimregs + dimregs
            else:
                data = xte(data[:, 0:rundat.n_land])
                dims = dimdate + dimregs
        elif vname == 'date':
            dims = dimdate
        elif vname == 'idate':
            dims = dimdate + dimidateformat
        else:
            print 'Dataset with unknown dimensions encountered in file: %s' % vname

        savedict = ncf.StandardVar(varname=vname)
        savedict['units'] = atts['units']
        savedict['values'] = data.tolist()
        savedict['dims'] = dims
        ncf.AddData(savedict, nsets=data.shape[0])

    ncf.close()

    return saveas
def SaveTimeAvgData(rundat, infile, avg='monthly'):
    """ Function saves time mean surface flux data to NetCDF files

        *** Inputs ***
        rundat : a RunInfo object