Skip to content
Snippets Groups Projects
Commit a07e81c7 authored by Peters, Wouter's avatar Peters, Wouter
Browse files

routine can now be run as part of a cycle so that analysis finishes directly...

routine can now be run as part of a cycle so that analysis finishes directly instead of post-processing
parent 440a0da9
Branches
No related tags found
No related merge requests found
#!/usr/bin/env python
# expand_fluxes.py
import os
import sys
sys.path.append('../../')
import os
import getopt
from pylab import array, arange, transpose, date2num
from runinfo import *
from datetime import datetime
from da.tools.io4 import CT_Read, CT_CDF, std_savedict, GetVariable
from da.tools.general import CreateDirs
from da.tools.io4 import CT_Read, CT_CDF, std_savedict, GetVariable
"""
Author: Wouter Peters (Wouter.Peters@noaa.gov)
......@@ -16,8 +17,6 @@ Revision History:
File created on 21 October 2008.
"""
import sys
sys.path.append('../../')
def proceed_dialog(txt, yes=['y','yes'], all=['a','all', 'yes-to-all']):
""" function to ask whether to proceed or not """
......@@ -28,7 +27,7 @@ def proceed_dialog(txt, yes=['y','yes'], all=['a','all', 'yes-to-all']):
return 2
return 0
def SaveWeeklyAvg1x1Data(rundat):
def SaveWeeklyAvg1x1Data(DaCycle):
""" Function SaveFlux1x1Data saves 3-hourly surface flux data to NetCDF files
*** Inputs ***
......@@ -41,15 +40,22 @@ def SaveWeeklyAvg1x1Data(rundat):
./expand_savestate project=enkf_release sd=20000101 ed=20010101 """
import da.tools.io4 as io
import logging
#
dirname = 'data_flux1x1_weekly'
#
dirname = CreateDirs(os.path.join(rundat.outputdir,dirname))
dirname = CreateDirs(os.path.join(DaCycle['dir.analysis'],dirname))
#
# Some help variables
#
dectime0 = date2num(datetime(2000,1,1))
dt = rundat.dt
dt = DaCycle['cyclelength']
startdate = DaCycle['time.start']
enddate = DaCycle['time.end']
msg = "DA Cycle start date is %s" % startdate.strftime('%Y-%m-%d %H:%M') ; logging.info(msg)
msg = "DA Cycle end date is %s" % enddate.strftime('%Y-%m-%d %H:%M') ; logging.info(msg)
#
# Create or open NetCDF output file
#
......@@ -67,32 +73,25 @@ def SaveWeeklyAvg1x1Data(rundat):
setattr(ncf,'Title','CarbonTracker fluxes')
setattr(ncf,'node_offset',1)
#
# write data from each week
#
sys.stdout.write('[')
sys.stdout.flush()
for week in rundat.weeks:
# setup time interval
#
# skip dataset if already in file
#
skip = ncf.has_date(date2num(week+dt/2)-dectime0)
if skip:
sys.stdout.write('#')
sys.stdout.flush()
continue
ncfdate = date2num(startdate)-dectime0+dt.days/2.0
skip = ncf.has_date(ncfdate)
if skip:
msg = 'Skipping writing of data for date %s : already present in file %s' % (startdate.strftime('%Y-%m-%d'),saveas) ; logging.warning(msg)
else:
#
# if not, process this week. Start by getting input data from TM5
# if not, process this cycle. Start by getting input data from CTDAS
#
filename = os.path.join(rundat.inputdir,'%s'%week.strftime('%Y%m%d'),'flux1x1_%s_%s.nc'%(week.strftime('%Y%m%d%H'),(week+dt).strftime('%Y%m%d%H'),) )
filename = os.path.join(DaCycle['dir.output'],'flux1x1_%s_%s.nc'%(startdate.strftime('%Y%m%d%H'),enddate.strftime('%Y%m%d%H'),) )
file=io.CT_Read(filename,'read')
bio=array(file.GetVariable(rundat.namedict['background.co2.bio.flux']))
ocean=array(file.GetVariable(rundat.namedict['background.co2.ocean.flux']))
fire=array(file.GetVariable(rundat.namedict['background.co2.fires.flux']))
fossil=array(file.GetVariable(rundat.namedict['background.co2.fossil.flux']))
mapped_parameters=array(file.GetVariable(rundat.namedict['final.param.mean.1x1']))
file = io.CT_Read(filename,'read')
bio = array(file.GetVariable(DaCycle.DaSystem['background.co2.bio.flux']))
ocean = array(file.GetVariable(DaCycle.DaSystem['background.co2.ocean.flux']))
fire = array(file.GetVariable(DaCycle.DaSystem['background.co2.fires.flux']))
fossil = array(file.GetVariable(DaCycle.DaSystem['background.co2.fossil.flux']))
mapped_parameters = array(file.GetVariable(DaCycle.DaSystem['final.param.mean.1x1']))
file.close()
next=ncf.inq_unlimlen()[0]
......@@ -141,7 +140,7 @@ def SaveWeeklyAvg1x1Data(rundat):
ncf.AddData(savedict)
#
savedict=ncf.StandardVar(varname='date')
savedict['values']=date2num(week+dt/2)-dectime0
savedict['values']=date2num(startdate)-dectime0+dt.days/2.0
savedict['dims']=dimdate
savedict['count']=next
ncf.AddData(savedict)
......@@ -152,9 +151,6 @@ def SaveWeeklyAvg1x1Data(rundat):
# Done, close the new NetCDF file
#
ncf.close()
sys.stdout.write(']')
sys.stdout.flush()
print ''
#
# Return the full name of the NetCDF file so it can be processed by the next routine
#
......@@ -413,7 +409,7 @@ def SaveTCDataExt(rundat):
*** Example ***
./expand_savestate project=enkf_release sd=20000101 ed=20010101 """
from da.analysis.tools_transcom import *
from da.analysis.tools_transcom import StateCovToGrid, transcommask, ExtendedTCRegions
import da.tools.io4 as io
infile=os.path.join(rundat.outputdir,'data_tc_weekly','tcfluxes.nc')
......@@ -671,20 +667,39 @@ def SaveTimeAvgData(rundat,infile,avg='monthly'):
if __name__ == "__main__":
import logging
from da.tools.initexit import CycleControl, StartLogger
from da.ct.dasystem import CtDaSystem
sys.path.append('../../')
rundat=RunInfo(rcfile='../../da.rc')
StartLogger()
DaCycle = CycleControl(args={'rc':'../../da.rc'})
DaCycle.Initialize()
DaCycle.ParseTimes()
DaSystem = CtDaSystem('../rc/carbontracker.rc')
DaSystem.Initialize()
DaCycle.DaSystem = DaSystem
no, yes, all = range(3)
proceed = no
print rundat
print DaCycle
print DaSystem
# first, parse results from the inverse part of the run
if proceed != all:
proceed = proceed_dialog('Create average 1x1 flux data files? [y|yes, n|no, a|all|yes-to-all ]')
if proceed != no:
savedas=SaveWeeklyAvg1x1Data(rundat)
while DaCycle['time.start'] < DaCycle['time.finish']:
savedas=SaveWeeklyAvg1x1Data(DaCycle)
DaCycle.AdvanceCycleTimes()
sys.exit(2)
a=SaveTimeAvgData(rundat,savedas,avg='monthly')
a=SaveTimeAvgData(rundat,savedas,avg='yearly')
a=SaveTimeAvgData(rundat,savedas,avg='longterm')
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment