From 975b0f186aefa3f36e63116cf89fcb65a585d8ec Mon Sep 17 00:00:00 2001
From: Wouter Peters <wouter.peters@wur.nl>
Date: Tue, 1 Mar 2011 16:05:53 +0000
Subject: [PATCH] start of library for analysis, work in progress...

---
 da/analysis/NamingScheme.wp_Mar2011.rc |  73 +++
 da/analysis/expand_fluxes.py           | 706 +++++++++++++++++++++++++
 da/analysis/runinfo.py                 | 156 ++++++
 3 files changed, 935 insertions(+)
 create mode 100644 da/analysis/NamingScheme.wp_Mar2011.rc
 create mode 100755 da/analysis/expand_fluxes.py
 create mode 100755 da/analysis/runinfo.py

diff --git a/da/analysis/NamingScheme.wp_Mar2011.rc b/da/analysis/NamingScheme.wp_Mar2011.rc
new file mode 100644
index 00000000..094cb6c5
--- /dev/null
+++ b/da/analysis/NamingScheme.wp_Mar2011.rc
@@ -0,0 +1,73 @@
+! output naming scheme for CarbonTracker savestate files, version May 07 by Wouter Peters
+
+! assimilated quantities, observation details
+
+assimilated.co2_mixingratio.observed    : co2_obs_fcast
+assimilated.latitude.observed           : lat_obs_fcast
+assimilated.longitude.observed          : lon_obs_fcast
+assimilated.height.observed             : height_obs_fcast
+assimilated.code.observed               : stationnames_obs_fcast
+assimilated.time.observed               : itau_obs_fcast
+assimilated.eventnumber.observed        : eventnumber_obs_fcast
+
+! assimilated quantities, simulation details
+
+assimilated.co2_mixingratio.simulated   : co2_sim_fcast
+assimilated.flag.simulated              : flag_sim_fcast
+assimilated.hphr.simulated              : hqhr_sim_fcast
+assimilated.modeldatamismatch.simulated : error_sim_fcast
+assimilated.co2_mixingratio.ensemble.simulated   : dF
+
+! analysis quantities, sampled after each optimization and thus not necessarily final
+
+analyzed.co2_mixingratio.simulated : co2_sim_ana
+
+! same for quantities sampled from optimized/final results
+
+final.co2_mixingratio.observed    : co2_obs_final
+final.latitude.observed           : lat_obs_final
+final.longitude.observed          : lon_obs_final
+final.height.observed             : height_obs_final
+final.code.observed               : stationnames_obs_final
+final.time.observed               : itau_obs_final
+final.eventnumber.observed        : eventnumber_obs_final
+
+! final optimized quantities, simulation details
+
+final.co2_mixingratio.simulated         : co2_sim_final
+final.co2_bg_mixingratio.simulated      : co2_bg_sim_final
+final.co2_fossil_mixingratio.simulated  : co2_ff_sim_final
+final.co2_fires_mixingratio.simulated   : co2_fires_sim_final
+final.co2_bio_mixingratio.simulated     : co2_bio_sim_final
+final.co2_ocean_mixingratio.simulated   : co2_ocean_sim_final
+final.co2_mixingratio.ensemble.simulated : dF_f
+
+! background fluxes
+
+background.co2.fossil.flux  : flux_ff_prior_mean
+background.co2.fires.flux   : flux_fires_prior_mean
+background.co2.bio.flux     : flux_bio_prior_mean
+background.co2.ocean.flux   : flux_ocean_prior_mean
+background.co2.res.flux     : flux_res_prior_mean
+background.co2.gpp.flux     : flux_gpp_prior_mean
+
+! optimized fluxes
+
+final.co2.fossil.flux  : flux_ff_post_mean
+final.co2.fires.flux   : flux_fires_post_mean
+final.co2.bio.flux     : flux_bio_post_mean
+final.co2.ocean.flux   : flux_ocean_post_mean
+final.co2.res.flux     : flux_res_post_mean
+final.co2.gpp.flux     : flux_gpp_post_mean
+
+! background parameters
+
+background.param.mean       : xpc
+background.param.ensemble   : pdX
+
+! optimized parameters
+
+final.param.mean       : xac
+final.param.ensemble   : adX
+final.param.mean.1x1   : flux_multiplier_m
+
diff --git a/da/analysis/expand_fluxes.py b/da/analysis/expand_fluxes.py
new file mode 100755
index 00000000..6cac7e90
--- /dev/null
+++ b/da/analysis/expand_fluxes.py
@@ -0,0 +1,706 @@
+#!/usr/bin/env python
+# expand_fluxes.py
+import os
+import sys
+import getopt
+from pylab import array, arange, transpose, date2num
+from runinfo import *
+from datetime import datetime, timedelta
+from ct_netcdf import CT_CDF, std_savedict, GetVariable, CreateDirs
+
+"""
+Author: Wouter Peters (Wouter.Peters@noaa.gov)
+
+Revision History:
+File created on 21 October 2008.
+
+"""
+import sys
+sys.path.append('../../')
+
+def proceed_dialog(txt, yes=['y','yes'], all=['a','all', 'yes-to-all']):
+    """ function to ask whether to proceed or not """
+    response=raw_input(txt)
+    if response.lower() in yes:
+       return 1
+    if response.lower() in all:
+       return 2
+    return 0
+
+def SaveWeeklyAvg1x1Data(rundat):
+    """ Function SaveWeeklyAvg1x1Data saves weekly surface flux data to NetCDF files
+        
+        *** Inputs ***
+        rundat : a RunInfo object
+
+        *** Outputs ***
+        NetCDF file containing weekly global surface fluxes at 1x1 degree
+
+        *** Example ***
+        ./expand_savestate project=enkf_release sd=20000101 ed=20010101 """
+
+    import da.tools.io4 as io
+#
+    dirname     = 'data_flux1x1_weekly'
+#
+    dirname     = CreateDirs(rundat,dirname)
+#
+# Some help variables
+#
+    dectime0    = date2num(datetime(2000,1,1))
+    dt          = rundat.dt
+#
+# Create or open NetCDF output file
+#
+    saveas      = os.path.join(dirname,'flux_1x1.nc')
+    ncf         = io.CT_CDF(saveas,'write')
+
+#
+# Create dimensions and lat/lon grid
+#
+    dimgrid     = ncf.AddLatLonDim()
+    dimdate     = ncf.AddDateDim()
+#
+# set title and tell GMT that we are using "pixel registration"
+#
+    setattr(ncf,'Title','CarbonTracker fluxes')
+    setattr(ncf,'node_offset',1)
+#
+# write data from each week
+#
+    sys.stdout.write('[')
+    sys.stdout.flush()
+    for week in rundat.weeks:
+        
+# setup time interval
+#
+# skip dataset if already in file
+#
+        skip    = ncf.has_date(date2num(week+dt/2)-dectime0)
+        if skip:
+            sys.stdout.write('#')
+            sys.stdout.flush()
+            continue
+#
+# if not, process this week. Start by getting input data from TM5
+#
+        filename = os.path.join(rundat.inputdir,'flux1x1_%s_%s.nc'%(week.strftime('%Y%m%d%H'),(week+dt).strftime('%Y%m%d%H'),) )
+
+        file=io.CT_CDF(filename,'read')
+        bio=array(file.GetVariable(rundat.namedict['background.co2.bio.flux']))
+        ocean=array(file.GetVariable(rundat.namedict['background.co2.ocean.flux']))
+        fire=array(file.GetVariable(rundat.namedict['background.co2.fires.flux']))
+        fossil=array(file.GetVariable(rundat.namedict['background.co2.fossil.flux']))
+        mapped_parameters=array(file.GetVariable(rundat.namedict['final.param.mean.1x1']))
+        file.close()
+
+        next=ncf.inq_unlimlen()[0]
+
+# Start adding datasets from here on, both prior and posterior datasets for bio and ocn
+
+        for prior in [False, True]:
+#
+# if prior, do not multiply fluxes with parameters, otherwise do
+#
+            if prior:
+                qual_short='prior'
+                biomapped=bio
+                oceanmapped=ocean
+            else:
+                qual_short='opt'
+                biomapped=bio*mapped_parameters 
+                oceanmapped=ocean*mapped_parameters 
+#
+#  For each dataset, get the standard definitions from the module mysettings, add values, dimensions, and unlimited count, then write
+#
+            savedict=ncf.StandardVar(varname='bio_flux_'+qual_short)
+            savedict['values']=biomapped.tolist()
+            savedict['dims']=dimdate+dimgrid
+            savedict['count']=next
+            savedict['nsets']=1
+            ncf.AddData(savedict)
+#
+            savedict=ncf.StandardVar(varname='ocn_flux_'+qual_short)
+            savedict['name']='ocn_flux_'+qual_short
+            savedict['values']=oceanmapped.tolist()
+            savedict['dims']=dimdate+dimgrid
+            savedict['count']=next
+            ncf.AddData(savedict)
+#
+        savedict=ncf.StandardVar(varname='fire_flux_imp')
+        savedict['values']=fire.tolist()
+        savedict['dims']=dimdate+dimgrid
+        savedict['count']=next
+        ncf.AddData(savedict)
+#
+        savedict=ncf.StandardVar(varname='fossil_flux_imp')
+        savedict['values']=fossil.tolist()
+        savedict['dims']=dimdate+dimgrid
+        savedict['count']=next
+        ncf.AddData(savedict)
+#
+        savedict=ncf.StandardVar(varname='date')
+        savedict['values']=date2num(week+dt/2)-dectime0
+        savedict['dims']=dimdate
+        savedict['count']=next
+        ncf.AddData(savedict)
+
+        sys.stdout.write('.')
+        sys.stdout.flush()
+#
+#   Done, close the new NetCDF file
+#
+    ncf.close()
+    sys.stdout.write(']')
+    sys.stdout.flush()
+    print ''
+#
+#   Return the full name of the NetCDF file so it can be processed by the next routine
+#
+
+    return saveas
+
+
+def SaveWeeklyAvgEcoData(rundat):
+    """ Function SaveWeeklyAvgEcoData saves weekly surface flux data to NetCDF files
+        
+        *** Inputs ***
+        rundat : a RunInfo object
+    """
+    from tm5tools import globarea
+    from numpy import dot
+
+    dirname='data_eco_weekly'
+
+    dirname=CreateDirs(rundat,dirname)
+
+    # Create NetCDF output file
+    #
+    saveas=os.path.join(rundat.outputdir,dirname,'ecofluxes.nc')
+    ncf=CT_CDF(saveas,'write')
+    dimdate=ncf.AddDateDim()
+    dimidateformat=ncf.AddDateDimFormat()
+    dimregs=ncf.AddRegionDim(rundat,type='eco')
+    dummy=ncf.AddRegionDefs(rundat,type='olson')
+    #
+    dectime0=date2num(datetime(2000,1,1))
+
+
+    area=globarea()
+    dt=rundat.dt
+    print '['
+    for week,savefile in zip(rundat.weeks,rundat.inputfiles):
+
+           skip=ncf.has_date(date2num(week+dt/2)-dectime0)
+           if skip:
+                sys.stdout.write('#')
+                sys.stdout.flush()
+                continue
+
+           try:
+                hdf=HDF.SD(savefile)
+           except: 
+                print 'An error occurred opening the file: %s'%savefile
+                print 'Exiting...'
+                sys.exit(2)
+
+           bio=GetVariable(hdf,rundat.namedict['background.co2.bio.flux'])
+           oce=GetVariable(hdf,rundat.namedict['background.co2.ocean.flux'])
+           ff=GetVariable(hdf,rundat.namedict['background.co2.fossil.flux'])
+           fire=GetVariable(hdf,rundat.namedict['background.co2.fires.flux'])
+           priorensemble=GetVariable(hdf,rundat.namedict['background.param.ensemble']+'_05')
+           postparams=GetVariable(hdf,rundat.namedict['final.param.mean']+'_01')
+           postensemble=GetVariable(hdf,rundat.namedict['final.param.ensemble']+'_01')
+
+           hdf.end()
+
+           next=ncf.inq_unlimlen()[0]
+           print next
+
+           data=map_to_regions(rundat,bio*area) # units of mole region-1 s-1
+
+           savedict=ncf.StandardVar(varname='bio_flux_prior')
+           savedict['values']=data.tolist()
+           savedict['dims']=dimdate+dimregs
+           savedict['units']='mol region-1 s-1'
+           savedict['count']=next
+           ncf.AddData(savedict)
+
+           data=postparams*map_to_regions(rundat,bio*area) # units of mole region-1 s-1
+
+           savedict=ncf.StandardVar(varname='bio_flux_opt')
+           savedict['values']=data.tolist()
+           savedict['dims']=dimdate+dimregs
+           savedict['units']='mol region-1 s-1'
+           savedict['count']=next
+           ncf.AddData(savedict)
+
+           data=map_to_regions(rundat,bio*area) # units of mole region-1 s-1
+           priorfluxens=data*priorensemble[1:,:]
+           covdata=dot(transpose(priorfluxens),priorfluxens)/(rundat.nmembers-1)
+
+           savedict=ncf.StandardVar(varname='bio_flux_prior_cov')
+           savedict['values']=covdata.tolist()
+           savedict['dims']=dimdate+dimregs+dimregs
+           savedict['count']=next
+           ncf.AddData(savedict)
+
+           data=map_to_regions(rundat,bio*area) # units of mole region-1 s-1
+           postfluxens=data*postensemble[1:,:]
+           covdata=dot(transpose(postfluxens),postfluxens)/(rundat.nmembers-1)
+
+           savedict=ncf.StandardVar(varname='bio_flux_opt_cov')
+           savedict['values']=covdata.tolist()
+           savedict['dims']=dimdate+dimregs+dimregs
+           savedict['count']=next
+           ncf.AddData(savedict)
+
+           data=map_to_regions(rundat,oce*area) # units of mole region-1 s-1
+
+           savedict=ncf.StandardVar(varname='ocn_flux_prior')
+           savedict['values']=data.tolist()
+           savedict['dims']=dimdate+dimregs
+           savedict['units']='mol region-1 s-1'
+           savedict['count']=next
+           ncf.AddData(savedict)
+
+           data=postparams*map_to_regions(rundat,oce*area) # units of mole region-1 s-1
+
+           savedict=ncf.StandardVar(varname='ocn_flux_opt')
+           savedict['values']=data.tolist()
+           savedict['dims']=dimdate+dimregs
+           savedict['units']='mol region-1 s-1'
+           savedict['count']=next
+           ncf.AddData(savedict)
+
+           data=map_to_regions(rundat,oce*area) # units of mole region-1 s-1
+           priorfluxens=data*priorensemble[1:,:]
+           covdata=dot(transpose(priorfluxens),priorfluxens)/(rundat.nmembers-1)
+
+           savedict=ncf.StandardVar(varname='ocn_flux_prior_cov')
+           savedict['values']=covdata.tolist()
+           savedict['dims']=dimdate+dimregs+dimregs
+           savedict['units']='mol2 region-2 s-2'
+           savedict['count']=next
+           ncf.AddData(savedict)
+
+           data=map_to_regions(rundat,oce*area) # units of mole region-1 s-1
+           postfluxens=data*postensemble[1:,:]
+           covdata=dot(transpose(postfluxens),postfluxens)/(rundat.nmembers-1)
+
+           savedict=ncf.StandardVar(varname='ocn_flux_opt_cov')
+           savedict['values']=covdata.tolist()
+           savedict['dims']=dimdate+dimregs+dimregs
+           savedict['units']='mol2 region-2 s-2'
+           savedict['count']=next
+           ncf.AddData(savedict)
+
+           data=map_to_regions(rundat,fire*area) # units of mole region-1 s-1
+
+           savedict=ncf.StandardVar(varname='fire_flux_imp')
+           savedict['values']=data.tolist()
+           savedict['dims']=dimdate+dimregs
+           savedict['units']='mol region-1 s-1'
+           savedict['count']=next
+           ncf.AddData(savedict)
+
+           data=map_to_regions(rundat,ff*area) # units of mole region-1 s-1
+
+           savedict=ncf.StandardVar(varname='fossil_flux_imp')
+           savedict['values']=data.tolist()
+           savedict['dims']=dimdate+dimregs
+           savedict['units']='mol region-1 s-1'
+           savedict['count']=next
+           ncf.AddData(savedict)
+
+           date=(week+rundat.dt/2)
+
+           savedict=ncf.StandardVar(varname='date')
+           savedict['values']=date2num(date)-dectime0
+           savedict['dims']=dimdate
+           savedict['count']=next
+           ncf.AddData(savedict)
+
+           sys.stdout.write('.')
+           sys.stdout.flush()
+         
+    ncf.close()
+    print ']'
+    return saveas
+
+def SaveWeeklyAvgTCData(rundat):
+    """ Function SaveWeeklyAvgTCData saves weekly surface flux data to NetCDF files
+        
+        *** Inputs ***
+        rundat : a RunInfo object
+    """
+    from tm5tools import globarea
+    from tctools import xto_tc, Pto_tc
+    from numpy import dot
+    import pycdf as CDF
+
+    dirname='data_tc_weekly'
+
+    dirname=CreateDirs(rundat,dirname)
+
+    # Create NetCDF output file
+    #
+    saveas=os.path.join(rundat.outputdir,dirname,'tcfluxes.nc')
+    ncf=CT_CDF(saveas,'create')
+    dimdate=ncf.AddDateDim()
+    dimidateformat=ncf.AddDateDimFormat()
+    dimregs=ncf.AddRegionDim(rundat,type='tc')
+    dummy=ncf.AddRegionDefs(rundat,type='tc')
+    #
+    dectime0=date2num(datetime(2000,1,1))
+
+
+    area=globarea()
+    dt=rundat.dt
+
+    infile=os.path.join(rundat.outputdir,'data_eco_weekly','ecofluxes.nc')
+    if not os.path.exists(infile):
+        print "Needed input file (%s) does not exist yet, please create weekly eco flux files first, returning..."%infile
+        return None
+
+    ncf_in=CDF.CDF(infile)
+    vardict=ncf_in.variables()
+    for vname, vprop in vardict.iteritems():
+        data=array(ncf_in.var(vname)[:])
+        atts=ncf_in.var(vname).attributes()
+        if vname not in ['date','idate']:
+                if 'cov' in vname: 
+                    data=Pto_tc(data.shape[-1],data)
+                    dims=dimdate+dimregs+dimregs
+                else: 
+                    data=xto_tc(data.shape[-1],data)
+                    dims=dimdate+dimregs
+        elif vname=='date': dims=dimdate
+        elif vname=='idate': dims=dimdate+dimidateformat
+        else:
+             print 'Dataset with unknown dimensions encountered in file: %s'%vname
+
+        savedict=ncf.StandardVar(varname=vname)
+        savedict['values']=data.tolist()
+        savedict['dims']=dims
+        savedict['units']=atts['units']
+        savedict['count']=0
+        ncf.AddData(savedict,nsets=data.shape[0])
+         
+    ncf.close()
+
+    return saveas
+
+def SaveTCDataExt(rundat):
+    """ Function SaveTCDataExt saves surface flux data to NetCDF files for extended TransCom regions
+        
+        *** Inputs ***
+        rundat : a RunInfo object
+
+        *** Outputs ***
+        NetCDF file containing n-hourly global surface fluxes per TransCom region
+
+        *** Example ***
+        ./expand_savestate project=enkf_release sd=20000101 ed=20010101 """
+    from tctools import xtc
+    import pycdf as CDF
+
+    infile=os.path.join(rundat.outputdir,'data_tc_weekly','tcfluxes.nc')
+    if not os.path.exists(infile):
+        print "Needed input file (%s) does not exist yet, please create weekly tc flux files first, returning..."%infile
+        return None
+
+    # Create NetCDF output file
+    #
+    saveas=os.path.join(rundat.outputdir,'data_tc_weekly','tc_extfluxes.nc')
+    ncf=CT_CDF(saveas,'create')
+    dimdate=ncf.AddDateDim()
+    dimidateformat=ncf.AddDateDimFormat()
+    dimregs=ncf.AddRegionDim(rundat,type='tc_ext')
+    dummy=ncf.AddRegionDefs(rundat,type='tc_ext')
+
+
+    ncf_in=CDF.CDF(infile)
+    vardict=ncf_in.variables()
+    for vname, vprop in vardict.iteritems():
+        dims=()
+        data=array(ncf_in.var(vname)[:])
+        atts=ncf_in.var(vname).attributes()
+        if vname not in ['date','idate']:
+                if 'cov' in vname: 
+                    data=xtc(data[:,0:23,0:23],cov=True)
+                    dims=dimdate+dimregs+dimregs
+                else: 
+                    data=xtc(data[:,0:23])
+                    dims=dimdate+dimregs
+        elif vname=='date': dims=dimdate
+        elif vname=='idate': dims=dimdate+dimidateformat
+        else:
+             print 'Dataset with unknown dimensions encountered in file: %s'%vname
+
+        savedict=ncf.StandardVar(varname=vname)
+        savedict['units']=atts['units']
+        savedict['values']=data.tolist()
+        savedict['dims']=dims
+        ncf.AddData(savedict,nsets=data.shape[0])
+         
+    ncf.close()
+
+    return saveas
+
+def SaveEcoDataExt(rundat):
+    """ Function SaveEcoDataExt saves surface flux data to NetCDF files for extended ecoregions
+        
+        *** Inputs ***
+        rundat : a RunInfo object
+
+        *** Outputs ***
+        NetCDF file containing n-hourly global surface fluxes per TransCom region
+
+        *** Example ***
+        ./expand_savestate project=enkf_release sd=20000101 ed=20010101 """
+    from ecotools import xte
+    from tctools import LookUpName
+    import pycdf as CDF
+
+    infile=os.path.join(rundat.outputdir,'data_eco_weekly','ecofluxes.nc')
+    if not os.path.exists(infile):
+        print "Needed input file (%s) does not exist yet, please create weekly ecoflux files first, returning..."%infile
+        return None
+
+    # Create NetCDF output file
+    #
+    saveas=os.path.join(rundat.outputdir,'data_eco_weekly','eco_extfluxes.nc')
+    ncf=CT_CDF(saveas,'create')
+    dimdate=ncf.AddDateDim()
+    dimidateformat=ncf.AddDateDimFormat()
+    dimregs=ncf.AddRegionDim(rundat,type='eco_ext')
+    dummy=ncf.AddRegionDefs(rundat,type='eco_ext')
+
+
+    ncf_in=CDF.CDF(infile)
+    vardict=ncf_in.variables()
+    for vname, vprop in vardict.iteritems():
+        dims=()
+        data=array(ncf_in.var(vname)[:])
+        atts=ncf_in.var(vname).attributes()
+        if vname not in ['date','idate']:
+                if 'cov' in vname: 
+                    data=xte(data[:,0:rundat.n_land,0:rundat.n_land],cov=True)
+                    dims=dimdate+dimregs+dimregs
+                else: 
+                    data=xte(data[:,0:rundat.n_land])
+                    dims=dimdate+dimregs
+        elif vname=='date': dims=dimdate
+        elif vname=='idate': dims=dimdate+dimidateformat
+        else:
+             print 'Dataset with unknown dimensions encountered in file: %s'%vname
+
+        savedict=ncf.StandardVar(varname=vname)
+        savedict['units']=atts['units']
+        savedict['values']=data.tolist()
+        savedict['dims']=dims
+        ncf.AddData(savedict,nsets=data.shape[0])
+         
+    ncf.close()
+
+    return saveas
+
+def SaveTimeAvgData(rundat,infile,avg='monthly'):
+    """ Function saves time mean surface flux data to NetCDF files
+        
+        *** Inputs ***
+        rundat : a RunInfo object
+
+        *** Outputs ***
+        daily NetCDF file containing 1-hourly global surface fluxes at 1x1 degree
+
+        *** Example ***
+        ./expand_savestate project=enkf_release sd=20000101 ed=20010101 """
+
+    from tm5tools import monthly_avg, yearly_avg, longterm_avg, season_avg
+    import da.tools.io4 as io
+
+    dirname,filename=os.path.split(infile)
+    dirname=CreateDirs(rundat,dirname.replace('weekly',avg) )
+
+    dectime0=date2num(datetime(2000,1,1))
+
+# Create NetCDF output file
+#
+    saveas=os.path.join(dirname,filename)
+    ncf=io.CT_CDF(saveas,'create')
+    dimdate=ncf.AddDateDim()
+#
+# Open input file specified from the command line
+#
+    if not os.path.exists(infile):
+        print "Needed input file (%s) not found. Please create this first:"%infile
+        print "returning..."
+        return None 
+    else:
+        pass
+
+    file=io.CT_CDF(infile,'read')
+    datasets=file.variables.keys()
+    date=file.GetVariable('date')
+    globatts=file.ncattrs()
+
+    time=[datetime(2000,1,1)+timedelta(days=d) for d in date]
+
+#
+# Add global attributes, sorted 
+#
+
+    keys=globatts
+    keys.sort()
+    for k in keys:
+        setattr(ncf,k,k)
+
+# loop over datasets in infile, skip idate and date as we will make new time axis for the averaged data
+
+    for sds in datasets:
+        if sds in ['idate','date'] : continue
+        print '['
+
+# get original data
+
+        data=array(file.GetVariable(sds))
+        varatts=file.variables[sds].ncattrs()
+        vardims=file.variables[sds].dimensions
+#
+# Depending on dims of input dataset, create dims for output dataset. Note that we add the new dimdate now.
+#
+
+        dims=()
+        for d in vardims:
+            if 'latitude' in d: 
+                dimgrid=ncf.AddLatLonDim()
+                dims+=(dimgrid[0],)
+            if 'longitude' in d: 
+                dimgrid=ncf.AddLatLonDim()
+                dims+=(dimgrid[1],)
+            for type in ['eco','eco_ext','tc','tc_ext','olson']:
+                if 'regions_%s' % type == d: 
+                    dimregs=ncf.AddRegionDim(rundat,type)
+                    dims+=dimregs
+#
+# If the variable name from the infile is not defined in the standard, simply copy attributes from the old to the new sds
+# Do not copy over the grid information
+#
+
+        if ncf.StandardVar(sds)['name'] == 'unknown':
+
+                savedict=ncf.StandardVar(sds)
+                savedict['name']=sds
+                savedict['values']=data.tolist()
+                savedict['dims']=dims
+                savedict['units']=file.variables[sds].units
+                savedict['long_name']=file.variables[sds].long_name
+                savedict['comment']=file.variables[sds].comment
+                savedict['standard_name']=file.variables[sds].standard_name
+                savedict['count']=0
+                ncf.AddData(savedict)
+                sys.stdout.write('.')
+                sys.stdout.flush()
+
+        else:
+
+            if sds in ['latitude','longitude']: continue
+
+            if avg == 'monthly':
+                time_avg, data_avg=monthly_avg(time,data)
+            elif avg == 'seasonal':
+                time_avg, data_avg=season_avg(time,data)
+            elif avg == 'yearly':
+                time_avg, data_avg=yearly_avg(time,data)
+            elif avg == 'longterm':
+                time_avg, data_avg=longterm_avg(time,data)
+                time_avg=[time_avg]
+                data_avg=[data_avg]
+            else:
+                raise ValueError,'Averaging (%s) does not exist' % avg
+
+            count=-1
+            for dd,data in zip(time_avg,data_avg):
+                count=count+1
+
+                if not ncf.has_date(date2num(dd)-dectime0): 
+
+                    savedict=ncf.StandardVar('date')
+                    savedict['values']=date2num(dd)-dectime0
+                    savedict['dims']=dimdate
+                    savedict['count']=count
+                    ncf.AddData(savedict)
+
+                savedict=ncf.StandardVar(sds)
+                savedict['values']=data.tolist()
+                savedict['units']=file.variables[sds].units
+                if 'cov' in sds: 
+                    savedict['dims']=dimdate+dims+dims
+                else:
+                    savedict['dims']=dimdate+dims
+                savedict['count']=count
+                ncf.AddData(savedict)
+
+                sys.stdout.write('.')
+                sys.stdout.flush()
+        print ']'
+
+
+# end NetCDF file access
+    file.close()
+    ncf.close()
+
+if __name__ == "__main__":
+
+    sys.path.append('../../')
+
+    rundat=RunInfo(rcfile='../../da.rc')
+
+    no, yes, all = range(3)
+
+    proceed = no
+
+    print rundat
+    # first, parse results from the inverse part of the run 
+    if proceed != all:
+        proceed = proceed_dialog('Create average 1x1 flux data files? [y|yes, n|no, a|all|yes-to-all ]')
+    if proceed != no:
+        savedas=SaveWeeklyAvg1x1Data(rundat)
+        a=SaveTimeAvgData(rundat,savedas,avg='monthly')
+        a=SaveTimeAvgData(rundat,savedas,avg='yearly')
+        a=SaveTimeAvgData(rundat,savedas,avg='longterm')
+    if proceed != all:
+        proceed = proceed_dialog('Create average eco flux data files? [y|yes, n|no, a|all|yes-to-all] ')
+    if proceed != no:
+        savedas=SaveWeeklyAvgEcoData(rundat)
+        a=SaveTimeAvgData(rundat,savedas,avg='monthly')
+        a=SaveTimeAvgData(rundat,savedas,avg='seasonal')
+        a=SaveTimeAvgData(rundat,savedas,avg='yearly')
+        a=SaveTimeAvgData(rundat,savedas,avg='longterm')
+    if proceed != all:
+        proceed = proceed_dialog('Create average extended eco flux data files? [y|yes, n|no, a|all|yes-to-all] ')
+    if proceed != no:
+        savedas=SaveEcoDataExt(rundat)
+        a=SaveTimeAvgData(rundat,savedas,avg='monthly')
+        a=SaveTimeAvgData(rundat,savedas,avg='seasonal')
+        a=SaveTimeAvgData(rundat,savedas,avg='yearly')
+        a=SaveTimeAvgData(rundat,savedas,avg='longterm')
+    if proceed != all:
+        proceed = proceed_dialog('Create average tc flux data files? [y|yes, n|no, a|all|yes-to-all] ')
+    if proceed != no:
+        savedas=SaveWeeklyAvgTCData(rundat)
+        a=SaveTimeAvgData(rundat,savedas,avg='monthly')
+        a=SaveTimeAvgData(rundat,savedas,avg='seasonal')
+        a=SaveTimeAvgData(rundat,savedas,avg='yearly')
+        a=SaveTimeAvgData(rundat,savedas,avg='longterm')
+    if proceed != all:
+        proceed = proceed_dialog('Create average extended tc flux data files? [y|yes, n|no, a|all|yes-to-all] ')
+    if proceed != no:
+        savedas=SaveTCDataExt(rundat)
+        a=SaveTimeAvgData(rundat,savedas,avg='monthly')
+        a=SaveTimeAvgData(rundat,savedas,avg='seasonal')
+        a=SaveTimeAvgData(rundat,savedas,avg='yearly')
+        a=SaveTimeAvgData(rundat,savedas,avg='longterm')
+    sys.exit(0)
diff --git a/da/analysis/runinfo.py b/da/analysis/runinfo.py
new file mode 100755
index 00000000..16a323d0
--- /dev/null
+++ b/da/analysis/runinfo.py
@@ -0,0 +1,156 @@
+#!/usr/bin/env python
+# runinfo.py
+
+"""
+Author : peters 
+
+Revision History:
+File created on 01 Mar 2011.
+
+"""
+
+import mysettings 
+import commands
+import os
+import sys
+from datetime import datetime, timedelta
+
+
+class RunInfo(object):
+    """ 
+    This is a CarbonTracker  run info object
+    Initialize it with the following keywords:
+   
+    MANDATORY: 
+    project  = Name of project, should correspond to a directory [string]
+    sd       = start date of project output [datetime object or integer yyyymmdd]
+    ed       = end date of project output [datetime object or integer yyyymmdd]
+    OPTIONAL:
+    basedir  = base directory of model output on your machine [string, default='~/Modeling/download']
+    outputdir= output directory for figures and summary files [string, default='~/Modeling/SEATA']
+
+    Tip: change the std_rundat dictionary in module filtertools to set new defaults for all values!
+    """
+    def __init__(self,infodict={},rcfile=None):
+        """ Initialize from the name of an rcfile, or by passing the dictionary directly """
+        import da.tools.rc as rc
+        import da.tools.io4 as io
+        import copy
+
+
+        if rcfile:
+            infodict    = rc.read(rcfile)
+
+        self.basedir    = infodict['dir.da_run']
+        self.proj       = os.path.split(self.basedir)[-1]
+        self.projdir    = self.basedir
+
+        self.dir_scheme = mysettings.ct_dir_scheme
+        
+        self.inputdir   = os.path.join(self.basedir,'output')
+        self.outputdir  = os.path.join(self.basedir,'analysis')
+
+        if not os.path.exists(self.outputdir): 
+            os.makedirs(self.outputdir)
+            print "Creating new output directory "+self.outputdir
+ 
+        sd=infodict['time.start']
+        ed=infodict['time.finish']
+        dt=infodict['time.cycle']
+
+        self.dt = timedelta(days=int(dt))
+
+        if not isinstance(sd,datetime):
+            self.sd = datetime.strptime(sd,'%Y-%m-%d %H:%M:%S')
+        if not isinstance(ed,datetime):
+            self.ed = datetime.strptime(ed,'%Y-%m-%d %H:%M:%S')
+
+        dd             = copy.deepcopy(self.sd)
+        self.inputfiles= []
+        self.weeks     = []
+        self.inputdict = {}
+
+        while dd < self.ed:
+            filename = os.path.join(self.inputdir,'%s'%dd.strftime('%Y%m%d'),'savestate.nc')
+            if os.path.exists(filename):
+                self.inputfiles.append(filename)
+                self.weeks.append(dd)
+                self.inputdict[dd] = filename
+            else:
+                break
+                    
+            dd = dd+self.dt
+
+        self.ed     = dd
+        self.nweeks = len(self.weeks)
+
+        # run parameters from file
+        ncf             = io.CT_CDF(self.inputfiles[0],'read')
+        self.nlag       = len(ncf.dimensions['nlag'])
+        self.nmembers   = len(ncf.dimensions['nmembers'])
+        self.nparameters  = len(ncf.dimensions['nparameters'])
+        ncf.close()
+
+        self.namingscheme = 'wp_Mar2011'
+        filename          = os.path.join('NamingScheme.'+self.namingscheme+'.rc')
+
+        try: 
+            rcinfo=rc.read(filename)
+        except IOError: 
+            print '%s was specified as naming scheme, but no matching %s rc-file was found, exiting...'%(self.namingscheme,filename,)
+            sys.exit(1)
+        except:
+            print 'Unknown error reading rc-file: %s, exiting...'%(filename,) 
+            sys.exit(1)
+
+
+        self.namedict=rcinfo
+    
+    def __str__(self):
+        return 'project              : '+self.projdir+\
+               '\nstart date           : '+str(self.sd)+\
+               '\nend date             : '+str(self.ed)+\
+               '\ndelta date           : '+str(self.dt)+\
+               '\nnweeks               : '+str(self.nweeks)+\
+               '\nnparameters          : '+str(self.nparameters)+\
+               '\nnmembers             : '+str(self.nmembers)+\
+               '\nnlag                 : '+str(self.nlag)+\
+               '\nDA output dir        : '+self.inputdir+\
+               '\nanalysis output dir  : '+self.outputdir+\
+               '\nnaming scheme        : '+self.namingscheme
+
+def get_rundat_settings(args):
+    """ create settings dict for rundat from scripts arguments """
+
+    settings=std_rundat.copy()  # start with defaults for rundat
+
+    for items in args:
+        items=items.lower()
+        k, v = items.split('=')
+        if settings.has_key(k):
+            if k in std_constructors:
+                settings[k] = std_constructors[k](v)
+            else:
+                settings[k] = v
+        else: raise IOError,'Parameter unknown:%s'%(v,)
+
+    return settings
+
+
+
+
+
+if __name__ == "__main__":
+    import getopt
+    import sys
+    from string import *
+
+    sys.path.append('../../')
+
+# Parse keywords
+
+    rundat=RunInfo(rcfile='../../da.rc')
+
+    print rundat
+
+    sys.exit(0)
-- 
GitLab