diff --git a/da/analysis/expand_fluxes.py b/da/analysis/expand_fluxes.py
index 08a960b3a0c30fd00bef219dfc75f9182156bbbb..4eb200ccbbab0233cfe8d2d5104e72e0b0392b22 100755
--- a/da/analysis/expand_fluxes.py
+++ b/da/analysis/expand_fluxes.py
@@ -547,7 +547,7 @@ def SaveWeeklyAvgTCData(DaCycle, StateVector):
 
     return saveas
 
-def SaveTCDataExt(rundat):
+def SaveWeeklyAvgExtTCData(DaCycle):
     """ Function SaveTCDataExt saves surface flux data to NetCDF files for extended TransCom regions
         
         *** Inputs ***
@@ -558,57 +558,112 @@ def SaveTCDataExt(rundat):
 
         *** Example ***
         ./expand_savestate project=enkf_release sd=20000101 ed=20010101 """
-    from da.analysis.tools_transcom import StateCovToGrid, transcommask, ExtendedTCRegions
+
+    from da.analysis.tools_transcom import ExtendedTCRegions
     import da.tools.io4 as io
+#
+    dirname     = 'data_tc_weekly'
+#
+    dirname     = CreateDirs(os.path.join(DaCycle['dir.analysis'],dirname))
+#
+# Some helper variables
+#
+    dectime0    = date2num(datetime(2000,1,1))
+    dt          = DaCycle['cyclelength']
+    startdate   = DaCycle['time.start'] 
+    enddate     = DaCycle['time.end'] 
+    ncfdate     = date2num(startdate)-dectime0+dt.days/2.0
 
-    infile=os.path.join(rundat.outputdir,'data_tc_weekly','tcfluxes.nc')
-    if not os.path.exists(infile):
-        print "Needed input file (%s) does not exist yet, please create weekly tc flux files first, returning..."%infile
-        return None
+    msg = "DA Cycle start date is %s"   % startdate.strftime('%Y-%m-%d %H:%M')      ; logging.debug(msg)
+    msg = "DA Cycle end   date is %s"   % enddate.strftime('%Y-%m-%d %H:%M')        ; logging.debug(msg)
 
-    # Create NetCDF output file
+    # Write/Create NetCDF output file
     #
-    saveas          = os.path.join(rundat.outputdir,'data_tc_weekly','tc_extfluxes.nc')
-    ncf             = io.CT_CDF(saveas,'create')
+    saveas          = os.path.join(dirname,'tc_extfluxes.nc')
+    ncf             = io.CT_CDF(saveas,'write')
     dimdate         = ncf.AddDateDim()
     dimidateformat  = ncf.AddDateDimFormat()
     dimregs         = ncf.AddRegionDim(type='tc_ext')
+#
+# set title and tell GMT that we are using "pixel registration"
+#
+    setattr(ncf,'Title','CarbonTracker TransCom fluxes')
+    setattr(ncf,'node_offset',1)
+    #
 
+    skip             = ncf.has_date(ncfdate)
+    if skip:
+        msg = 'Skipping writing of data for date %s : already present in file %s' % (startdate.strftime('%Y-%m-%d'),saveas) ; logging.warning(msg)
+    else:
+        infile=os.path.join(DaCycle['dir.analysis'],'data_tc_weekly','tcfluxes.nc')
+        if not os.path.exists(infile):
+            msg="Needed input file (%s) does not exist yet, please create file first, returning..."%infile ; logging.error(msg)
+            return None
 
-    ncf_in      = io.CT_Read(infile,'read')
-    vardict     = ncf_in.variables
-    for vname, vprop in vardict.iteritems():
+        ncf_in      = io.CT_Read(infile,'read')
 
-        data    = ncf_in.GetVariable(vname)[:]
+        # Transform data one by one
 
-        if vname=='latitude': continue
-        elif vname=='longitude': continue
-        elif vname=='date': dims=dimdate
-        elif vname=='idate': dims=dimdate+dimidateformat
+        # Get the date variable, and find index corresponding to the DaCycle date
 
-        if vname not in ['date','idate']:
+        try:
+            dates       = ncf_in.variables['date'][:]
+        except KeyError:
+            msg         = "The variable date cannot be found in the requested input file (%s) " % infile ; logging.error(msg)
+            msg         = "Please make sure you create gridded fluxes before making extended TC fluxes " ; logging.error(msg)
+            raise KeyError
 
-                if 'cov' in vname: 
-                    alldata=[] 
-                    for dd in data:
-                        dd=StateCovToGrid(dd*area,transcommask,reverse=True)
-                        alldata.append(dd)
-                    data=array(alldata) 
-                    dims=dimdate+dimregs+dimregs
-                else:
-                    data=ExtendedTCRegions(data)
-                    dims=dimdate+dimregs
+        try:
+            index       = dates.tolist().index(ncfdate)
+        except ValueError:
+            msg         = "The requested cycle date is not yet available in file %s "% infile ; logging.error(msg)
+            msg         = "Please make sure you create state based fluxes before making extended TC fluxes " ; logging.error(msg)
+            raise ValueError
 
-        print vname,data.shape
-        savedict            = ncf.StandardVar(varname=vname)
-        savedict['values']  = data.tolist()
-        savedict['dims']    = dims
-        savedict['units']   = 'mol/region/s'
-        savedict['count']   = 0
-        ncf.AddData(savedict,nsets=data.shape[0])
-         
+        # First add the date for this cycle to the file; this grows the unlimited dimension
+
+        savedict            = ncf.StandardVar(varname='date')
+        savedict['values']  = ncfdate
+        savedict['dims']    = dimdate
+        savedict['count']   = index
+        dummy               = ncf.AddData(savedict)
+
+        # Now convert other variables that were inside the tcfluxes.nc file
+
+        vardict     = ncf_in.variables
+        for vname, vprop in vardict.iteritems():
+
+            data    = ncf_in.GetVariable(vname)[index]
+
+            if vname=='latitude': continue
+            elif vname=='longitude': continue
+            elif vname=='date': continue
+            elif vname=='idate': continue
+            elif 'cov' in vname:
+              
+                tcdata              = ExtendedTCRegions(data,cov=True)
+
+                savedict            = ncf.StandardVar(varname=vname)
+                savedict['units']   = '[mol/region/s]**2'
+                savedict['dims']    = dimdate+dimregs+dimregs
+                
+            else:
+
+                tcdata              = ExtendedTCRegions(data,cov=False)
+
+                savedict            = ncf.StandardVar(varname=vname)
+                savedict['dims']    = dimdate+dimregs
+                savedict['units']   = 'mol/region/s'
+
+            savedict['count']   = index
+            savedict['values']  = tcdata
+            ncf.AddData(savedict)
+
+        ncf_in.close()
     ncf.close()
 
+    msg="TransCom weekly average extended fluxes now written" ; logging.info(msg)
+
     return saveas
 
 def SaveEcoDataExt(rundat):
@@ -851,6 +906,7 @@ if __name__ == "__main__":
             savedas=SaveWeeklyAvg1x1Data(DaCycle, StateVector)
             savedas=SaveWeeklyAvgStateData(DaCycle, StateVector)
             savedas=SaveWeeklyAvgTCData(DaCycle, StateVector)
+            savedas=SaveWeeklyAvgExtTCData(DaCycle)
 
             DaCycle.AdvanceCycleTimes()
 
diff --git a/da/analysis/tools_transcom.py b/da/analysis/tools_transcom.py
index d8ff9b6f9f26da51751428b31dae88ae813f30d6..b6d8b85d7c2c73953e5667af3821dd1b017b9c96 100755
--- a/da/analysis/tools_transcom.py
+++ b/da/analysis/tools_transcom.py
@@ -274,8 +274,7 @@ def StateCovToTranscom(runinfo,p):
 
 def ExtendedTCRegions(data,cov=False):
     """ convert to extended transcom shaped regions"""
-    import da.tools.io4 as io 
-    from numpy import dot
+    from numpy import dot, transpose
 
     nparams     = data.shape[-1]
     if nparams != 23 :
@@ -285,7 +284,7 @@ def ExtendedTCRegions(data,cov=False):
     if not cov:
         return dot(array(data).squeeze(),M)
     else:
-        return transpose(dot(transpose(M),dot(array(data).squeeze(),M)),(1,0,2))
+        return M.transpose().dot(data).dot(M)
 
 def cov2corr(A):
     b=1./sqrt(A.diagonal())