diff --git a/da/analysis/tools_transcom.py b/da/analysis/tools_transcom.py
index 3c8a31eaea4198efd581077239b74e9321b6d9f5..22ff2e5ce969af118e287a89f3df02f1a63e1fd1 100755
--- a/da/analysis/tools_transcom.py
+++ b/da/analysis/tools_transcom.py
@@ -12,15 +12,18 @@ import da.tools.io4 as io
 
 # Get masks of different region definitions
 
-matrix_file = os.path.join(analysisdir, 'regions.nc')
+matrix_file = os.path.join(analysisdir, 'copied_regions.nc')
 cdf_temp = io.CT_CDF(matrix_file, 'read')
 transcommask = cdf_temp.get_variable('transcom_regions')
+# Fall back to the original TransCom definitions if the copied regions file lacks all 23 regions
+if transcommask.max() < 23 and 'transcom_regions_original' in cdf_temp.variables:
+    transcommask = cdf_temp.get_variable('transcom_regions_original')
 olson240mask = cdf_temp.get_variable('regions')
 olsonmask = cdf_temp.get_variable('land_ecosystems')
 oifmask = cdf_temp.get_variable('ocean_regions')
 dummy = cdf_temp.close()
 
-matrix_file = os.path.join(analysisdir, 'olson_extended.nc')
+matrix_file = os.path.join(analysisdir, 'copied_regions_extended.nc')
 cdf_temp = io.CT_CDF(matrix_file, 'read')
 olson_ext_mask = cdf_temp.get_variable('regions')
 dummy = cdf_temp.close()
@@ -56,7 +59,7 @@ for line in temp:
         olsonshort.append(abbr)
 
 olsonextnams = []
-matrix_file = os.path.join(analysisdir, 'olson_extended.nc')
+matrix_file = os.path.join(analysisdir, 'copied_regions_extended.nc')
 cdf_temp = io.CT_CDF(matrix_file, 'read')
 keys = cdf_temp.ncattrs()
 keys.sort()
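
Note: a minimal standalone sketch of the fallback above, assuming the plain netCDF4 library (the module itself uses da.tools.io4) and a hypothetical file location. If the copied regions file does not carry all 23 standard TransCom regions, the original definition is used when it was preserved in the file.

    # Sketch only: check the TransCom mask fallback outside the pipeline.
    # Assumes netCDF4 and a copied_regions.nc in the working directory (hypothetical).
    import netCDF4

    def load_transcom_mask(path='copied_regions.nc'):
        with netCDF4.Dataset(path, 'r') as nc:
            mask = nc.variables['transcom_regions'][:]
            # Prefer the original definition when the copied mask is incomplete
            if mask.max() < 23 and 'transcom_regions_original' in nc.variables:
                mask = nc.variables['transcom_regions_original'][:]
        return mask

    if __name__ == '__main__':
        print(load_transcom_mask().max())
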
diff --git a/da/rc/carbontracker_sibcasa_regions.rc b/da/rc/carbontracker_sibcasa_regions.rc
new file mode 100644
index 0000000000000000000000000000000000000000..cb906e7f49ae06d5e247393cd23ab8e49d39f3ac
--- /dev/null
+++ b/da/rc/carbontracker_sibcasa_regions.rc
@@ -0,0 +1,27 @@
+!!! Info for the CarbonTracker data assimilation system
+
+datadir         : /Volumes/DataRaid/CO2/carbontracker/input/ctdas_2012/ 
+
+! For ObsPack
+obspack.input.dir  : ${datadir}/obspacks/${obspack.input.id}
+obspack.input.id   : obspack_co2_1_PROTOTYPE_v1.0.3_2013-01-29
+
+ocn.covariance  : ${datadir}/oceans/oif/cov_ocean.2000.01.nc 
+deltaco2.prefix : oif_p3_era40.dpco2
+bio.covariance  : ${datadir}/covariances/sibcasa/cov_sibcasaregion_001_143.nc
+regtype         : olson19_oif30
+nparameters     : 174 
+random.seed     : 4385
+random.seed.init: ${datadir}/randomseedinit.pickle
+regionsfile     : ${datadir}/regions_sibcasa.nc
+extendedregionsfile: ${datadir}/sibcasa_extended.nc
+
+! Include a naming scheme for the variables
+
+#include NamingScheme.wp_Mar2011.rc 
+
+! Info on the sites file used
+
+! For ObsPack
+obs.sites.rc        : ${obspack.input.dir}/summary/sites_weights_geocarbon_July2013.rc
+
diff --git a/da/rc/carbontracker_sibcasa_sam_koppen_regions.rc b/da/rc/carbontracker_sibcasa_sam_koppen_regions.rc
new file mode 100644
index 0000000000000000000000000000000000000000..e9e88c1d76ea7030808c2cf87fba7560aebae089
--- /dev/null
+++ b/da/rc/carbontracker_sibcasa_sam_koppen_regions.rc
@@ -0,0 +1,27 @@
+!!! Info for the CarbonTracker data assimilation system
+
+datadir         : /Volumes/DataRaid/CO2/carbontracker/input/ctdas_2012/ 
+
+! For ObsPack
+obspack.input.dir  : ${datadir}/obspacks/${obspack.input.id}
+obspack.input.id   : obspack_co2_1_PROTOTYPE_v1.0.3_2013-01-29
+
+ocn.covariance  : ${datadir}/oceans/oif/cov_ocean.2000.01.nc 
+deltaco2.prefix : oif_p3_era40.dpco2
+bio.covariance  : ${datadir}/covariances/sibcasa/cov_sibcasa_koppen_sam_region_001_160.nc
+regtype         : olson19_oif30
+nparameters     : 191 
+random.seed     : 4385
+random.seed.init: ${datadir}/randomseedinit.pickle
+regionsfile     : ${datadir}/regions_sibcasa_koppen_sam.nc
+extendedregionsfile: ${datadir}/sibcasa_koppen_sam_extended.nc
+
+! Include a naming scheme for the variables
+
+#include NamingScheme.wp_Mar2011.rc 
+
+! Info on the sites file used
+
+! For ObsPack
+obs.sites.rc        : ${obspack.input.dir}/summary/sites_weights_geocarbon_July2013.rc
+
diff --git a/da/tm5/observationoperator.py b/da/tm5/observationoperator.py
index f57c651fd29a6c768df8b2ddd70d5912ad9463bd..2a454077db2d3c1800eb26e1d3b48294c3fc54e9 100755
--- a/da/tm5/observationoperator.py
+++ b/da/tm5/observationoperator.py
@@ -27,6 +27,7 @@ import shutil
 import datetime
 import subprocess
 from string import join
+import glob
 sys.path.append(os.getcwd())
 sys.path.append("../../")
 
@@ -135,12 +136,12 @@ class TM5ObservationOperator(ObservationOperator):
             #Use a TM5 restart file in the first cycle (instead of init file). Used now for the CO project.
             if self.dacycle.has_key('da.obsoperator.restartfileinfirstcycle'):
                 restartfilename = self.dacycle['da.obsoperator.restartfileinfirstcycle']
-                sourcedir = self.dacycle['dir.exec']
                 targetdir = self.tm_settings[self.savedirkey]
                 create_dirs(targetdir)
-                sourcefile = os.path.join(sourcedir,restartfilename)
-                shutil.copy(sourcefile, sourcefile.replace(sourcedir, targetdir))
-                logging.debug('Copied TM5 restart file to TM5 restart directory for first cycle: %s'%sourcefile)
+                for sourcefile in glob.glob(restartfilename):
+                    fname = os.path.split(sourcefile)[1]
+                    shutil.copy(sourcefile, os.path.join(targetdir, fname))
+                    logging.debug('Copied TM5 restart file to TM5 restart directory for first cycle: %s' % fname)
 
             # Replace the rc filename for TM5 with the newly created one in the new run directory
 
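
Note: a usage sketch of the glob-based copy above. The pattern and directories are hypothetical; the point is that da.obsoperator.restartfileinfirstcycle may now hold a wildcard, and every matching restart file is copied into the TM5 save directory.

    # Sketch only: wildcard expansion as used for the first-cycle restart files.
    # The pattern and target directory are hypothetical examples.
    import glob
    import os
    import shutil

    restartfilename = '/scratch/tm5/restart/TM5_restart_2012*.nc'  # da.obsoperator.restartfileinfirstcycle
    targetdir = '/scratch/tm5/save'

    for sourcefile in glob.glob(restartfilename):
        fname = os.path.split(sourcefile)[1]
        shutil.copy(sourcefile, os.path.join(targetdir, fname))
        print('Copied %s to %s' % (fname, targetdir))
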
diff --git a/da/tools/initexit.py b/da/tools/initexit.py
index 40896c779169c4031aa9f5c03ee2ba9e358407a6..89272de779c3331692108c469fe8687acbdef188 100755
--- a/da/tools/initexit.py
+++ b/da/tools/initexit.py
@@ -57,6 +57,7 @@ Other functions in the module initexit that are related to the control of a DA c
 import logging
 import os
 import sys
+import glob
 import shutil
 import copy
 import getopt
@@ -319,6 +320,16 @@ class CycleControl(dict):
 
             strippedname = os.path.split(self['jobrcfilename'])[-1]
             self['jobrcfilename'] = os.path.join(self['dir.exec'], strippedname)
+            shutil.copy(self.dasystem['regionsfile'], os.path.join(self['dir.exec'], 'da', 'analysis', 'copied_regions.nc'))
+            logging.info('Copied regions file to the analysis directory: %s' % self.dasystem['regionsfile'])
+            shutil.copy(self.dasystem['extendedregionsfile'], os.path.join(self['dir.exec'], 'da', 'analysis', 'copied_regions_extended.nc'))
+            logging.info('Copied extended regions file to the analysis directory: %s' % self.dasystem['extendedregionsfile'])
+            for filename in glob.glob(os.path.join(self['dir.exec'], 'da', 'analysis', '*.pickle')):
+                logging.info('Deleting pickle file %s to make sure the correct regions are used' % os.path.split(filename)[1])
+                os.remove(filename)
+            for filename in glob.glob(os.path.join(self['dir.exec'], '*.pickle')):
+                logging.info('Deleting pickle file %s to make sure the correct regions are used' % os.path.split(filename)[1])
+                os.remove(filename)
             if self.has_key('random.seed.init'):
                 self.read_random_seed(True)
 
@@ -539,6 +550,7 @@ class CycleControl(dict):
                 nextrestartfilename = self['da.restart.fname'].replace(jobid,nextjobid)
                 nextlogfilename = logfile.replace(jobid,nextjobid)
             	template += '\nexport icycle_in_job=%d\npython %s rc=%s %s >&%s\n' % (cycle+1,execcommand, nextrestartfilename, join(self.opts, ''), nextlogfilename,) 
+                #template += '\nexport icycle_in_job=%d\npython %s rc=%s %s >&%s &\n' % (cycle+1,execcommand, nextrestartfilename, join(self.opts, ''), nextlogfilename,)
 
             # write and submit 
             self.daplatform.write_job(jobfile, template, jobid)
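
Note: the change above stages the rc keys regionsfile and extendedregionsfile (see the new rc files) into da/analysis under the fixed names that tools_transcom.py reads, and deletes cached *.pickle files so stale region aggregates are not reused. A condensed sketch with hypothetical paths:

    # Sketch only: condenses the regions-file staging added above (paths are hypothetical).
    import glob
    import os
    import shutil

    def stage_regions_files(regionsfile, extendedregionsfile, execdir):
        analysisdir = os.path.join(execdir, 'da', 'analysis')
        # tools_transcom.py expects these two fixed filenames
        shutil.copy(regionsfile, os.path.join(analysisdir, 'copied_regions.nc'))
        shutil.copy(extendedregionsfile, os.path.join(analysisdir, 'copied_regions_extended.nc'))
        # Drop cached pickles so the new region definitions take effect
        for pattern in (os.path.join(analysisdir, '*.pickle'), os.path.join(execdir, '*.pickle')):
            for filename in glob.glob(pattern):
                os.remove(filename)

    # Example call (hypothetical inputs):
    # stage_regions_files('/data/regions_sibcasa.nc', '/data/sibcasa_extended.nc', '/scratch/ctdas/exec')
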
diff --git a/template.jb b/template.jb
index 1005ba97e5d1631e5daad3f7e0351795736a3d24..aa9266e00bbab7b164e0df8608f3185604997ba9 100755
--- a/template.jb
+++ b/template.jb
@@ -1,10 +1,14 @@
-#! /bin/env bash
-#SBATCH -p normal
-#SBATCH -t 12:00:00 
+#!/bin/sh
+#$ das.py
+#$ co2
+#$ nserial 1
+#$ 06:30:00
+#$ /bin/sh
 
-echo ########################
+echo "All output piped to file template.log"
+source /usr/local/Modules/3.2.8/init/sh
+source /opt/intel/bin/ifortvars.sh intel64
+export HOST='capegrim'
 module load python
-module load nco
-
 export icycle_in_job=999
-python template.py rc=template.rc $1 >& template.log &
+python template.py rc=template.rc -v $1 >& template.log &
diff --git a/template.py b/template.py
index 40c339ba40a57d977b3eb8ee0b6622dd0ff015f3..2359b4faa659e7e997cbab48cacb07ca93ab5481 100755
--- a/template.py
+++ b/template.py
@@ -51,6 +51,7 @@ obsoperator = TM5ObservationOperator(dacycle['da.obsoperator.rc'])
 samples = ObsPackObservations()
 #samples     = CtObservations()
 statevector = CO2GriddedStateVector()
+#statevector = CO2StateVector()
 optimizer = CO2Optimizer()
 
 ##########################################################################################
@@ -68,7 +69,7 @@ ensemble_smoother_pipeline(dacycle, platform, dasystem, samples, statevector, ob
 ################### All done, extra stuff can be added next, such as analysis
 ##########################################################################################
 
-analysis_pipeline(dacycle, platform, dasystem, samples, statevector, obsoperator )
+analysis_pipeline(dacycle, platform, dasystem, samples, statevector )
 
 archive_pipeline(dacycle, platform, dasystem)