From 58818fde99e1eb357a8a80e5b8e2bf27f9d84e61 Mon Sep 17 00:00:00 2001
From: Ingrid Luijx <ingrid.vanderlaan@wur.nl>
Date: Sat, 1 Mar 2014 20:12:13 +0000
Subject: [PATCH] Changes for South America runs: regions files in the analysis
 directory are copied from the ones specified in the system .rc file. Region
 pickle files are deleted from the analysis directory to make sure the correct
 ones are used. Option to use covariances for fewer than 23 TransCom regions
 while still using 23 for the analysis (does not work in tcfluxes.nc). Update
 in observationoperator.py to use a restart file in the first cycle also for
 more than one resolution (i.e. including zoom regions).

---
 da/analysis/tools_transcom.py                 |  9 ++++---
 da/rc/carbontracker_sibcasa_regions.rc        | 27 +++++++++++++++++++
 ...arbontracker_sibcasa_sam_koppen_regions.rc | 27 +++++++++++++++++++
 da/tm5/observationoperator.py                 |  9 ++++---
 da/tools/initexit.py                          | 12 +++++++++
 template.jb                                   | 18 ++++++++-----
 template.py                                   |  3 ++-
 7 files changed, 90 insertions(+), 15 deletions(-)
 create mode 100644 da/rc/carbontracker_sibcasa_regions.rc
 create mode 100644 da/rc/carbontracker_sibcasa_sam_koppen_regions.rc

diff --git a/da/analysis/tools_transcom.py b/da/analysis/tools_transcom.py
index 3c8a31ea..22ff2e5c 100755
--- a/da/analysis/tools_transcom.py
+++ b/da/analysis/tools_transcom.py
@@ -12,15 +12,18 @@ import da.tools.io4 as io
 
 # Get masks of different region definitions
 
-matrix_file = os.path.join(analysisdir, 'regions.nc')
+matrix_file = os.path.join(analysisdir, 'copied_regions.nc')
 cdf_temp = io.CT_CDF(matrix_file, 'read')
 transcommask = cdf_temp.get_variable('transcom_regions')
+if transcommask.max() < 23:
+    if 'transcom_regions_original' in cdf_temp.variables:
+        transcommask = cdf_temp.get_variable('transcom_regions_original')
 olson240mask = cdf_temp.get_variable('regions')
 olsonmask = cdf_temp.get_variable('land_ecosystems')
 oifmask = cdf_temp.get_variable('ocean_regions')
 dummy = cdf_temp.close()
 
-matrix_file = os.path.join(analysisdir, 'olson_extended.nc')
+matrix_file = os.path.join(analysisdir, 'copied_regions_extended.nc')
 cdf_temp = io.CT_CDF(matrix_file, 'read')
 olson_ext_mask = cdf_temp.get_variable('regions')
 dummy = cdf_temp.close()
@@ -56,7 +59,7 @@ for line in temp:
         olsonshort.append(abbr)
 
 olsonextnams = []
-matrix_file = os.path.join(analysisdir, 'olson_extended.nc')
+matrix_file = os.path.join(analysisdir, 'copied_regions_extended.nc')
 cdf_temp = io.CT_CDF(matrix_file, 'read')
 keys = cdf_temp.ncattrs()
 keys.sort()
diff --git a/da/rc/carbontracker_sibcasa_regions.rc b/da/rc/carbontracker_sibcasa_regions.rc
new file mode 100644
index 00000000..cb906e7f
--- /dev/null
+++ b/da/rc/carbontracker_sibcasa_regions.rc
@@ -0,0 +1,27 @@
+!!! Info for the CarbonTracker data assimilation system
+
+datadir         : /Volumes/DataRaid/CO2/carbontracker/input/ctdas_2012/ 
+
+! For ObsPack
+obspack.input.dir  : ${datadir}/obspacks/${obspack.input.id}
+obspack.input.id   : obspack_co2_1_PROTOTYPE_v1.0.3_2013-01-29
+
+ocn.covariance  : ${datadir}/oceans/oif/cov_ocean.2000.01.nc 
+deltaco2.prefix : oif_p3_era40.dpco2
+bio.covariance  : ${datadir}/covariances/sibcasa/cov_sibcasaregion_001_143.nc
+regtype         : olson19_oif30
+nparameters     : 174 
+random.seed     : 4385
+random.seed.init: ${datadir}/randomseedinit.pickle
+regionsfile     : ${datadir}/regions_sibcasa.nc
+extendedregionsfile: ${datadir}/sibcasa_extended.nc
+
+! Include a naming scheme for the variables
+
+#include NamingScheme.wp_Mar2011.rc 
+
+! Info on the sites file used
+
+! For ObsPack
+obs.sites.rc        : ${obspack.input.dir}/summary/sites_weights_geocarbon_July2013.rc
+
diff --git a/da/rc/carbontracker_sibcasa_sam_koppen_regions.rc b/da/rc/carbontracker_sibcasa_sam_koppen_regions.rc
new file mode 100644
index 00000000..e9e88c1d
--- /dev/null
+++ b/da/rc/carbontracker_sibcasa_sam_koppen_regions.rc
@@ -0,0 +1,27 @@
+!!! Info for the CarbonTracker data assimilation system
+
+datadir         : /Volumes/DataRaid/CO2/carbontracker/input/ctdas_2012/ 
+
+! For ObsPack
+obspack.input.dir  : ${datadir}/obspacks/${obspack.input.id}
+obspack.input.id   : obspack_co2_1_PROTOTYPE_v1.0.3_2013-01-29
+
+ocn.covariance  : ${datadir}/oceans/oif/cov_ocean.2000.01.nc 
+deltaco2.prefix : oif_p3_era40.dpco2
+bio.covariance  : ${datadir}/covariances/sibcasa/cov_sibcasa_koppen_sam_region_001_160.nc
+regtype         : olson19_oif30
+nparameters     : 191 
+random.seed     : 4385
+random.seed.init: ${datadir}/randomseedinit.pickle
+regionsfile     : ${datadir}/regions_sibcasa_koppen_sam.nc
+extendedregionsfile: ${datadir}/sibcasa_koppen_sam_extended.nc
+
+! Include a naming scheme for the variables
+
+#include NamingScheme.wp_Mar2011.rc 
+
+! Info on the sites file used
+
+! For ObsPack
+obs.sites.rc        : ${obspack.input.dir}/summary/sites_weights_geocarbon_July2013.rc
+
diff --git a/da/tm5/observationoperator.py b/da/tm5/observationoperator.py
index f57c651f..2a454077 100755
--- a/da/tm5/observationoperator.py
+++ b/da/tm5/observationoperator.py
@@ -27,6 +27,7 @@ import shutil
 import datetime
 import subprocess
 from string import join
+import glob
 sys.path.append(os.getcwd())
 sys.path.append("../../")
 
@@ -135,12 +136,12 @@ class TM5ObservationOperator(ObservationOperator):
             #Use a TM5 restart file in the first cycle (instead of init file). Used now for the CO project.
             if self.dacycle.has_key('da.obsoperator.restartfileinfirstcycle'):
                 restartfilename = self.dacycle['da.obsoperator.restartfileinfirstcycle']
-                sourcedir = self.dacycle['dir.exec']
                 targetdir = self.tm_settings[self.savedirkey]
                 create_dirs(targetdir)
-                sourcefile = os.path.join(sourcedir,restartfilename)
-                shutil.copy(sourcefile, sourcefile.replace(sourcedir, targetdir))
-                logging.debug('Copied TM5 restart file to TM5 restart directory for first cycle: %s'%sourcefile)
+                for file in glob.glob(restartfilename):
+                    fname = os.path.split(file)[1]
+                    logging.debug('Copied TM5 restart file to TM5 restart directory for first cycle: %s'%fname)
+                    shutil.copy(file,os.path.join(targetdir,fname))
 
             # Replace the rc filename for TM5 with the newly created one in the new run directory
 
diff --git a/da/tools/initexit.py b/da/tools/initexit.py
index 40896c77..89272de7 100755
--- a/da/tools/initexit.py
+++ b/da/tools/initexit.py
@@ -57,6 +57,7 @@ Other functions in the module initexit that are related to the control of a DA c
 import logging
 import os
 import sys
+import glob
 import shutil
 import copy
 import getopt
@@ -319,6 +320,16 @@ class CycleControl(dict):
 
             strippedname = os.path.split(self['jobrcfilename'])[-1]
             self['jobrcfilename'] = os.path.join(self['dir.exec'], strippedname)
+            shutil.copy(os.path.join(self.dasystem['regionsfile']),os.path.join(self['dir.exec'],'da','analysis','copied_regions.nc'))
+            logging.info('Copied regions file to the analysis directory: %s'%os.path.join(self.dasystem['regionsfile'])) 
+            shutil.copy(os.path.join(self.dasystem['extendedregionsfile']),os.path.join(self['dir.exec'],'da','analysis','copied_regions_extended.nc')) 
+            logging.info('Copied extended regions file to the analysis directory: %s'%os.path.join(self.dasystem['extendedregionsfile'])) 
+            for filename in glob.glob(os.path.join(self['dir.exec'],'da','analysis','*.pickle')):
+                logging.info('Deleting pickle file %s to make sure the correct regions are used'%os.path.split(filename)[1])
+                os.remove(filename) 
+            for filename in glob.glob(os.path.join(self['dir.exec'],'*.pickle')):
+                logging.info('Deleting pickle file %s to make sure the correct regions are used'%os.path.split(filename)[1])
+                os.remove(filename) 
             if self.has_key('random.seed.init'):
                 self.read_random_seed(True)
 
@@ -539,6 +550,7 @@ class CycleControl(dict):
                 nextrestartfilename = self['da.restart.fname'].replace(jobid,nextjobid)
                 nextlogfilename = logfile.replace(jobid,nextjobid)
             	template += '\nexport icycle_in_job=%d\npython %s rc=%s %s >&%s\n' % (cycle+1,execcommand, nextrestartfilename, join(self.opts, ''), nextlogfilename,) 
+                #template += '\nexport icycle_in_job=%d\npython %s rc=%s %s >&%s &\n' % (cycle+1,execcommand, nextrestartfilename, join(self.opts, ''), nextlogfilename,)
 
             # write and submit 
             self.daplatform.write_job(jobfile, template, jobid)
diff --git a/template.jb b/template.jb
index 1005ba97..aa9266e0 100755
--- a/template.jb
+++ b/template.jb
@@ -1,10 +1,14 @@
-#! /bin/env bash
-#SBATCH -p normal
-#SBATCH -t 12:00:00 
+#!/bin/sh
+#$ das.py
+#$ co2
+#$ nserial 1
+#$ 06:30:00
+#$ /bin/sh
 
-echo ########################
+echo "All output piped to file template.log"
+source /usr/local/Modules/3.2.8/init/sh
+source /opt/intel/bin/ifortvars.sh intel64
+export HOST='capegrim'
 module load python
-module load nco
-
 export icycle_in_job=999
-python template.py rc=template.rc $1 >& template.log &
+python template.py rc=template.rc -v $1 >& template.log &
diff --git a/template.py b/template.py
index 40c339ba..2359b4fa 100755
--- a/template.py
+++ b/template.py
@@ -51,6 +51,7 @@ obsoperator = TM5ObservationOperator(dacycle['da.obsoperator.rc'])
 samples = ObsPackObservations()
 #samples     = CtObservations()
 statevector = CO2GriddedStateVector()
+#statevector = CO2StateVector()
 optimizer = CO2Optimizer()
 
 ##########################################################################################
@@ -68,7 +69,7 @@ ensemble_smoother_pipeline(dacycle, platform, dasystem, samples, statevector, ob
 ################### All done, extra stuff can be added next, such as analysis
 ##########################################################################################
 
-analysis_pipeline(dacycle, platform, dasystem, samples, statevector, obsoperator )
+analysis_pipeline(dacycle, platform, dasystem, samples, statevector )
 
 archive_pipeline(dacycle, platform, dasystem)
 
-- 
GitLab