Commit 7e83a098 authored by brunner

No commit message

parent 469624cd
@@ -110,7 +110,7 @@ def write_mole_fractions(dacycle):
     fc_simulated = ncf_fc_in.get_variable('modelsamplesmean_prior')
     fc_simulated_ens = ncf_fc_in.get_variable('modelsamplesdeviations_prior')
     fc_flag = ncf_fc_in.get_variable('flag')
-    if not dacycle.dasystem.has_key('opt.algorithm'):
+    if 'modeldatamismatchvariance' not in dacycle.dasystem:
         fc_r = ncf_fc_in.get_variable('modeldatamismatchvariance')
         fc_hphtr = ncf_fc_in.get_variable('totalmolefractionvariance')
     elif dacycle.dasystem['opt.algorithm'] == 'serial':
@@ -172,10 +172,10 @@ def write_mole_fractions(dacycle):
     # get nobs dimension
-    if ncf_out.dimensions.has_key('id'):
+    if 'id' in ncf_out.dimensions:
         dimidob = ncf_out.dimensions['id']
         dimid = ('id',)
-    elif ncf_out.dimensions.has_key('obs'):
+    elif 'obs' in ncf_out.dimensions:
         dimidob = ncf_out.dimensions['obs']
         dimid = ('obs',)
@@ -273,11 +273,11 @@ def write_mole_fractions(dacycle):
     # Get existing file obs_nums to determine match to local obs_nums
-    if ncf_out.variables.has_key('merge_num'):
+    if 'merge_num' in ncf_out.variables:
         file_obs_nums = ncf_out.get_variable('merge_num')
-    elif ncf_out.variables.has_key('obspack_num'):
+    elif 'obspack_num' in ncf_out.variables:
         file_obs_nums = ncf_out.get_variable('obspack_num')
-    elif ncf_out.variables.has_key('id'):
+    elif 'id' in ncf_out.variables:
         file_obs_nums = ncf_out.get_variable('id')
     # Get all obs_nums related to this file, determine their indices in the local arrays
...
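
The hunks above replace the dict.has_key() method, which was removed in Python 3, with the `in` operator; the same test works on plain dictionaries and on dict-like containers such as the dimensions and variables mappings of a netCDF file object. A minimal sketch of the idiom, using a hypothetical settings dict rather than the actual dacycle object:

    settings = {'opt.algorithm': 'serial', 'obs.input.dir': '/some/path'}  # hypothetical values

    if 'opt.algorithm' not in settings:        # Python 2 spelling: settings.has_key('opt.algorithm')
        print('falling back to the default optimizer')
    elif settings['opt.algorithm'] == 'serial':
        print('using the serial optimizer')
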
@@ -70,7 +70,7 @@ class DaSystem(dict):
         """
         This method loads a DA System Info rc-file with settings for this simulation
         """
-        for k, v in rc.read(rcfilename).iteritems():
+        for k, v in rc.read(rcfilename).items():
             self[k] = v
         logging.debug("DA System Info rc-file (%s) loaded successfully" % rcfilename)
@@ -82,17 +82,17 @@ class DaSystem(dict):
         """
         needed_rc_items = {}
-        for k, v in self.iteritems():
+        for k, v in self.items():
             if v == 'True' :
                 self[k] = True
             if v == 'False':
                 self[k] = False
         for key in needed_rc_items:
-            if not self.has_key(key):
+            if key not in self:
                 msg = 'Missing a required value in rc-file : %s' % key
                 logging.error(msg)
-                raise IOError, msg
+                raise IOError(msg)
         logging.debug('DA System Info settings have been validated succesfully')
 ################### End Class DaSystem ###################
...
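
The validation above depends on two further Python 3 changes: dictionaries are iterated with items() instead of iteritems(), and exceptions are raised with call syntax instead of the old comma form. A self-contained sketch of the same check, assuming a hypothetical rc dictionary and list of required keys:

    import logging

    def validate(rc, needed_rc_items):
        # Coerce the 'True'/'False' strings that rc-files store into real booleans.
        for k, v in rc.items():                  # Python 2: rc.iteritems()
            if v == 'True':
                rc[k] = True
            if v == 'False':
                rc[k] = False
        for key in needed_rc_items:
            if key not in rc:                    # Python 2: rc.has_key(key)
                msg = 'Missing a required value in rc-file : %s' % key
                logging.error(msg)
                raise IOError(msg)               # Python 2: raise IOError, msg

    validate({'do.something': 'True'}, ['do.something'])
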
@@ -66,17 +66,17 @@ class ObservationOperator(object):
         """ Perform all steps necessary to start the observation operator through a simple Run() call """
         self.dacycle = dacycle
         self.outputdir = dacycle['dir.output']

     def prepare_run(self):
         """ Prepare the running of the actual forecast model, for example compile code """
         import os

         # Define the name of the file that will contain the modeled output of each observation
         self.simulated_file = os.path.join(self.outputdir, 'samples_simulated.%s.nc' % self.dacycle['time.sample.stamp'])
         self.forecast_nmembers = int(self.dacycle['da.optimizer.nmembers'])

     def validate_input(self):
         """ Make sure that data needed for the ObservationOperator (such as observation input lists, or parameter files)
@@ -86,21 +86,21 @@ class ObservationOperator(object):
         """ Write the data that is needed for a restart or recovery of the Observation Operator to the save directory """

     def run(self):
         """
         This Randomizer will take the original observation data in the Obs object, and simply copy each mean value. Next, the mean
         value will be perturbed by a random normal number drawn from a specified uncertainty of +/- 2 ppm
         """
         import da.tools.io4 as io
         import numpy as np

         # Create a flask output file in TM5-style (to be updated later?) to hold simulated values for later reading
         f = io.CT_CDF(self.simulated_file, method='create')
         logging.debug('Creating new simulated observation file in ObservationOperator (%s)' % self.simulated_file)

         dimid = f.createDimension('obs_num', size=None)
         dimid = ('obs_num',)
         savedict = io.std_savedict.copy()
         savedict['name'] = "obs_num"
         savedict['dtype'] = "int"
@@ -111,7 +111,7 @@ class ObservationOperator(object):
         f.add_data(savedict,nsets=0)

         dimmember = f.createDimension('nmembers', size=self.forecast_nmembers)
         dimmember = ('nmembers',)

         savedict = io.std_savedict.copy()
         savedict['name'] = "flask"
         savedict['dtype'] = "float"
@@ -121,28 +121,28 @@ class ObservationOperator(object):
         savedict['comment'] = "Simulated model value created by RandomizerObservationOperator"
         f.add_data(savedict,nsets=0)

         # Open file with x,y,z,t of model samples that need to be sampled
         f_in = io.ct_read(self.dacycle['ObsOperator.inputfile'],method='read')

         # Get simulated values and ID
         ids = f_in.get_variable('obs_num')
         obs = f_in.get_variable('observed')
         mdm = f_in.get_variable('modeldatamismatch')

         # Loop over observations, add random white noise, and write to file
         for i,data in enumerate(zip(ids,obs,mdm)):
             f.variables['obs_num'][i] = data[0]
             f.variables['flask'][i,:] = data[1]+np.random.randn(self.forecast_nmembers)*data[2]

         f.close()
         f_in.close()

         # Report success and exit
         logging.info('ObservationOperator finished successfully, output file written (%s)' % self.simulated_file)

     def run_forecast_model(self):
         self.prepare_run()
...
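
The run() method above acts as a randomizer-style observation operator: every observed mean value is copied into the ensemble and perturbed with Gaussian noise scaled by its model-data mismatch. A minimal numpy sketch of that perturbation step, with hypothetical arrays standing in for the netCDF input:

    import numpy as np

    nmembers = 5                                  # da.optimizer.nmembers in the rc-file
    obs = np.array([400.1, 402.3, 398.7])         # observed mole fractions (hypothetical)
    mdm = np.array([0.5, 1.0, 2.0])               # model-data mismatch per observation

    # One row of ensemble values per observation: the mean plus white noise
    flask = obs[:, None] + np.random.randn(len(obs), nmembers) * mdm[:, None]
    print(flask.shape)                            # (3, 5)
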
@@ -124,8 +124,8 @@ class Optimizer(object):
             allids.extend(samples.getvalues('id'))

             simulatedensemble = samples.getvalues('simulated')
             for s in range(simulatedensemble.shape[0]):
                 allsimulated.append(simulatedensemble[s])

         self.obs[:] = np.array(allobs)
         self.obs_ids[:] = np.array(allids)
...
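
This hunk gathers the simulated ensembles of all samples into one flat list before copying them into the optimizer's preallocated arrays. A short sketch of that gather step with hypothetical per-site blocks:

    import numpy as np

    # Hypothetical (nobs, nmembers) blocks returned by two sample objects
    samples_per_site = [np.arange(6.0).reshape(2, 3), np.arange(6.0, 12.0).reshape(2, 3)]

    allsimulated = []
    for simulatedensemble in samples_per_site:
        for s in range(simulatedensemble.shape[0]):    # append row by row, as in the Optimizer
            allsimulated.append(simulatedensemble[s])

    HX = np.array(allsimulated)                        # shape (total_nobs, nmembers)
    print(HX.shape)                                    # (4, 3)
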
@@ -95,11 +95,11 @@ class Platform(object):
             template += """#$ -hold_jid depends \n"""

         # First replace from passed dictionary
-        for k, v in joboptions.iteritems():
+        for k, v in joboptions.items():
             while k in template:
                 template = template.replace(k, v)

         # Fill remaining values with std_options
-        for k, v in std_joboptions.iteritems():
+        for k, v in std_joboptions.items():
             while k in template:
                 template = template.replace(k, v)
         return template
...
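
The job-template logic above substitutes placeholder tokens in a batch-script header, first from the options passed by the caller and then from the platform defaults. A stand-alone sketch of that two-pass substitution, with made-up placeholder names and values:

    def fill_template(template, joboptions, std_joboptions):
        # First replace from the passed dictionary, then fill what is left from the defaults.
        for k, v in joboptions.items():              # Python 2: joboptions.iteritems()
            while k in template:
                template = template.replace(k, v)
        for k, v in std_joboptions.items():
            while k in template:
                template = template.replace(k, v)
        return template

    header = "#$ -N jobname \n#$ -l h_rt=jobtime \n"
    print(fill_template(header, {'jobname': 'ctdas'}, {'jobtime': '04:00:00'}))
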
@@ -156,7 +156,7 @@ class StateVector(object):
         # These list objects hold the data for each time step of lag in the system. Note that the ensembles for each time step consist
         # of lists of EnsembleMember objects, we define member 0 as the mean of the distribution and n=1,...,nmembers as the spread.
-        self.ensemble_members = range(self.nlag)
+        self.ensemble_members = list(range(self.nlag))
         for n in range(self.nlag):
             self.ensemble_members[n] = []
@@ -500,7 +500,7 @@ class StateVector(object):
             raise ValueError
         result = np.zeros((self.nparams,), float)
-        for k, v in self.griddict.iteritems():
+        for k, v in self.griddict.items():
             #print k,k-1,result.shape, v
             if method == "avg":
                 result[k - 1] = griddata.take(v).mean()
@@ -528,7 +528,7 @@ class StateVector(object):
         """
         result = np.zeros(self.gridmap.shape, float)
-        for k, v in self.griddict.iteritems():
+        for k, v in self.griddict.items():
             #print k,v
             result.put(v, vectordata[k - 1])
         return result
...
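
The griddict used above maps each state-vector parameter number to the flat indices of the grid cells it covers; take()/mean() aggregates a grid field onto the vector, and put() expands vector values back onto the grid. A small sketch of both directions with a hypothetical two-region mapping:

    import numpy as np

    # Hypothetical mapping: parameter number -> flat indices of its grid cells
    griddict = {1: np.array([0, 1]), 2: np.array([2, 3, 4, 5])}
    gridmap = np.zeros((2, 3))                         # toy 2x3 grid

    def grid2vector(griddata, griddict, nparams):
        result = np.zeros((nparams,), float)
        for k, v in griddict.items():
            result[k - 1] = griddata.take(v).mean()    # average the cells of region k
        return result

    def vector2grid(vectordata, griddict, gridmap):
        result = np.zeros(gridmap.shape, float)
        for k, v in griddict.items():
            result.put(v, vectordata[k - 1])           # broadcast parameter k onto its cells
        return result

    print(grid2vector(np.arange(6.0).reshape(2, 3), griddict, 2))   # [0.5 3.5]
    print(vector2grid(np.array([1.0, 2.0]), griddict, gridmap))
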
@@ -46,14 +46,14 @@ class CO2DaSystem(DaSystem):
                            'regtype']

-        for k, v in self.iteritems():
+        for k, v in self.items():
             if v == 'True' :
                 self[k] = True
             if v == 'False':
                 self[k] = False

         for key in needed_rc_items:
-            if not self.has_key(key):
+            if key not in self:
                 logging.warning('Missing a required value in rc-file : %s' % key)

         logging.debug('DA System Info settings have been validated succesfully')
...
@@ -56,7 +56,7 @@ class CO2Observations(Observations):
         if not os.path.exists(filename):
             msg = 'Could not find the required observation input file (%s) ' % filename
             logging.error(msg)
-            raise IOError, msg
+            raise IOError(msg)
         else:
             self.obs_filename = filename
         self.datalist = []
@@ -118,7 +118,7 @@ class CO2Observations(Observations):
             logging.error(msg)
             logging.error("Did the sampling step succeed?")
             logging.error("...exiting")
-            raise IOError, msg
+            raise IOError(msg)
         ncf = io.ct_read(filename, method='read')
         ids = ncf.get_variable('obs_num')
@@ -129,7 +129,7 @@ class CO2Observations(Observations):
         obs_ids = self.getvalues('id')
         obs_ids = obs_ids.tolist()
-        ids = map(int, ids)
+        ids = list(map(int, ids))
         missing_samples = []
@@ -265,7 +265,7 @@ class CO2Observations(Observations):
         if not os.path.exists(filename):
             msg = 'Could not find the required sites.rc input file (%s)' % filename
             logging.error(msg)
-            raise IOError, msg
+            raise IOError(msg)
         else:
             self.sites_file = filename
@@ -308,7 +308,7 @@ class CO2Observations(Observations):
         for obs in self.datalist:
             obs.mdm = 1000.0 # default is very high model-data-mismatch, until explicitly set by script
-            if site_info.has_key(obs.code):
+            if obs.code in site_info:
                 logging.debug("Observation found (%s)" % obs.code)
                 obs.mdm = site_info[obs.code]['error'] * self.global_R_scaling
                 obs.may_localize = site_info[obs.code]['may_localize']
...
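
In Python 3, map() returns a lazy iterator, so the ids read from the sample file are wrapped in list() before they are compared against the local observation ids. Minimal sketch with hypothetical values:

    ids = ['12', '15', '19']            # obs_num values read from the sample file (hypothetical)
    obs_ids = [12, 15, 19, 23]          # ids held by the local observation list (hypothetical)

    ids = list(map(int, ids))           # Python 2 map() already returned a list
    missing_samples = [o for o in obs_ids if o not in ids]
    print(missing_samples)              # [23]
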
@@ -24,7 +24,7 @@ import os
 import sys
 import logging
 import datetime as dtm
-from string import strip
+#from string import strip
 from numpy import array, logical_and
 sys.path.append(os.getcwd())
@@ -55,7 +55,7 @@ class ObsPackObservations(Observations):
         if not os.path.exists(op_dir):
             msg = 'Could not find the required ObsPack distribution (%s) ' % op_dir
             logging.error(msg)
-            raise IOError, msg
+            raise IOError(msg)
         else:
             self.obspack_dir = op_dir
             self.obspack_id = op_id
@@ -102,7 +102,7 @@ class ObsPackObservations(Observations):
             ids = ncf.get_variable('obspack_num').take(subselect) # or should we propagate obs_num which is not unique across datasets??
             evn = ncf.get_variable('obspack_id').take(subselect, axis=0)
             evn = [s.tostring().lower() for s in evn]
-            evn = map(strip, evn)
+            evn = map(str, evn)
             site = ncf.get_attribute('site_code')
             lats = ncf.get_variable('latitude').take(subselect, axis=0)
             lons = ncf.get_variable('longitude').take(subselect, axis=0)
@@ -113,7 +113,7 @@ class ObsPackObservations(Observations):
             strategy = 1
             flags = ncf.get_variable('qc_flag').take(subselect, axis=0)
             flags = [s.tostring().lower() for s in flags]
-            flags = map(strip, flags)
+            flags = map(str, flags)
             flags = [int(f == '...') for f in flags]
             ncf.close()
@@ -132,7 +132,7 @@ class ObsPackObservations(Observations):
             logging.error(msg)
             logging.error("Did the sampling step succeed?")
             logging.error("...exiting")
-            raise IOError, msg
+            raise IOError(msg)
         ncf = io.ct_read(filename, method='read')
         ids = ncf.get_variable('obs_num')
@@ -141,7 +141,7 @@ class ObsPackObservations(Observations):
         logging.info("Successfully read data from model sample file (%s)" % filename)
         obs_ids = self.getvalues('id').tolist()
-        ids = map(int, ids)
+        ids = list(map(int, ids))
         missing_samples = []
@@ -296,7 +296,7 @@ class ObsPackObservations(Observations):
         if not os.path.exists(filename):
             msg = 'Could not find the required sites.rc input file (%s) ' % filename
             logging.error(msg)
-            raise IOError, msg
+            raise IOError(msg)
         else:
             self.sites_file = filename
@@ -348,7 +348,7 @@ class ObsPackObservations(Observations):
             identifier = name_convert(name="%s_%s_%s" % (site.lower(), method.lower(), lab.lower(),), to='GV')
-            if site_info.has_key(identifier):
+            if identifier in site_info:
                 logging.debug("Observation found (%s, %s)" % (obs.code, identifier))
                 obs.mdm = site_info[identifier]['error'] * self.global_R_scaling
                 obs.may_localize = site_info[identifier]['may_localize']
...
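
The standalone strip() function from the string module no longer exists in Python 3, which is why the import at the top of this file is commented out. A minimal sketch of stripping a list of identifiers that were decoded from fixed-width character arrays, assuming they are already str values:

    # Hypothetical identifiers decoded from a fixed-width netCDF character array
    evn = ['obspack_co2_1_mlo_surface  ', ' obspack_co2_1_brw_surface ']

    # Python 2: from string import strip; evn = map(strip, evn)
    evn = [s.strip() for s in evn]      # or: list(map(str.strip, evn))
    print(evn)
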
@@ -52,7 +52,7 @@ class ObsPackObservations(Observations):
         if not os.path.exists(op_dir):
             msg = 'Could not find the required ObsPack distribution (%s) ' % op_dir
             logging.error(msg)
-            raise IOError, msg
+            raise IOError(msg)
         else:
             self.obspack_dir = op_dir
             self.obspack_id = op_id
@@ -101,7 +101,7 @@ class ObsPackObservations(Observations):
             obspacknum = ncf.get_variable('obspack_num').take(subselect) # or should we propagate obs_num which is not unique across datasets??
             obspackid = ncf.get_variable('obspack_id').take(subselect, axis=0)
             obspackid = [s.tostring().lower() for s in obspackid]
-            obspackid = map(strip, obspackid)
+            obspackid = list(map(str.strip,str(obspackid)))
             datasetname = ncfile # use full name of dataset to propagate for clarity
             lats = ncf.get_variable('latitude').take(subselect, axis=0)
             lons = ncf.get_variable('longitude').take(subselect, axis=0)
@@ -127,7 +127,7 @@ class ObsPackObservations(Observations):
             logging.error(msg)
             logging.error("Did the sampling step succeed?")
             logging.error("...exiting")
-            raise IOError, msg
+            raise IOError(msg)
         ncf = io.ct_read(filename, method='read')
         ids = ncf.get_variable('obs_num')
@@ -136,7 +136,7 @@ class ObsPackObservations(Observations):
         logging.info("Successfully read data from model sample file (%s)" % filename)
         obs_ids = self.getvalues('id').tolist()
-        ids = map(int, ids)
+        ids = list(map(int, ids))
         missing_samples = []
@@ -173,7 +173,7 @@ class ObsPackObservations(Observations):
         dim10char = f.add_dim('string_of10chars', 10)
         dimcalcomp = f.add_dim('calendar_components', 6)
-        for key, value in self.site_move.iteritems():
+        for key, value in self.site_move.items():
             msg = "Site is moved by %3.2f degrees latitude and %3.2f degrees longitude" % value
             f.add_attribute(key, msg)
@@ -296,7 +296,7 @@ class ObsPackObservations(Observations):
         if not os.path.exists(filename):
             msg = 'Could not find the required sites.rc input file (%s) ' % filename
             logging.error(msg)
-            raise IOError, msg
+            raise IOError(msg)