Commit 5bb5096d authored by karolina

cleanup, renaming variables

rename classes *PlatForm to *Platform
rename the Identifier/Version attribute pair to ID/version
rename CamelCase locals (SitesWeights, SiteInfo, ...) to snake_case (sites_weights, site_info, ...)
parent 3bf2a95c
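Note: the renames follow one convention throughout the diff: class names keep CamelCase but lose the stray capital (`PlatForm` becomes `Platform`), the `Identifier`/`Version` attribute pair becomes `ID`/`version`, and CamelCase locals become snake_case. A minimal before/after sketch of the convention, condensed from the hunks below:

    class Platform(object):          # was: class PlatForm(object)
        def __init__(self):
            self.ID = 'iPad'         # was: self.Identifier
            self.version = '1.0'     # was: self.Version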
@@ -49,21 +49,20 @@ class DaSystem(dict):
         to the dictionary
         """
-        self.Identifier = 'CarbonTracker CO2'  # the identifier gives the platform name
+        self.ID = 'CarbonTracker CO2'  # the identifier gives the platform name
         self.load_rc(rcfilename)
-        logging.debug("Data Assimilation System initialized: %s" % self.Identifier)
+        logging.debug("Data Assimilation System initialized: %s" % self.ID)
 
-    def load_rc(self, RcFileName):
+    def load_rc(self, rcfilename):
         """
         This method loads a DA System Info rc-file with settings for this simulation
         """
-        for k, v in rc.read(RcFileName).iteritems():
+        for k, v in rc.read(rcfilename).iteritems():
             self[k] = v
-        self.RcFileName = RcFileName
-        self.DaRcLoaded = True
-        logging.debug("DA System Info rc-file (%s) loaded successfully" % self.RcFileName)
+        logging.debug("DA System Info rc-file (%s) loaded successfully" % rcfilename)
 
     def validate(self):
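Note: `load_rc` fills the `DaSystem` dict itself with every key/value pair from the rc-file. A self-contained sketch of that pattern; `read_rc_pairs` is an invented stub standing in for `da.tools.rc.read`, assuming the usual `key : value` rc format with `!` comments:

    import logging

    def read_rc_pairs(text):
        """Stub for da.tools.rc.read: parse 'key : value' lines, skip '!' comments (assumed format)."""
        pairs = {}
        for line in text.splitlines():
            line = line.strip()
            if line.startswith('!') or ':' not in line:
                continue
            k, v = line.split(':', 1)
            pairs[k.strip()] = v.strip()
        return pairs

    class MiniDaSystem(dict):
        """Invented stand-in for DaSystem, showing the fill-the-dict pattern."""
        def load_rc(self, rctext):
            for k, v in read_rc_pairs(rctext).items():
                self[k] = v
            logging.debug("DA System Info rc settings loaded (%d keys)" % len(self))

    das = MiniDaSystem()
    das.load_rc("! example settings\nobs.rejection.threshold : 3")
    assert das['obs.rejection.threshold'] == '3'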
@@ -49,14 +49,14 @@ class Observations(object):
         """
         create an object with an identifier, version, and an empty ObservationList
         """
-        self.Identifier = identifier
-        self.Version = version
+        self.ID = identifier
+        self.version = version
         self.datalist = []  # initialize with an empty list of obs
 
         # The following code allows the object to be initialized with a DaCycle object already present. Otherwise, it can
         # be added at a later moment.
 
-        logging.info('Observations object initialized: %s' % self.Identifier)
+        logging.info('Observations object initialized: %s' % self.ID)
 
     def getlength(self):
         return len(self.datalist)
@@ -28,14 +28,14 @@ class ObservationOperator(object):
     """
 
-    def __init__(self, RcFileName, DaCycle=None):
+    def __init__(self, rcfilename, DaCycle=None):
         """ The instance of an ObservationOperator is application dependent """
         self.Identifier = identifier
         self.Version = version
         self.RestartFileList = []
         self.outputdir = None  # Needed for opening the samples.nc files created
 
-        self.load_rc(RcFileName)  # load the specified rc-file
+        self.load_rc(rcfilename)  # load the specified rc-file
         self.validate_rc()  # validate the contents
 
         logging.info('Observation Operator object initialized: %s' % self.Identifier)
@@ -10,10 +10,7 @@ File created on 28 Jul 2010.
 """
 
 import os
 import sys
 import logging
 import datetime
 import numpy as np
 import numpy.linalg as la
@@ -33,10 +30,10 @@ class Optimizer(object):
     """
 
     def __init__(self):
-        self.Identifier = identifier
-        self.Version = version
+        self.ID = identifier
+        self.version = version
 
-        logging.info('Optimizer object initialized: %s' % self.Identifier)
+        logging.info('Optimizer object initialized: %s' % self.ID)
 
     def initialize(self, dims):
         self.nlag = dims[0]
@@ -97,24 +94,24 @@ class Optimizer(object):
         allsimulated = None  # collect all members model samples for n=1,..,nlag
         for n in range(self.nlag):
-            Samples = StateVector.ObsToAssimmilate[n]
+            samples = StateVector.ObsToAssimmilate[n]
             members = StateVector.EnsembleMembers[n]
             self.x[n * self.nparams:(n + 1) * self.nparams] = members[0].ParameterValues
             self.X_prime[n * self.nparams:(n + 1) * self.nparams, :] = np.transpose(np.array([m.ParameterValues for m in members]))
 
-            if Samples != None:
-                self.rejection_threshold = Samples.rejection_threshold
+            if samples != None:
+                self.rejection_threshold = samples.rejection_threshold
 
-                allreject.extend(Samples.getvalues('may_reject'))
-                alllocalize.extend(Samples.getvalues('may_localize'))
-                allflags.extend(Samples.getvalues('flag'))
-                allspecies.extend(Samples.getvalues('species'))
-                allobs.extend(Samples.getvalues('obs'))
-                allsites.extend(Samples.getvalues('code'))
-                allmdm.extend(Samples.getvalues('mdm'))
-                allids.extend(Samples.getvalues('id'))
+                allreject.extend(samples.getvalues('may_reject'))
+                alllocalize.extend(samples.getvalues('may_localize'))
+                allflags.extend(samples.getvalues('flag'))
+                allspecies.extend(samples.getvalues('species'))
+                allobs.extend(samples.getvalues('obs'))
+                allsites.extend(samples.getvalues('code'))
+                allmdm.extend(samples.getvalues('mdm'))
+                allids.extend(samples.getvalues('id'))
 
-                simulatedensemble = Samples.getvalues('simulated')
+                simulatedensemble = samples.getvalues('simulated')
 
                 if allsimulated == None:
                     allsimulated = np.array(simulatedensemble)
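Note: `getvalues` is defined on the observation sets and is not part of this diff; in the sketch below it is assumed to return one value per observation, which is what the `extend` calls above rely on to build flat per-observation lists across all lags:

    import numpy as np

    class Sample(object):
        """Invented observation record with two of the attributes queried above."""
        def __init__(self, obs, mdm):
            self.obs = obs
            self.mdm = mdm

    class SampleSet(object):
        """Stand-in for the per-lag observation sets; getvalues is assumed behavior."""
        def __init__(self, datalist):
            self.datalist = datalist
        def getvalues(self, name):
            return np.array([getattr(o, name) for o in self.datalist])

    allobs = []
    allmdm = []
    for samples in [SampleSet([Sample(400.1, 3.0), Sample(401.2, 3.0)]),   # lag 1
                    SampleSet([Sample(399.7, 1.5)])]:                      # lag 2
        allobs.extend(samples.getvalues('obs'))
        allmdm.extend(samples.getvalues('mdm'))
    assert len(allobs) == 3 and len(allmdm) == 3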
@@ -150,7 +147,7 @@ class Optimizer(object):
         logging.debug('Returning optimized data to the StateVector, setting "StateVector.isOptimized = True" ')
 
-    def write_diagnostics(self, filename, type='prior'):
+    def write_diagnostics(self, filename, type):
         """
         Open a NetCDF file and write diagnostic output from optimization process:
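Note: dropping the `type='prior'` default means any caller that omitted the argument now fails, which is presumably the point: each call site must state which diagnostics it writes. A hypothetical call-site sketch (the stub and stage names are illustrative, not the real Optimizer):

    class MiniOptimizer(object):
        """Invented stub; only shows the now-mandatory keyword."""
        def write_diagnostics(self, filename, type):
            print("writing %s diagnostics to %s" % (type, filename))

    opt = MiniOptimizer()
    opt.write_diagnostics('diagnostics.nc', type='prior')      # previously implicit
    opt.write_diagnostics('diagnostics.nc', type='optimized')  # stage name is illustrative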
@@ -8,14 +8,14 @@
 Revision History:
 File created on 06 Sep 2010.
 
-The PlatForm class is found in the module :mod:`platform`, or in a specific implementation under the da/source tree.
+The Platform class is found in the module :mod:`platform`, or in a specific implementation under the da/source tree.
 
 The platform object holds attributes and methods that allow job control on each specific platform. This includes methods to create and submit jobs,
 but also to obtain process and/or job ID's. These are needed to control the flow of the system on each platform.
 
-Typically, every platform needs specific implementations of this object (through inheritance), and you should refer to your specific PlatForm object documentation for details (see *da/platform/*).
+Typically, every platform needs specific implementations of this object (through inheritance), and you should refer to your specific Platform object documentation for details (see *da/platform/*).
 
-.. autoclass:: da.baseclasses.platform.PlatForm
+.. autoclass:: da.baseclasses.platform.Platform
    :members:
    :inherited-members:
@@ -27,22 +27,22 @@ import subprocess
 
 std_joboptions = {'jobname':'test', 'jobaccount':'co2', 'jobnodes':'nserial 1', 'jobshell':'/bin/sh', 'depends':'', 'jobtime':'01:00:00'}
 
-class PlatForm(object):
+class Platform(object):
     """
     This specifies platform dependent options under generic object calls. A platform object is used to control and submit jobs
     """
 
     def __init__(self):
         """
-        The init function reports the hard-coded ``Identifier`` and ``Version`` of the PlatForm. Since each new
-        computer/user requires their own PlatForm object modifications, the init function is usually overwritten
+        The init function reports the hard-coded ``ID`` and ``version`` of the Platform. Since each new
+        computer/user requires their own Platform object modifications, the init function is usually overwritten
         in the specific implementation of this class
         """
-        self.Identifier = 'iPad'  # the identifier gives the plaform name
-        self.Version = '1.0'  # the platform version used
+        self.ID = 'iPad'  # the identifier gives the platform name
+        self.version = '1.0'  # the platform version used
 
-        logging.debug('%s object initialized' % self.Identifier)
-        logging.debug('%s version: %s' % (self.Identifier, self.Version))
+        logging.debug('%s object initialized' % self.ID)
+        logging.debug('%s version: %s' % (self.ID, self.version))
 
     def give_blocking_flag(self):
         return ""
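Note: per the docstring, each machine gets its own subclass that overrides `__init__` (and the job-control methods). A minimal sketch under the new names; the subclass name and values are invented for illustration:

    import logging

    class Platform(object):
        """Trimmed copy of the base class in this hunk."""
        def __init__(self):
            self.ID = 'iPad'       # the identifier gives the platform name
            self.version = '1.0'   # the platform version used
            logging.debug('%s object initialized' % self.ID)

        def give_blocking_flag(self):
            return ""

    class ClusterPlatform(Platform):
        """Hypothetical site-specific implementation."""
        def __init__(self):
            self.ID = 'mycluster'
            self.version = '1.0'
            logging.debug('%s object initialized' % self.ID)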
@@ -115,13 +115,13 @@ class StateVector(object):
     """
 
     def __init__(self):
-        self.Identifier = identifier
-        self.Version = version
+        self.ID = identifier
+        self.version = version
 
         # The following code allows the object to be initialized with a DaCycle object already present. Otherwise, it can
         # be added at a later moment.
 
-        logging.info('Statevector object initialized: %s' % self.Identifier)
+        logging.info('Statevector object initialized: %s' % self.ID)
 
     def initialize(self, DaCycle):
         """
@@ -274,7 +274,7 @@ class StateVector(object):
             newmember.ParameterValues = np.dot(C, rands) + newmean
             self.EnsembleMembers[lag].append(newmember)
 
-        logging.debug('%d new ensemble members were added to the state vector # %d' % (self.nmembers, lag + 1))
+        logging.debug('%d new ensemble members were added to the state vector # %d' % (self.nmembers, (lag + 1)))
 
     def propagate(self, DaCycle):
@@ -380,7 +380,7 @@ class StateVector(object):
         #import da.tools.io as io
         f = io.CT_Read(filename, 'read')
-        MeanState = f.get_variable('statevectormean_' + qual)
+        meanstate = f.get_variable('statevectormean_' + qual)
         EnsembleMembers = f.get_variable('statevectorensemble_' + qual)
         f.close()
@@ -390,9 +390,9 @@ class StateVector(object):
                 logging.warning('Existing ensemble for lag=%d was removed to make place for newly read data' % (n + 1))
 
             for m in range(self.nmembers):
-                NewMember = EnsembleMember(m)
-                NewMember.ParameterValues = EnsembleMembers[n, m, :].flatten() + MeanState[n]  # add the mean to the deviations to hold the full parameter values
-                self.EnsembleMembers[n].append(NewMember)
+                newmember = EnsembleMember(m)
+                newmember.ParameterValues = EnsembleMembers[n, m, :].flatten() + meanstate[n]  # add the mean to the deviations to hold the full parameter values
+                self.EnsembleMembers[n].append(newmember)
 
         logging.info('Successfully read the State Vector from file (%s)' % filename)
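Note: the file stores the ensemble as deviations around `statevectormean_<qual>`, so each member is rebuilt as deviation plus mean, exactly the `flatten() + meanstate[n]` line above. A small numpy sketch with invented dimensions:

    import numpy as np

    nlag, nmembers, nparams = 2, 4, 3                           # invented dimensions
    meanstate = np.ones((nlag, nparams))                        # statevectormean_<qual>
    EnsembleMembers = np.random.randn(nlag, nmembers, nparams)  # deviations from the mean

    n, m = 0, 1
    full_values = EnsembleMembers[n, m, :].flatten() + meanstate[n]
    assert full_values.shape == (nparams,)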
@@ -35,8 +35,10 @@ class CO2DaSystem(DaSystem):
         for k, v in self.iteritems():
-            if v == 'True' : self[k] = True
-            if v == 'False': self[k] = False
+            if v == 'True':
+                self[k] = True
+            if v == 'False':
+                self[k] = False
 
         for key in needed_rc_items:
             if not self.has_key(key):
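Note: the reformatted loop turns the 'True'/'False' strings that rc parsing delivers into real booleans, in place. A runnable sketch of the same idea; `MiniSystem` and the keys are invented stand-ins:

    class MiniSystem(dict):
        """Invented stand-in for the DaSystem dict; keys are illustrative."""
        def convert_booleans(self):
            for k, v in self.items():
                if v == 'True':
                    self[k] = True
                if v == 'False':
                    self[k] = False

    s = MiniSystem({'dir.create': 'True', 'obs.dryrun': 'False', 'ocn.cov': 'cov.nc'})
    s.convert_booleans()
    assert s['dir.create'] is True and s['obs.dryrun'] is False and s['ocn.cov'] == 'cov.nc'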
@@ -255,15 +255,15 @@ class CO2Observations(Observations):
             logging.error(msg)
             raise IOError, msg
         else:
-            self.SitesFile = filename
+            self.sites_file = filename
 
-        SitesWeights = rc.read(self.SitesFile)
+        sites_weights = rc.read(self.sites_file)
 
-        self.rejection_threshold = int(SitesWeights['obs.rejection.threshold'])
-        self.global_R_scaling = float(SitesWeights['global.R.scaling'])
-        self.n_site_categories = int(SitesWeights['n.site.categories'])
-        self.n_sites_active = int(SitesWeights['n.sites.active'])
-        self.n_sites_moved = int(SitesWeights['n.sites.moved'])
+        self.rejection_threshold = int(sites_weights['obs.rejection.threshold'])
+        self.global_R_scaling = float(sites_weights['global.R.scaling'])
+        self.n_site_categories = int(sites_weights['n.site.categories'])
+        self.n_sites_active = int(sites_weights['n.sites.active'])
+        self.n_sites_moved = int(sites_weights['n.sites.moved'])
 
         logging.debug('Model-data mismatch rejection threshold: %d ' % self.rejection_threshold)
         logging.debug('Model-data mismatch scaling factor     : %f ' % self.global_R_scaling)
@@ -271,11 +271,11 @@ class CO2Observations(Observations):
         logging.debug('Model-data mismatch active sites       : %d ' % self.n_sites_active)
         logging.debug('Model-data mismatch moved sites        : %d ' % self.n_sites_moved)
 
-        cats = [k for k in SitesWeights.keys() if 'site.category' in k]
+        cats = [k for k in sites_weights.keys() if 'site.category' in k]
 
         SiteCategories = {}
         for key in cats:
-            name, error, may_localize, may_reject = SitesWeights[key].split(';')
+            name, error, may_localize, may_reject = sites_weights[key].split(';')
             name = name.strip().lower()
             error = float(error)
             may_reject = ("TRUE" in may_reject.upper())
@@ -284,30 +284,30 @@ class CO2Observations(Observations):
             #print name,SiteCategories[name]
 
-        active = [k for k in SitesWeights.keys() if 'site.active' in k]
+        active = [k for k in sites_weights.keys() if 'site.active' in k]
 
-        SiteInfo = {}
+        site_info = {}
         for key in active:
-            sitename, sitecategory = SitesWeights[key].split(';')
+            sitename, sitecategory = sites_weights[key].split(';')
             sitename = sitename.strip().lower()
             sitecategory = sitecategory.strip().lower()
-            SiteInfo[sitename] = SiteCategories[sitecategory]
-            #print sitename,SiteInfo[sitename]
+            site_info[sitename] = SiteCategories[sitecategory]
+            #print sitename,site_info[sitename]
 
         for obs in self.datalist:
             obs.mdm = 1000.0  # default is very high model-data-mismatch, until explicitly set by script
-            if SiteInfo.has_key(obs.code):
+            if site_info.has_key(obs.code):
                 logging.debug("Observation found (%s)" % obs.code)
-                obs.mdm = SiteInfo[obs.code]['error'] * self.global_R_scaling
-                obs.may_localize = SiteInfo[obs.code]['may_localize']
-                obs.may_reject = SiteInfo[obs.code]['may_reject']
+                obs.mdm = site_info[obs.code]['error'] * self.global_R_scaling
+                obs.may_localize = site_info[obs.code]['may_localize']
+                obs.may_reject = site_info[obs.code]['may_reject']
             else:
-                logging.warning("Observation NOT found (%s, %s), please check sites.rc file (%s) !!!" % (obs.code, identifier, self.SitesFile))
+                logging.warning("Observation NOT found (%s, %s), please check sites.rc file (%s) !!!" % (obs.code, identifier, self.sites_file))
                 obs.flag = 99
 
-        # Add SiteInfo dictionary to the Observations object for future use
-        self.SiteInfo = SiteInfo
+        # Add site_info dictionary to the Observations object for future use
+        self.site_info = site_info
 
     def write_obs_to_file(self, outfile):
         """
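Note: the `split(';')` calls imply a sites.rc layout where each `site.category` value is `name; error; may_localize; may_reject` and each `site.active` value is `sitename; category`. A self-contained round trip with invented entries (not copied from a real sites.rc):

    sites_weights = {
        'site.category.01': 'surface; 2.5; TRUE; TRUE',
        'site.category.02': 'do-not-use; 1000.0; FALSE; FALSE',
        'site.active.001': 'mlo; surface',
    }

    SiteCategories = {}
    for key in [k for k in sites_weights.keys() if 'site.category' in k]:
        name, error, may_localize, may_reject = sites_weights[key].split(';')
        SiteCategories[name.strip().lower()] = {
            'category': name.strip().lower(),
            'error': float(error),
            'may_localize': "TRUE" in may_localize.upper(),
            'may_reject': "TRUE" in may_reject.upper(),
        }

    site_info = {}
    for key in [k for k in sites_weights.keys() if 'site.active' in k]:
        sitename, sitecategory = sites_weights[key].split(';')
        site_info[sitename.strip().lower()] = SiteCategories[sitecategory.strip().lower()]

    assert site_info['mlo']['error'] == 2.5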
@@ -46,8 +46,8 @@ class ObsPackObservations(Observations):
             logging.error(msg)
             raise IOError, msg
         else:
-            self.ObsPackDir = op_dir
-            self.ObsPackId = op_id
+            self.obspack_dir = op_dir
+            self.obspack_id = op_id
 
         self.datalist = []
@@ -62,7 +62,7 @@ class ObsPackObservations(Observations):
         # Step 1: Read list of available site files in package
-        infile = os.path.join(self.ObsPackDir, 'summary', '%s_dataset_summary.txt' % (self.ObsPackId,))
+        infile = os.path.join(self.obspack_dir, 'summary', '%s_dataset_summary.txt' % (self.obspack_id,))
         f = open(infile, 'r')
         lines = f.readlines()
         f.close()
@@ -79,7 +79,7 @@ class ObsPackObservations(Observations):
         for ncfile in ncfilelist:
-            infile = os.path.join(self.ObsPackDir, 'data', 'nc', ncfile)
+            infile = os.path.join(self.obspack_dir, 'data', 'nc', ncfile)
             ncf = io.CT_Read(infile, 'read')
             idates = ncf.get_variable('time_components')
             dates = array([dtm.datetime(*d) for d in idates])
@@ -267,15 +267,15 @@ class ObsPackObservations(Observations):
             logging.error(msg)
             raise IOError, msg
         else:
-            self.SitesFile = filename
+            self.sites_file = filename
 
-        SitesWeights = rc.read(self.SitesFile)
+        sites_weights = rc.read(self.sites_file)
 
-        self.rejection_threshold = int(SitesWeights['obs.rejection.threshold'])
-        self.global_R_scaling = float(SitesWeights['global.R.scaling'])
-        self.n_site_categories = int(SitesWeights['n.site.categories'])
-        self.n_sites_active = int(SitesWeights['n.sites.active'])
-        self.n_sites_moved = int(SitesWeights['n.sites.moved'])
+        self.rejection_threshold = int(sites_weights['obs.rejection.threshold'])
+        self.global_R_scaling = float(sites_weights['global.R.scaling'])
+        self.n_site_categories = int(sites_weights['n.site.categories'])
+        self.n_sites_active = int(sites_weights['n.sites.active'])
+        self.n_sites_moved = int(sites_weights['n.sites.moved'])
 
         logging.debug('Model-data mismatch rejection threshold: %d ' % (self.rejection_threshold))
         logging.warning('Model-data mismatch scaling factor     : %f ' % (self.global_R_scaling))
@@ -283,28 +283,28 @@ class ObsPackObservations(Observations):
         logging.debug('Model-data mismatch active sites       : %d ' % (self.n_sites_active))
         logging.debug('Model-data mismatch moved sites        : %d ' % (self.n_sites_moved))
 
-        cats = [k for k in SitesWeights.keys() if 'site.category' in k]
+        cats = [k for k in sites_weights.keys() if 'site.category' in k]
 
-        SiteCategories = {}
+        site_categories = {}
         for key in cats:
-            name, error, may_localize, may_reject = SitesWeights[key].split(';')
+            name, error, may_localize, may_reject = sites_weights[key].split(';')
             name = name.strip().lower()
             error = float(error)
             may_localize = ("TRUE" in may_localize.upper())
             may_reject = ("TRUE" in may_reject.upper())
-            SiteCategories[name] = {'category':name, 'error':error, 'may_localize':may_localize, 'may_reject':may_reject}
-            #print name,SiteCategories[name]
+            site_categories[name] = {'category':name, 'error':error, 'may_localize':may_localize, 'may_reject':may_reject}
+            #print name,site_categories[name]
 
-        active = [k for k in SitesWeights.keys() if 'site.active' in k]
+        active = [k for k in sites_weights.keys() if 'site.active' in k]
 
-        SiteInfo = {}
+        site_info = {}
         for key in active:
-            sitename, sitecategory = SitesWeights[key].split(';')
+            sitename, sitecategory = sites_weights[key].split(';')
             sitename = sitename.strip()
             sitecategory = sitecategory.strip().lower()
-            SiteInfo[sitename] = SiteCategories[sitecategory]
-            #print sitename,SiteInfo[sitename]
+            site_info[sitename] = site_categories[sitecategory]
+            #print sitename,site_info[sitename]
 
         for obs in self.datalist:
@@ -317,22 +317,22 @@ class ObsPackObservations(Observations):
             identifier = name_convert(name="%s_%s_%s" % (site.lower(), method.lower(), lab.lower(),), to='GV')
 
-            if SiteInfo.has_key(identifier):
+            if site_info.has_key(identifier):
                 logging.debug("Observation found (%s, %s)" % (obs.code, identifier))
-                obs.mdm = SiteInfo[identifier]['error'] * self.global_R_scaling
-                obs.may_localize = SiteInfo[identifier]['may_localize']
-                obs.may_reject = SiteInfo[identifier]['may_reject']
+                obs.mdm = site_info[identifier]['error'] * self.global_R_scaling
+                obs.may_localize = site_info[identifier]['may_localize']
+                obs.may_reject = site_info[identifier]['may_reject']
                 obs.flag = 0
             else:
-                logging.warning("Observation NOT found (%s, %s), please check sites.rc file (%s) !!!" % (obs.code, identifier, self.SitesFile))
+                logging.warning("Observation NOT found (%s, %s), please check sites.rc file (%s) !!!" % (obs.code, identifier, self.sites_file))
 
-            if SiteInfo[identifier]['category'] == 'do-not-use':
+            if site_info[identifier]['category'] == 'do-not-use':
                 logging.warning("Observation found (%s, %s), but not used in assimilation !!!" % (obs.code, identifier))
                 obs.flag = 99
 
-        # Add SiteInfo dictionary to the Observations object for future use
-        self.SiteInfo = SiteInfo
+        # Add site_info dictionary to the Observations object for future use
+        self.site_info = site_info
 
     def write_obs_to_file(self, outfile):
         """
@@ -42,8 +42,8 @@ class ObsPackObservations(Observations):
             logging.error(msg)
             raise IOError, msg
         else:
-            self.ObsPackDir = op_dir
-            self.ObsPackId = op_id
+            self.obspack_dir = op_dir
+            self.obspack_id = op_id
 
         self.datalist = []
@@ -57,7 +57,7 @@ class ObsPackObservations(Observations):
         # Step 1: Read list of available site files in package
-        infile = os.path.join(self.ObsPackDir, 'summary', '%s_dataset_summary.txt' % (self.ObsPackId,))
+        infile = os.path.join(self.obspack_dir, 'summary', '%s_dataset_summary.txt' % (self.obspack_id,))
         f = open(infile, 'r')
         lines = f.readlines()
         f.close()
@@ -77,7 +77,7 @@ class ObsPackObservations(Observations):
         for ncfile in ncfilelist:
-            infile = os.path.join(self.ObsPackDir, 'data', 'nc', ncfile + '.nc')
+            infile = os.path.join(self.obspack_dir, 'data', 'nc', ncfile + '.nc')
             ncf = io.CT_Read(infile, 'read')
             idates = ncf.get_variable('time_components')
             dates = array([dtm.datetime(*d) for d in idates])
@@ -162,7 +162,7 @@ class ObsPackObservations(Observations):
         f.close()
         #return obsinputfile
 
-        for key, value in self.SiteMove.iteritems():
+        for key, value in self.site_move.iteritems():
             msg = "Site is moved by %3.2f degrees latitude and %3.2f degrees longitude" % value
             f.AddAttribute(key, msg)
@@ -266,44 +266,44 @@ class ObsPackObservations(Observations):
             logging.error(msg)
             raise IOError, msg
         else:
-            self.SitesFile = filename
+            self.sites_file = filename
 
-        SitesWeights = rc.read(self.SitesFile)
+        sites_weights = rc.read(self.sites_file)
 
-        self.rejection_threshold = int(SitesWeights['obs.rejection.threshold'])
-        self.global_R_scaling = float(SitesWeights['global.R.scaling'])
-        self.n_site_categories = int(SitesWeights['n.site.categories'])
+        self.rejection_threshold = int(sites_weights['obs.rejection.threshold'])
+        self.global_R_scaling = float(sites_weights['global.R.scaling'])
+        self.n_site_categories = int(sites_weights['n.site.categories'])
 
         logging.debug('Model-data mismatch rejection threshold: %d ' % self.rejection_threshold)
         logging.warning('Model-data mismatch scaling factor     : %f ' % self.global_R_scaling)
         logging.debug('Model-data mismatch site categories    : %d ' % self.n_site_categories)
 
-        cats = [k for k in SitesWeights.keys() if 'site.category' in k]
+        cats = [k for k in sites_weights.keys() if 'site.category' in k]
 
-        SiteCategories = {}
+        site_categories = {}
         for key in cats:
-            name, error, may_localize, may_reject = SitesWeights[key].split(';')
+            name, error, may_localize, may_reject = sites_weights[key].split(';')
             name = name.strip().lower()
             error = float(error)
             may_reject = ("TRUE" in may_reject.upper())
            may_localize = ("TRUE" in may_localize.upper())
-            SiteCategories[name] = {'category': name, 'error': error, 'may_localize': may_localize, 'may_reject': may_reject}
+            site_categories[name] = {'category': name, 'error': error, 'may_localize': may_localize, 'may_reject': may_reject}
 
-        SiteInfo = {}
-        SiteMove = {}
-        SiteHourly = {}  # option added to include only certain hours of the day (for e.g. PAL) IvdL
-        for key, value in SitesWeights.iteritems():
+        site_info = {}
+        site_move = {}
+        site_hourly = {}  # option added to include only certain hours of the day (for e.g. PAL) IvdL
+        for key, value in sites_weights.iteritems():
             if 'co2_' in key or 'sf6' in key:  # to be fixed later, do not yet know how to parse valid keys from rc-files yet.... WP
                 sitename, sitecategory = key, value
                 sitename = sitename.strip()
                 sitecategory = sitecategory.split()[0].strip().lower()
-                SiteInfo[sitename] = SiteCategories[sitecategory]
+                site_info[sitename] = site_categories[sitecategory]
             if 'site.move' in key:
                 identifier, latmove, lonmove = value.split(';')
-                SiteMove[identifier.strip()] = (float(latmove), float(lonmove))
+                site_move[identifier.strip()] = (float(latmove), float(lonmove))
             if 'site.hourly' in key:
                 identifier, hourfrom, hourto = value.split(';')
-                SiteHourly[identifier.strip()] = (int(hourfrom), int(hourto))
+                site_hourly[identifier.strip()] = (int(hourfrom), int(hourto))
 
         for obs in self.datalist:  # loop over all available data points
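Note: this variant also reads two extra key families from the same file: `site.move` (a latitude/longitude shift per site) and `site.hourly` (an hour window outside which observations are excluded). A sketch of those two parses; the identifier and numbers are invented:

    sites_weights = {
        'site.move.01': 'co2_pal_surface-insitu_30; 1.50; -2.00',
        'site.hourly.01': 'co2_pal_surface-insitu_30; 10; 14',
    }

    site_move = {}
    site_hourly = {}
    for key, value in sites_weights.items():
        if 'site.move' in key:
            identifier, latmove, lonmove = value.split(';')
            site_move[identifier.strip()] = (float(latmove), float(lonmove))
        if 'site.hourly' in key:
            identifier, hourfrom, hourto = value.split(';')
            site_hourly[identifier.strip()] = (int(hourfrom), int(hourto))

    assert site_move['co2_pal_surface-insitu_30'] == (1.5, -2.0)
    assert site_hourly['co2_pal_surface-insitu_30'] == (10, 14)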
@@ -314,43 +314,43 @@ class ObsPackObservations(Observations):
             identifier = obs.code
             species, site, method, lab, datasetnr = identifier.split('_')
 
-            if SiteInfo.has_key(identifier):
-                if SiteHourly.has_key(identifier):
-                    hourf, hourt = SiteHourly[identifier]
+            if site_info.has_key(identifier):
+                if site_hourly.has_key(identifier):
+                    hourf, hourt = site_hourly[identifier]
                     if int(obs.xdate.hour) >= hourf and int(obs.xdate.hour) <= hourt:
                         logging.warning("Observation in hourly dataset INCLUDED, because sampling time %s was between %s:00-%s:00" % (obs.xdate.time(), hourf, hourt))
                     else:
                         logging.warning("Observation in hourly dataset EXCLUDED, because sampling time %s was outside %s:00-%s:00" % (obs.xdate.time(), hourf, hourt))
                         exclude_hourly = True
-                if SiteInfo[identifier]['category'] == 'do-not-use' or exclude_hourly:
+                if site_info[identifier]['category'] == 'do-not-use' or exclude_hourly:
                     logging.warning("Observation found (%s, %d), but not used in assimilation !!!" % (identifier, obs.id))
-                    obs.mdm = SiteInfo[identifier]['error'] * self.global_R_scaling
-                    obs.may_localize = SiteInfo[identifier]['may_localize']
-                    obs.may_reject = SiteInfo[identifier]['may_reject']
+                    obs.mdm = site_info[identifier]['error'] * self.global_R_scaling
+                    obs.may_localize = site_info[identifier]['may_localize']
+                    obs.may_reject = site_info[identifier]['may_reject']
                     obs.flag = 99
                 else:
                     logging.debug("Observation found (%s, %d)" % (identifier, obs.id))
-                    obs.mdm = SiteInfo[identifier]['error'] * self.global_R_scaling
-                    obs.may_localize = SiteInfo[identifier]['may_localize']
-                    obs.may_reject = SiteInfo[identifier]['may_reject']
+                    obs.mdm = site_info[identifier]['error'] * self.global_R_scaling
+                    obs.may_localize = site_info[identifier]['may_localize']
+                    obs.may_reject = site_info[identifier]['may_reject']
                     obs.flag = 0
             else:
-                logging.warning("Observation NOT found (%s, %d), please check sites.rc file (%s) !!!" % (identifier, obs.id, self.SitesFile))
+                logging.warning("Observation NOT found (%s, %d), please check sites.rc file (%s) !!!" % (identifier, obs.id, self.sites_file))
 
-            if SiteMove.has_key(identifier):
+            if site_move.has_key(identifier):