Commit 6f0daa3a authored by Woude, Auke van der's avatar Woude, Auke van der

test

parent e9fa0433
......@@ -96,11 +96,6 @@ class CO2StateVector(StateVector):
if len(sel[0]) > 0:
self.griddict[r] = sel
# biosphere_fluxes = nc.Dataset(dacycle.dasystem['biosphere_fluxdir'])
# self.gpp = biosphere_fluxes['GPP']#[time_indices]
# self.ter = biosphere_fluxes['TER']#[time_indices]
# logging.debug("A dictionary to map grids to states and vice versa was created")
# Create a mask for species/unknowns
self.make_species_mask()
......
import numpy as np
#### Some function for using SiB data
def convert_2d(sib_data, latindex, lonindex, empty_val=np.nan):
    """Convert a SiB4 land-point vector to a global 0.5-degree lat-lon field.

    Parameters
    ----------
    sib_data : np.ndarray
        SiB4 data with the land-point dimension last. Supported shapes:
        (npoints,), (ntime, npoints) or (ntime, npft, npoints).
    latindex, lonindex : sequence of float
        Latitude / longitude (degrees) of each land point; assumed to be
        cell centers of a 0.5-degree grid (offsets of +-0.25 degree).
    empty_val : float, optional
        Fill value for grid cells without a land point (default: np.nan).

    Returns
    -------
    np.ndarray
        Gridded field of shape (..., 360, 720), where the leading axes
        match the non-point axes of `sib_data`.

    Raises
    ------
    ValueError
        If `sib_data` has an unsupported number of dimensions.
        (Previously this case only printed a warning and then crashed
        with a NameError.)
    """
    ndim = sib_data.ndim
    if ndim == 1:
        data_2d = np.full((360, 720), empty_val)
    elif ndim == 2:
        data_2d = np.full((sib_data.shape[0], 360, 720), empty_val)
    elif ndim == 3:
        data_2d = np.full((sib_data.shape[0], sib_data.shape[1], 360, 720), empty_val)
    else:
        raise ValueError('sib_data shape not recognized: %s' % (sib_data.shape,))
    # Scatter each land point into its grid cell; the ellipsis handles all
    # three supported ranks with one loop (the point axis is always last).
    for i in range(len(latindex)):
        lat = int((latindex[i] + 89.75) * 2)
        lon = int((lonindex[i] + 179.75) * 2)
        data_2d[..., lat, lon] = sib_data[..., i]
    return data_2d
def latindex(latitude):
    """Return the row index of `latitude` (degrees) in a 360x720 grid."""
    return -int((latitude - 89.75) * 2)
def lonindex(longitude):
    """Return the column index of `longitude` (degrees) in a 360x720 grid."""
    return int((longitude + 179.75) * 2)
#!/usr/bin/env bash
# SLURM batch script: run a single CTDAS development cycle on Cartesius.
# Fixes: shebang used `/bin/env`, which does not exist on standard Linux
# (env is /usr/bin/env); the echo message named ffdas.log although output
# is actually redirected to develop.log.
#SBATCH -p short
#SBATCH -t 1:00:00
#SBATCH -J DEVELOP
#SBATCH -n 1
#SBATCH --mail-user=auke.vanderwoude@wur.nl
#SBATCH --mail-type=ALL
echo "All output piped to file develop.log"
export HOST='cartesius'
module load Python/3.6.6-intel-2018b
module load nco
export icycle_in_job=1
# $1 is forwarded as the verbosity/option argument; stdout and stderr both go to develop.log.
python develop.py rc=develop.rc -v $1 >& develop.log
"""CarbonTracker Data Assimilation Shell (CTDAS) Copyright (C) 2017 Wouter Peters.
Users are recommended to contact the developers (wouter.peters@wur.nl) to receive
updates of the code. See also: http://www.carbontracker.eu.
This program is free software: you can redistribute it and/or modify it under the
terms of the GNU General Public License as published by the Free Software Foundation,
version 3. This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with this
program. If not, see <http://www.gnu.org/licenses/>."""
#!/usr/bin/env python
#################################################################################################
# CTDAS driver script for the ccffdas data assimilation system on the Cartesius platform.
# First order of business is always to make all other python modules accessible through the path
#################################################################################################
import sys
import os
import logging
sys.path.append(os.getcwd())
# Logging is already configured here; this makes the output a little more verbose,
# but working. Maybe look for a workaround in more relaxed times ;)
logging.basicConfig(level='DEBUG',
format=' [%(levelname)-7s] (%(asctime)s) py-%(module)-20s : %(message)s',
datefmt='%Y-%m-%d %H:%M:%S')
#################################################################################################
# Next, import the tools needed to initialize a data assimilation cycle
#################################################################################################
from da.tools.initexit import start_logger, validate_opts_args, parse_options, CycleControl
from da.ccffdas.pipeline import ensemble_smoother_pipeline, header, footer, analysis_pipeline, archive_pipeline, save_and_submit
from da.platform.cartesius import CartesiusPlatform
from da.ccffdas.dasystem import CO2DaSystem
from da.baseclasses.optimizer import Optimizer
from da.ccffdas.obs import RINGOObservations
from da.ccffdas.statevector import CO2StateVector
# NOTE(review): star import — presumably supplies STILTObservationOperator used below;
# confirm and prefer an explicit import.
from da.ccffdas.observationoperator import *
from da.ccffdas.emissionmodel import EmisModel
#################################################################################################
# Parse and validate the command line options, start logging
#################################################################################################
start_logger()
opts, args = parse_options()
opts, args = validate_opts_args(opts, args)
#################################################################################################
# Create the Cycle Control object for this job, plus the objects the pipeline needs:
# platform (job submission), DA system (rc settings), observation operator,
# observations, state vector and optimizer.
#################################################################################################
dacycle = CycleControl(opts, args)
platform = CartesiusPlatform()
dasystem = CO2DaSystem(dacycle['da.system.rc'])
obsoperator = STILTObservationOperator(dacycle['da.obsoperator.rc'])
samples = RINGOObservations()
statevector = CO2StateVector()
optimizer = Optimizer()
##########################################################################################
################### ENTER THE PIPELINE WITH THE OBJECTS PASSED BY THE USER ###############
##########################################################################################
logging.info(header + "Entering Pipeline " + footer)
ensemble_smoother_pipeline(dacycle, platform, dasystem, samples, statevector, obsoperator, optimizer)
##########################################################################################
################### All done, extra stuff can be added next, such as analysis
##########################################################################################
#
#logging.info(header + "Starting analysis" + footer)
#sys.exit(0)
#
#save_weekly_avg_1x1_data(dacycle, statevector)
#save_weekly_avg_state_data(dacycle, statevector)
#save_weekly_avg_tc_data(dacycle, statevector)
#save_weekly_avg_ext_tc_data(dacycle)
#write_mole_fractions(dacycle)
#
sys.exit(0)
#!/usr/bin/env bash
# SLURM batch script: run four consecutive CTDAS development cycles on Cartesius.
# Fixes: shebang used `/bin/env`, which does not exist on standard Linux
# (env is /usr/bin/env); the echo message named ffdas.log although output
# goes to develop.log; and every run clobbered develop.log with `>&`,
# keeping only the last run's output — later runs now append.
#SBATCH -p short
#SBATCH -t 1:00:00
#SBATCH -J DEVELOP
#SBATCH -n 1
#SBATCH --mail-user=auke.vanderwoude@wur.nl
#SBATCH --mail-type=ALL
echo "All output piped to file develop.log"
export HOST='cartesius'
module load Python/3.6.6-intel-2018b
module load nco
export icycle_in_job=1
# Run the four rc configurations in sequence: the first run truncates the log,
# the remaining runs append so no output is lost.
python develop.py rc=develop.rc.easy1 -v $1 > develop.log 2>&1
python develop.py rc=develop.rc.easy2 -v $1 >> develop.log 2>&1
python develop.py rc=develop.rc.easy3 -v $1 >> develop.log 2>&1
python develop.py rc=develop.rc.easy4 -v $1 >> develop.log 2>&1
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment