Merge branch 'CLIMATE-388' of https://github.com/MichaelArthurAnderson/climate
diff --git a/docs/source/index.rst b/docs/source/index.rst
index 2834ee6..9e73110 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -18,6 +18,7 @@
ocw/evaluation
ocw/metrics
ocw/plotter
+ ocw/statistical_downscaling
ocw/utils
data_source/data_sources
ui-backend/backend
diff --git a/docs/source/ocw/statistical_downscaling.rst b/docs/source/ocw/statistical_downscaling.rst
new file mode 100644
index 0000000..be63151
--- /dev/null
+++ b/docs/source/ocw/statistical_downscaling.rst
@@ -0,0 +1,8 @@
+Downscaling Module
+******************
+
+Downscaling
+===========
+.. autoclass:: statistical_downscaling.Downscaling
+ :members:
+
diff --git a/examples/GPM_WRF24_JPDF_comparison.py b/examples/GPM_WRF24_JPDF_comparison.py
index 20b070e..45eee89 100644
--- a/examples/GPM_WRF24_JPDF_comparison.py
+++ b/examples/GPM_WRF24_JPDF_comparison.py
@@ -15,6 +15,32 @@
# specific language governing permissions and limitations
# under the License.
+"""
+ GPM_WRF24_JPDF_comparison.py
+
+ This is an example of calculating the joint probability distribution
+ function of rainfall intensity and duration for the Northern Great
+ Plains using GPM IMERG data for June 1, 2015.
+
+ In this example:
+
+ 1. Load the GPM and WRF24 datasets with a spatial filter.
+ 2. Load the spatial filter (Bukovsky region mask).
+ 3. Spatially subset the WRF data.
+ 4. Analyze the wet spells.
+ 5. Calculate the joint PDF (JPDF) of spell_duration and peak_rainfall.
+ 6. Visualize the JPDF.
+
+ OCW modules demonstrated:
+
+ 1. datasource/local
+ 2. dataset
+ 3. dataset_processor
+ 4. metrics
+ 5. plotter
+
+"""
+
from ocw.dataset import Bounds
import ocw.data_source.local as local
import ocw.dataset_processor as dsp
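
A minimal sketch of step 5 above (the joint PDF of spell duration and peak rainfall), using plain NumPy; the spell arrays and bin edges are illustrative stand-ins for the wet-spell statistics the example computes:

    import numpy as np

    # Illustrative wet-spell statistics, one value per detected spell.
    spell_duration = np.array([3., 6., 12., 24., 6., 3.])   # hours
    peak_rainfall = np.array([2., 5., 11., 18., 4., 1.])    # mm/hr

    # Joint histogram over duration/intensity bins, normalized so the
    # cells sum to one, i.e. an empirical joint PDF (JPDF).
    counts, _, _ = np.histogram2d(spell_duration, peak_rainfall,
                                  bins=[[0, 6, 12, 24, 48],
                                        [0, 2, 8, 16, 32]])
    jpdf = counts / counts.sum()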
diff --git a/examples/draw_climatology_map_MISR_AOD.py b/examples/draw_climatology_map_MISR_AOD.py
index 979c0f5..c75d3b3 100644
--- a/examples/draw_climatology_map_MISR_AOD.py
+++ b/examples/draw_climatology_map_MISR_AOD.py
@@ -15,6 +15,32 @@
# specific language governing permissions and limitations
# under the License.
+"""
+ draw_climatology_map_MISR_AOD.py
+
+ Use OCW to download a MISR dataset, subset the data, calculate the 16-, 5- and
+ 1-year means and draw contour maps of the means and the current values.
+
+ In this example:
+
+ 1. Download a dataset from https://dx.doi.org/10.6084/m9.figshare.3753321.v1.
+ *** Note *** The dataset for this example is not downloaded as part of the example
+ and must be downloaded to the examples directory before running the example.
+ *** Note *** Depending on the OS on which the example is being run, the download
+ may remove the - in the filename. Rename the file appropriately.
+ 2. Subset the data set (lat / lon / start date / end date).
+ 3. Calculate the 16, 5 and 1 year mean.
+ 4. Draw three contour maps using the calculated means and current values.
+
+ OCW modules demonstrated:
+
+ 1. datasource/local
+ 2. dataset
+ 3. dataset_processor
+ 4. plotter
+
+"""
+
import ocw.dataset as ds
import ocw.data_source.local as local
import ocw.dataset_processor as dsp
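
A minimal sketch of step 3, computing the multi-year means from a monthly (time, lat, lon) array with plain NumPy; the shapes and names are illustrative:

    import numpy as np

    # Illustrative monthly AOD cube: 16 years x 12 months on a 4 x 5 grid.
    values = np.random.rand(16 * 12, 4, 5)

    # Means over the full 16-year record, the last 5 years, and the last year.
    mean_16yr = values.mean(axis=0)
    mean_5yr = values[-5 * 12:].mean(axis=0)
    mean_1yr = values[-12:].mean(axis=0)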
diff --git a/examples/esgf_integration_example.py b/examples/esgf_integration_example.py
index 8247435..e939927 100644
--- a/examples/esgf_integration_example.py
+++ b/examples/esgf_integration_example.py
@@ -15,6 +15,21 @@
# specific language governing permissions and limitations
# under the License.
+"""
+ esgf_integration_example.py
+
+ Use OCW to download an ESGF dataset into the common format of an OCW dataset object.
+
+ In this example:
+
+ 1. Download an ESGF (https://esgf.llnl.gov/) dataset and load it into an OCW dataset object.
+
+ OCW modules demonstrated:
+
+ 1. datasource/esgf
+
+"""
+
import ocw.data_source.esgf as esgf
from getpass import getpass
import ssl
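
A hedged sketch of the download step; the load_dataset call shape and the dataset/variable ids are assumptions about the datasource/esgf API rather than something this change set confirms:

    import ssl
    from getpass import getpass
    import ocw.data_source.esgf as esgf

    if hasattr(ssl, '_create_unverified_context'):
        ssl._create_default_https_context = ssl._create_unverified_context

    username = 'placeholder-esgf-openid'              # placeholder credential
    password = getpass(prompt='Enter your ESGF password: ')

    # Assumed call shape; expected to return OCW dataset object(s).
    datasets = esgf.load_dataset('placeholder.esgf.dataset.id',  # placeholder id
                                 'tas',                          # placeholder variable
                                 username, password)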
diff --git a/examples/knmi_to_cru31_full_bias.py b/examples/knmi_to_cru31_full_bias.py
index 95b64a8..13b5686 100644
--- a/examples/knmi_to_cru31_full_bias.py
+++ b/examples/knmi_to_cru31_full_bias.py
@@ -15,6 +15,38 @@
# specific language governing permissions and limitations
# under the License.
+"""
+ knmi_to_cru31_full_bias.py
+
+ Use OCW to download, evaluate and plot (contour map) a dataset
+ against a reference dataset using an OCW standard metric (bias).
+
+ In this example:
+
+ 1. Download a netCDF file from a local site.
+ AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_tasmax.nc
+ 2. Load the local files into OCW dataset objects.
+ 3. Interface with the Regional Climate Model Evaluation Database (https://rcmes.jpl.nasa.gov/)
+ to load the CRU3.1 Daily-Max Temp dataset (https://rcmes.jpl.nasa.gov/content/cru31).
+ 4. Process each dataset to the same shape.
+ 5. Temporally rebin the datasets to a single timestep.
+ 6. Spatially regrid the dataset objects to a 1/2 degree grid.
+ 7. Build a bias metric for the evaluation using the standard OCW metric set.
+ 8. Create an evaluation object using the datasets and metric.
+ 9. Plot the results of the evaluation (contour map).
+
+ OCW modules demonstrated:
+
+ 1. datasource/local
+ 2. datasource/rcmed
+ 3. dataset
+ 4. dataset_processor
+ 5. evaluation
+ 6. metrics
+ 7. plotter
+
+"""
+
import datetime
import urllib
from os import path
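
Steps 7-9 condense to a few OCW calls; a self-contained sketch with tiny synthetic datasets standing in for the KNMI and CRU3.1 data (the Dataset constructor follows ocw.dataset, while the results indexing is an assumption to verify against ocw.evaluation):

    import datetime
    import numpy as np
    import ocw.dataset as ds
    import ocw.evaluation as evaluation
    import ocw.metrics as metrics

    # Two tiny synthetic datasets on a shared grid and time axis.
    lats = np.arange(-10.0, 10.0, 1.0)
    lons = np.arange(20.0, 40.0, 1.0)
    times = np.array([datetime.datetime(2000, m, 1) for m in range(1, 13)])
    ref = ds.Dataset(lats, lons, times, np.zeros((12, 20, 20)), name='reference')
    target = ds.Dataset(lats, lons, times, np.ones((12, 20, 20)), name='model')

    # Steps 7/8: wire the standard bias metric into an evaluation and run it.
    bias_evaluation = evaluation.Evaluation(ref, [target], [metrics.Bias()])
    bias_evaluation.run()

    # Step 9 would hand this field to plotter.draw_contour_map; here the
    # bias is constant with magnitude 1.0 (indexing assumed [target][metric]).
    print(bias_evaluation.results[0][0].mean())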
diff --git a/examples/model_ensemble_to_rcmed.py b/examples/model_ensemble_to_rcmed.py
index e8e3dbe..185aa2e 100644
--- a/examples/model_ensemble_to_rcmed.py
+++ b/examples/model_ensemble_to_rcmed.py
@@ -15,6 +15,38 @@
# specific language governing permissions and limitations
# under the License.
+"""
+ model_ensemble_to_rcmed.py
+
+ Use OCW to download, evaluate and plot (contour map) two datasets
+ against a reference dataset using an OCW standard metric (bias).
+
+ In this example:
+
+ 1. Download two netCDF files from a local site.
+ AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_tasmax.nc
+ AFRICA_UC-WRF311_CTL_ERAINT_MM_50km-rg_1989-2008_tasmax.nc
+ 2. Load the local files into OCW dataset objects.
+ 3. Interface with the Regional Climate Model Evaluation Database (https://rcmes.jpl.nasa.gov/)
+ to load the CRU3.1 Daily-Max Temp dataset (https://rcmes.jpl.nasa.gov/content/cru31).
+ 4. Temporally rebin the datasets to annual.
+ 5. Spatially regrid the dataset objects to a 1/2 degree grid.
+ 6. Build a bias metric for the evaluation using the standard OCW metric set.
+ 7. Create an evaluation object using the datasets and metric.
+ 8. Plot the results of the evaluation (contour map).
+
+ OCW modules demonstrated:
+
+ 1. datasource/local
+ 2. datasource/rcmed
+ 3. dataset
+ 4. dataset_processor
+ 5. metrics
+ 6. evaluation
+ 7. plotter
+
+"""
+
import datetime
import math
import urllib
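
Step 3's RCMED fetch is a single call; dataset id 10 and parameter id 37 identify CRU3.1 per the comments in the example itself, while the lat/lon bounds below are illustrative:

    import datetime
    import ocw.data_source.rcmed as rcmed

    START = datetime.datetime(1989, 1, 1)
    END = datetime.datetime(2008, 12, 1)

    # dataset_id=10, parameter_id=37 per
    # https://rcmes.jpl.nasa.gov/content/data-rcmes-database
    CRU31 = rcmed.parameter_dataset(10, 37,
                                    -45.0, 42.24,  # lat_min, lat_max (illustrative)
                                    -24.0, 60.0,   # lon_min, lon_max (illustrative)
                                    START, END)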
diff --git a/examples/multi_model_evaluation.py b/examples/multi_model_evaluation.py
index 7756cc9..ba6ad56 100644
--- a/examples/multi_model_evaluation.py
+++ b/examples/multi_model_evaluation.py
@@ -15,6 +15,42 @@
# specific language governing permissions and limitations
# under the License.
+"""
+ multi_model_evaluation.py
+
+ Use OCW to download, evaluate and plot (contour map) two datasets
+ against a reference dataset using OCW standard metrics.
+
+ In this example:
+
+ 1. Download two netCDF files from a local site.
+ AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_pr.nc
+ AFRICA_UCT-PRECIS_CTL_ERAINT_MM_50km_1989-2008_pr.nc
+ 2. Load the local files into OCW dataset objects.
+ 3. Interface with the Regional Climate Model Evaluation Database (https://rcmes.jpl.nasa.gov/)
+ to load the CRU3.1 Daily Precipitation dataset (https://rcmes.jpl.nasa.gov/content/cru31).
+ 4. Process each dataset to the same shape.
+ a.) Restrict the datasets re: geographic and time boundaries.
+ b.) Convert the dataset water flux to common units.
+ c.) Normalize the dataset date / times to monthly.
+ d.) Spatially regrid each dataset.
+ 5. Calculate the mean annual value for each dataset.
+ 6. Evaluate the datasets against the reference dataset using an OCW standard metric and plot
+ a contour map.
+
+ OCW modules demonstrated:
+
+ 1. datasource/local
+ 2. datasource/rcmed
+ 3. dataset
+ 4. dataset_processor
+ 5. metrics
+ 6. evaluation
+ 7. plotter
+ 8. utils
+
+"""
+
import datetime
import urllib
import numpy as np
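
Step 4's sub-steps map one-to-one onto dataset_processor calls; a condensed sketch, assuming dataset, EVAL_BOUNDS and the domain limits are defined as in the example:

    import numpy as np
    import ocw.dataset_processor as dsp

    dataset = dsp.subset(dataset, EVAL_BOUNDS)                     # (a) restrict bounds
    dataset = dsp.water_flux_unit_conversion(dataset)              # (b) common units
    dataset = dsp.normalize_dataset_datetimes(dataset, 'monthly')  # (c) monthly times

    new_lats = np.arange(LAT_MIN, LAT_MAX, 0.5)                    # (d) 1/2 degree grid
    new_lons = np.arange(LON_MIN, LON_MAX, 0.5)
    dataset = dsp.spatial_regrid(dataset, new_lats, new_lons)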
diff --git a/examples/multi_model_taylor_diagram.py b/examples/multi_model_taylor_diagram.py
index 31d4020..8edee7b 100644
--- a/examples/multi_model_taylor_diagram.py
+++ b/examples/multi_model_taylor_diagram.py
@@ -1,3 +1,54 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""
+ multi_model_taylor_diagram.py
+
+ Use OCW to download, normalize and evaluate three datasets
+ against a reference dataset and OCW standard metrics
+ drawing a Taylor diagram of the results of the evaluation.
+
+ In this example:
+
+ 1. Download three netCDF files from a local site.
+ AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_pr.nc
+ AFRICA_ICTP-REGCM3_CTL_ERAINT_MM_50km-rg_1989-2008_pr.nc
+ AFRICA_UCT-PRECIS_CTL_ERAINT_MM_50km_1989-2008_pr.nc
+ 2. Load the local files into OCW dataset objects.
+ 3. Process each dataset to the same shape.
+ a.) Restrict the datasets re: geographic and time boundaries.
+ b.) Temporally rebin the data (monthly).
+ c.) Spatially regrid each dataset.
+ 4. Extract the metrics used for the evaluation and evaluate
+ against a reference dataset and standard OCW metrics.
+ 5. Draw a Taylor diagram of the evaluation results.
+
+ OCW modules demonstrated:
+
+ 1. datasource/local
+ 2. dataset
+ 3. dataset_processor
+ 4. evaluation
+ 5. metrics
+ 6. plotter
+ 7. utils
+
+"""
+
# Apache OCW lib immports
from ocw.dataset import Dataset, Bounds
import ocw.data_source.local as local
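
Step 4 pairs each target's standard-deviation ratio and pattern correlation against the reference; a sketch using the standard metrics, assuming ref_dataset, target_datasets and labels from the steps above (the results indexing and draw_taylor_diagram argument order are assumptions to check against ocw.plotter):

    import ocw.evaluation as evaluation
    import ocw.metrics as metrics
    import ocw.plotter as plotter

    taylor_eval = evaluation.Evaluation(
        ref_dataset, target_datasets,
        [metrics.StdDevRatio(), metrics.PatternCorrelation()])
    taylor_eval.run()

    # One (stddev_ratio, correlation) pair per target dataset
    # (indexing assumed [target][metric]).
    taylor_data = [(r[0], r[1]) for r in taylor_eval.results]
    plotter.draw_taylor_diagram(taylor_data, labels, 'CRU31',
                                fname='taylor_diagram')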
diff --git a/examples/podaac_integration_example.py b/examples/podaac_integration_example.py
index 61663d7..be85884 100644
--- a/examples/podaac_integration_example.py
+++ b/examples/podaac_integration_example.py
@@ -15,6 +15,27 @@
# specific language governing permissions and limitations
# under the License.
+"""
+ podaac_integration_example.py
+
+ Use OCW to download a PO.DAAC dataset, evaluate it and plot the results (contour map).
+
+ In this example:
+
+ 1. Download a remote PO.DAAC (https://podaac.jpl.nasa.gov/) dataset
+ and read it into an OCW dataset object.
+ 2. Create a temporal STD metric using one of the OCW standard metrics.
+ 3. Evaluate the dataset against the metric and plot a contour map.
+
+ OCW modules demonstrated:
+
+ 1. datasource/podaac_datasource
+ 2. metrics
+ 3. evaluation
+ 4. plotter
+
+"""
+
import ocw.data_source.podaac_datasource as podaac
import ocw.evaluation as evaluation
import ocw.metrics as metrics
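
A hedged sketch of step 1; extract_l4_granule and its parameters are an assumption about the podaac_datasource API, and the granule id and variable are placeholders:

    import ocw.data_source.podaac_datasource as podaac

    # Assumed call shape: download one Level-4 granule and wrap it in an
    # OCW dataset object. The id and variable below are placeholders.
    dataset = podaac.extract_l4_granule(variable='analysed_sst',
                                        dataset_id='PODAAC-PLACEHOLDER-ID',
                                        name='PO.DAAC example granule')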
diff --git a/examples/simple_model_to_model_bias.py b/examples/simple_model_to_model_bias.py
index 8e834b6..ad1f29b 100644
--- a/examples/simple_model_to_model_bias.py
+++ b/examples/simple_model_to_model_bias.py
@@ -15,6 +15,35 @@
# specific language governing permissions and limitations
# under the License.
+"""
+ simple_model_to_model_bias.py
+
+ Use OCW to download, normalize and evaluate two datasets
+ against an OCW metric (bias) and plot the results of the
+ evaluation (contour map).
+
+ In this example:
+
+ 1. Download two netCDF files from a local site.
+ AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_tasmax.nc
+ AFRICA_UC-WRF311_CTL_ERAINT_MM_50km-rg_1989-2008_tasmax.nc
+ 2. Load the local files into OCW dataset objects.
+ 3. Temporally rebin the data annually.
+ 4. Spatially regrid the dataset objects to a 1 degree grid.
+ 5. Build a bias metric for the evaluation using the standard OCW metric set.
+ 6. Create an evaluation object using the datasets and metric.
+ 7. Plot the results of the evaluation (contour map).
+
+ OCW modules demonstrated:
+
+ 1. datasource/local
+ 2. dataset_processor
+ 3. evaluation
+ 4. metrics
+ 5. plotter
+
+"""
+
import datetime
from os import path
import sys
diff --git a/examples/simple_model_tstd.py b/examples/simple_model_tstd.py
index fb3ce48..6412493 100644
--- a/examples/simple_model_tstd.py
+++ b/examples/simple_model_tstd.py
@@ -15,6 +15,30 @@
# specific language governing permissions and limitations
# under the License.
+"""
+ simple_model_tstd.py
+
+ Use OCW to download a dataset, evaluate it against an OCW standard
+ metric (temporal STD) and plot the results (contour map).
+
+ In this example:
+
+ 1. Download a netCDF file from a local site.
+ AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_tasmax.nc
+ 2. Load the local file into an OCW dataset object.
+ 3. Use the standard OCW metrics to build a metric against which to evaluate (temporal STD).
+ 4. Create an evaluation object of the dataset vs. the metric.
+ 5. Plot the results of the evaluation using a contour map.
+
+ OCW modules demonstrated:
+
+ 1. datasource/local
+ 2. metrics
+ 3. evaluation
+ 4. plotter
+
+"""
+
from os import path
import urllib
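
Because temporal STD is a unary metric it needs no target datasets; a sketch, assuming knmi_dataset was loaded in step 2 (the empty target list and the unary_results attribute are assumptions to verify against ocw.evaluation):

    import ocw.evaluation as evaluation
    import ocw.metrics as metrics

    # Steps 3/4: a unary metric runs on the single dataset alone.
    std = metrics.TemporalStdDev()
    std_evaluation = evaluation.Evaluation(knmi_dataset, [], [std])
    std_evaluation.run()

    # Step 5 would pass this field to plotter.draw_contour_map.
    results = std_evaluation.unary_results[0][0]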
diff --git a/examples/subregions_portrait_diagram.py b/examples/subregions_portrait_diagram.py
index e85286f..3e6785c 100644
--- a/examples/subregions_portrait_diagram.py
+++ b/examples/subregions_portrait_diagram.py
@@ -1,5 +1,67 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""
+ subregions_portrait_diagram.py
+
+ Use OCW to download, normalize, evaluate and plot (portrait diagram)
+ three local datasets against a reference dataset.
+
+ In this example:
+
+ 1. Download three netCDF files from a local site.
+ AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_pr.nc
+ AFRICA_ICTP-REGCM3_CTL_ERAINT_MM_50km-rg_1989-2008_pr.nc
+ AFRICA_UCT-PRECIS_CTL_ERAINT_MM_50km_1989-2008_pr.nc
+ 2. Load the local files into OCW dataset objects.
+ 3. Interface with the Regional Climate Model Evaluation Database (https://rcmes.jpl.nasa.gov/)
+ to load the CRU3.1 Daily Precipitation dataset (https://rcmes.jpl.nasa.gov/content/cru31).
+ 4. Process each dataset to the same shape.
+ a.) Restrict the datasets re: geographic and time boundaries.
+ b.) Convert the dataset water flux to common units.
+ c.) Normalize the dataset date / times to monthly.
+ d.) Spatially regrid each dataset.
+ 5. Calculate the mean annual value for each dataset.
+ 6. Separate each dataset into 13 subregions.
+ 7. Extract the metrics used for the evaluation and evaluate
+ against a reference dataset.
+ 8. Create a portrait diagram of the results of the evaluation.
+
+ OCW modules demonstrated:
+
+ 1. datasource/local
+ 2. datasource/rcmed
+ 3. dataset
+ 4. dataset_processor
+ 5. metrics
+ 6. evaluation
+ 7. plotter
+ 8. utils
+
+"""
+
+from os import path
+import urllib
+import ssl
+import datetime
+import numpy as np
+
# Apache OCW lib immports
-from ocw.dataset import Dataset, Bounds
+from ocw.dataset import Bounds
import ocw.data_source.local as local
import ocw.data_source.rcmed as rcmed
import ocw.dataset_processor as dsp
@@ -8,24 +70,17 @@
import ocw.plotter as plotter
import ocw.utils as utils
-import datetime
-import numpy as np
-import numpy.ma as ma
-
-from os import path
-import urllib
-import ssl
if hasattr(ssl, '_create_unverified_context'):
ssl._create_default_https_context = ssl._create_unverified_context
# File URL leader
-FILE_LEADER = "http://zipper.jpl.nasa.gov/dist/"
+FILE_LEADER = 'http://zipper.jpl.nasa.gov/dist/'
# Three Local Model Files
-FILE_1 = "AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_pr.nc"
-FILE_2 = "AFRICA_ICTP-REGCM3_CTL_ERAINT_MM_50km-rg_1989-2008_pr.nc"
-FILE_3 = "AFRICA_UCT-PRECIS_CTL_ERAINT_MM_50km_1989-2008_pr.nc"
+FILE_1 = 'AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_pr.nc'
+FILE_2 = 'AFRICA_ICTP-REGCM3_CTL_ERAINT_MM_50km-rg_1989-2008_pr.nc'
+FILE_3 = 'AFRICA_UCT-PRECIS_CTL_ERAINT_MM_50km_1989-2008_pr.nc'
# Filename for the output image/plot (without file extension)
-OUTPUT_PLOT = "portrait_diagram"
+OUTPUT_PLOT = 'portrait_diagram'
# Spatial and temporal configurations
LAT_MIN = -45.0
@@ -34,7 +89,8 @@
LON_MAX = 60.0
START = datetime.datetime(2000, 01, 1)
END = datetime.datetime(2007, 12, 31)
-EVAL_BOUNDS = Bounds(LAT_MIN, LAT_MAX, LON_MIN, LON_MAX, START, END)
+EVAL_BOUNDS = Bounds(lat_min=LAT_MIN, lat_max=LAT_MAX, lon_min=LON_MIN,
+ lon_max=LON_MAX, start=START, end=END)
# variable that we are analyzing
varName = 'pr'
@@ -58,32 +114,32 @@
if not path.exists(FILE_3):
urllib.urlretrieve(FILE_LEADER + FILE_3, FILE_3)
-""" Step 1: Load Local NetCDF File into OCW Dataset Objects and store in list"""
-target_datasets.append(local.load_file(FILE_1, varName, name="KNMI"))
-target_datasets.append(local.load_file(FILE_2, varName, name="REGCM"))
-target_datasets.append(local.load_file(FILE_3, varName, name="UCT"))
+# Step 1: Load Local NetCDF File into OCW Dataset Objects and store in list
+target_datasets.append(local.load_file(FILE_1, varName, name='KNMI'))
+target_datasets.append(local.load_file(FILE_2, varName, name='REGCM'))
+target_datasets.append(local.load_file(FILE_3, varName, name='UCT'))
-""" Step 2: Fetch an OCW Dataset Object from the data_source.rcmed module """
-print("Working with the rcmed interface to get CRU3.1 Monthly Mean Precipitation")
+# Step 2: Fetch an OCW Dataset Object from the data_source.rcmed module
+print('Working with the rcmed interface to get CRU3.1 Monthly Mean Precipitation')
# the dataset_id and the parameter id were determined from
# https://rcmes.jpl.nasa.gov/content/data-rcmes-database
CRU31 = rcmed.parameter_dataset(
10, 37, LAT_MIN, LAT_MAX, LON_MIN, LON_MAX, START, END)
-""" Step 3: Processing Datasets so they are the same shape """
-print("Processing datasets ...")
+# Step 3: Processing Datasets so they are the same shape
+print('Processing datasets ...')
CRU31 = dsp.normalize_dataset_datetimes(CRU31, 'monthly')
-print("... on units")
+print('... on units')
CRU31 = dsp.water_flux_unit_conversion(CRU31)
for member, each_target_dataset in enumerate(target_datasets):
target_datasets[member] = dsp.subset(target_datasets[member], EVAL_BOUNDS)
- target_datasets[member] = dsp.water_flux_unit_conversion(target_datasets[
- member])
+ target_datasets[member] = \
+ dsp.water_flux_unit_conversion(target_datasets[member])
target_datasets[member] = dsp.normalize_dataset_datetimes(
target_datasets[member], 'monthly')
-print("... spatial regridding")
+print('... spatial regridding')
new_lats = np.arange(LAT_MIN, LAT_MAX, gridLatStep)
new_lons = np.arange(LON_MIN, LON_MAX, gridLonStep)
CRU31 = dsp.spatial_regrid(CRU31, new_lats, new_lons)
@@ -97,12 +153,12 @@
_, CRU31.values = utils.calc_climatology_year(CRU31)
for member, each_target_dataset in enumerate(target_datasets):
- _, target_datasets[member].values = utils.calc_climatology_year(target_datasets[
- member])
+ _, target_datasets[member].values = \
+ utils.calc_climatology_year(target_datasets[member])
# make the model ensemble
target_datasets_ensemble = dsp.ensemble(target_datasets)
-target_datasets_ensemble.name = "ENS"
+target_datasets_ensemble.name = 'ENS'
# append to the target_datasets for final analysis
target_datasets.append(target_datasets_ensemble)
@@ -111,21 +167,21 @@
allNames.append(target.name)
list_of_regions = [
- Bounds(-10.0, 0.0, 29.0, 36.5),
- Bounds(0.0, 10.0, 29.0, 37.5),
- Bounds(10.0, 20.0, 25.0, 32.5),
- Bounds(20.0, 33.0, 25.0, 32.5),
- Bounds(-19.3, -10.2, 12.0, 20.0),
- Bounds(15.0, 30.0, 15.0, 25.0),
- Bounds(-10.0, 10.0, 7.3, 15.0),
- Bounds(-10.9, 10.0, 5.0, 7.3),
- Bounds(33.9, 40.0, 6.9, 15.0),
- Bounds(10.0, 25.0, 0.0, 10.0),
- Bounds(10.0, 25.0, -10.0, 0.0),
- Bounds(30.0, 40.0, -15.0, 0.0),
- Bounds(33.0, 40.0, 25.0, 35.00)]
+ Bounds(lat_min=-10.0, lat_max=0.0, lon_min=29.0, lon_max=36.5),
+ Bounds(lat_min=0.0, lat_max=10.0, lon_min=29.0, lon_max=37.5),
+ Bounds(lat_min=10.0, lat_max=20.0, lon_min=25.0, lon_max=32.5),
+ Bounds(lat_min=20.0, lat_max=33.0, lon_min=25.0, lon_max=32.5),
+ Bounds(lat_min=-19.3, lat_max=-10.2, lon_min=12.0, lon_max=20.0),
+ Bounds(lat_min=15.0, lat_max=30.0, lon_min=15.0, lon_max=25.0),
+ Bounds(lat_min=-10.0, lat_max=10.0, lon_min=7.3, lon_max=15.0),
+ Bounds(lat_min=-10.9, lat_max=10.0, lon_min=5.0, lon_max=7.3),
+ Bounds(lat_min=33.9, lat_max=40.0, lon_min=6.9, lon_max=15.0),
+ Bounds(lat_min=10.0, lat_max=25.0, lon_min=0.0, lon_max=10.0),
+ Bounds(lat_min=10.0, lat_max=25.0, lon_min=-10.0, lon_max=0.0),
+ Bounds(lat_min=30.0, lat_max=40.0, lon_min=-15.0, lon_max=0.0),
+ Bounds(lat_min=33.0, lat_max=40.0, lon_min=25.0, lon_max=35.00)]
-region_list = ["R" + str(i + 1) for i in xrange(13)]
+region_list = ['R' + str(i + 1) for i in xrange(13)]
# metrics
pattern_correlation = metrics.PatternCorrelation()
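
Once the subregion evaluation has run, the portrait diagram takes one metric value per (target, subregion) cell; a sketch reusing allNames, region_list and OUTPUT_PLOT from above, with the draw_portrait_diagram argument names an assumption:

    import ocw.plotter as plotter

    # results: 2-D array of metric values, one row per target dataset
    # (allNames) and one column per subregion (region_list, R1..R13).
    plotter.draw_portrait_diagram(results, allNames, region_list,
                                  fname=OUTPUT_PLOT, fmt='png')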
diff --git a/examples/subregions_rectangular_boundaries.py b/examples/subregions_rectangular_boundaries.py
index 7fcf0e8..cf396bc 100644
--- a/examples/subregions_rectangular_boundaries.py
+++ b/examples/subregions_rectangular_boundaries.py
@@ -1,16 +1,43 @@
-# Apache OCW lib immports
-from ocw.dataset import Dataset, Bounds
-import ocw.data_source.local as local
-import ocw.data_source.rcmed as rcmed
-import ocw.dataset_processor as dsp
-import ocw.evaluation as evaluation
-import ocw.metrics as metrics
-import ocw.plotter as plotter
-import ocw.utils as utils
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""
+ subregions_rectangular_boundaries.py
+
+ Use OCW to define a set of subregions and draw them.
+
+ In this example:
+
+ 1. Define a set of subregion boundaries (lat / lon / start date / end date).
+ 2. Draw each sub region.
+
+ OCW modules demonstrated:
+
+ 1. dataset (Bounds)
+ 2. plotter
+
+"""
import datetime
import numpy as np
-import numpy.ma as ma
+
+# Apache OCW lib imports
+from ocw.dataset import Bounds
+import ocw.plotter as plotter
OUTPUT_PLOT = "subregions"
@@ -32,19 +59,19 @@
new_lons = np.arange(LON_MIN, LON_MAX, gridLonStep)
list_of_regions = [
- Bounds(-10.0, 0.0, 29.0, 36.5, START_SUB, END_SUB),
- Bounds(0.0, 10.0, 29.0, 37.5, START_SUB, END_SUB),
- Bounds(10.0, 20.0, 25.0, 32.5, START_SUB, END_SUB),
- Bounds(20.0, 33.0, 25.0, 32.5, START_SUB, END_SUB),
- Bounds(-19.3, -10.2, 12.0, 20.0, START_SUB, END_SUB),
- Bounds(15.0, 30.0, 15.0, 25.0, START_SUB, END_SUB),
- Bounds(-10.0, 10.0, 7.3, 15.0, START_SUB, END_SUB),
- Bounds(-10.9, 10.0, 5.0, 7.3, START_SUB, END_SUB),
- Bounds(33.9, 40.0, 6.9, 15.0, START_SUB, END_SUB),
- Bounds(10.0, 25.0, 0.0, 10.0, START_SUB, END_SUB),
- Bounds(10.0, 25.0, -10.0, 0.0, START_SUB, END_SUB),
- Bounds(30.0, 40.0, -15.0, 0.0, START_SUB, END_SUB),
- Bounds(33.0, 40.0, 25.0, 35.0, START_SUB, END_SUB)]
+ Bounds(lat_min=-10.0, lat_max=0.0, lon_min=29.0, lon_max=36.5, start=START_SUB, end=END_SUB),
+ Bounds(lat_min=0.0, lat_max=10.0, lon_min=29.0, lon_max=37.5, start=START_SUB, end=END_SUB),
+ Bounds(lat_min=10.0, lat_max=20.0, lon_min=25.0, lon_max=32.5, start=START_SUB, end=END_SUB),
+ Bounds(lat_min=20.0, lat_max=33.0, lon_min=25.0, lon_max=32.5, start=START_SUB, end=END_SUB),
+ Bounds(lat_min=-19.3, lat_max=-10.2, lon_min=12.0, lon_max=20.0, start=START_SUB, end=END_SUB),
+ Bounds(lat_min=15.0, lat_max=30.0, lon_min=15.0, lon_max=25.0, start=START_SUB, end=END_SUB),
+ Bounds(lat_min=-10.0, lat_max=10.0, lon_min=7.3, lon_max=15.0, start=START_SUB, end=END_SUB),
+ Bounds(lat_min=-10.9, lat_max=10.0, lon_min=5.0, lon_max=7.3, start=START_SUB, end=END_SUB),
+ Bounds(lat_min=33.9, lat_max=40.0, lon_min=6.9, lon_max=15.0, start=START_SUB, end=END_SUB),
+ Bounds(lat_min=10.0, lat_max=25.0, lon_min=0.0, lon_max=10.0, start=START_SUB, end=END_SUB),
+ Bounds(lat_min=10.0, lat_max=25.0, lon_min=-10.0, lon_max=0.0, start=START_SUB, end=END_SUB),
+ Bounds(lat_min=30.0, lat_max=40.0, lon_min=-15.0, lon_max=0.0, start=START_SUB, end=END_SUB),
+ Bounds(lat_min=33.0, lat_max=40.0, lon_min=25.0, lon_max=35.0, start=START_SUB, end=END_SUB)]
# for plotting the subregions
plotter.draw_subregions(list_of_regions, new_lats,
diff --git a/examples/subset_TRMM_data_for_NCA_regions.py b/examples/subset_TRMM_data_for_NCA_regions.py
index 90b752b..4ae4300 100644
--- a/examples/subset_TRMM_data_for_NCA_regions.py
+++ b/examples/subset_TRMM_data_for_NCA_regions.py
@@ -15,9 +15,32 @@
# specific language governing permissions and limitations
# under the License.
+"""
+ subset_TRMM_data_for_NCA_regions.py
+
+ Use OCW to subset TRMM data (https://pmm.nasa.gov/trmm) for NCA regions and draw
+ a contour map for the U.S. ('CA', 'NV', 'UT', 'AZ', 'NM', 'CO'), Cuba and
+ the Bahamas (https://scenarios.globalchange.gov/regions_nca4).
+
+ In this example:
+
+ 1. Interface with the Regional Climate Model Evaluation Database (https://rcmes.jpl.nasa.gov/)
+ to load the TRMM dataset.
+ 2. Define the bounds for the U.S. ('CA', 'NV', 'UT', 'AZ', 'NM', 'CO'), Cuba and the Bahamas and
+ the start date / end date.
+ 3. Create a contour map of the TRMM data for the U.S., Cuba, and the Bahamas.
+
+ OCW modules demonstrated:
+
+ 1. datasource/rcmed
+ 2. dataset (Bounds)
+ 3. dataset_processor
+ 4. plotter
+
+"""
+
# Apache OCW lib immports
import ocw.dataset_processor as dsp
-import ocw.utils as utils
from ocw.dataset import Bounds
import ocw.data_source.rcmed as rcmed
import ocw.plotter as plotter
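
Step 2's bounds use the keyword-driven Bounds forms introduced elsewhere in this change set; the us_states variant appears in temperature_trends_over_CONUS.py below, while the countries variant is an assumed mirror of it:

    from ocw.dataset import Bounds

    # US-state outlines, as used in temperature_trends_over_CONUS.py.
    conus_bounds = Bounds(boundary_type='us_states',
                          us_states=['CA', 'NV', 'UT', 'AZ', 'NM', 'CO'])

    # Assumed symmetric API for country outlines (Cuba and the Bahamas).
    island_bounds = Bounds(boundary_type='countries',
                           countries=['Cuba', 'Bahamas'])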
diff --git a/examples/taylor_diagram_example.py b/examples/taylor_diagram_example.py
index 8d5bbf0..8f683c1 100644
--- a/examples/taylor_diagram_example.py
+++ b/examples/taylor_diagram_example.py
@@ -15,6 +15,37 @@
# specific language governing permissions and limitations
# under the License.
+"""
+ taylor_diagram_example.py
+
+ Use OCW to download, normalize and evaluate two datasets
+ drawing a Taylor diagram of the results of the evaluation.
+
+ In this example:
+
+ 1. Download two netCDF files from a local site.
+ AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_tasmax.nc
+ AFRICA_UC-WRF311_CTL_ERAINT_MM_50km-rg_1989-2008_tasmax.nc
+ 2. Load the local files into OCW dataset objects.
+ 3. Process each dataset to the same shape.
+ a.) Restrict the datasets re: geographic and time boundaries.
+ b.) Temporally rebin the data (monthly).
+ c.) Spatially regrid each dataset.
+ 4. Extract the metrics used for the evaluation and evaluate
+ against a reference dataset.
+ 5. Draw a Taylor diagram of the evaluation results.
+
+ OCW modules demonstrated:
+
+ 1. datasource/local
+ 2. dataset
+ 3. dataset_processor
+ 4. evaluation
+ 5. metrics
+ 6. plotter
+
+"""
+
import datetime
import sys
from os import path
diff --git a/examples/temperature_trends_over_CONUS.py b/examples/temperature_trends_over_CONUS.py
index 54f5843..8510294 100644
--- a/examples/temperature_trends_over_CONUS.py
+++ b/examples/temperature_trends_over_CONUS.py
@@ -15,6 +15,35 @@
# specific language governing permissions and limitations
# under the License.
+"""
+ temperature_trends_over_CONUS.py
+
+ Use OCW to plot the temperature trends over CONUS using the nClimGrid reference data.
+
+ In this example:
+
+ 1. Load the local file nClimGrid/nClimGrid_tave_1895-2005.nc into an OCW Dataset Object.
+ *** Note *** It is assumed this file exists locally in a subdirectory nClimGrid
+ located in the same directory as the example.
+ *** Note *** The files can be downloaded from:
+ https://rcmes.jpl.nasa.gov/RCMES_Turtorial_data/NCA-CMIP_examples.tar.gz
+ *** Note *** Additional information about the file content can be found here:
+ https://rcmes.jpl.nasa.gov/content/nca-cmip-analysis-using-rcmes
+ 2. Load the CMIP5 simulations into a list of OCW Dataset Objects.
+ 3. Spatially subset the observed dataset into state and regional boundaries.
+ 4. Temporally subset the observed and model datasets.
+ 5. Calculate and plot the temperature trend for each region.
+
+ OCW modules demonstrated:
+
+ 1. datasource/local
+ 2. dataset
+ 3. dataset_processor
+ 4. plotter
+ 5. utils
+
+"""
+
import numpy as np
import datetime
@@ -26,8 +55,8 @@
import ocw.plotter as plotter
import ocw.utils as utils
-# nClimDiv observation file
-file_obs = 'nClimDiv/nClimDiv_tave_1895-2005.nc'
+# nClimGrid observation file
+file_obs = 'nClimGrid/nClimGrid_tave_1895-2005.nc'
# CMIP5 simulations
model_file_path = 'CMIP5_historical'
@@ -40,7 +69,7 @@
start_date = datetime.datetime(1979, 12, 1)
end_date = datetime.datetime(2005, 8, 31)
-nyear = 26
+nyear = 26
month_start = 6 # June
month_end = 8 # August
@@ -60,39 +89,39 @@
n_region = 7 # number of regions
# CONUS regional boundaries
-NW_bounds = Bounds(boundary_type='us_states',
+NW_bounds = Bounds(boundary_type='us_states',
us_states=regions[0])
-SW_bounds = Bounds(boundary_type='us_states',
+SW_bounds = Bounds(boundary_type='us_states',
us_states=regions[1])
-NGP_bounds = Bounds(boundary_type='us_states',
+NGP_bounds = Bounds(boundary_type='us_states',
us_states=regions[2])
-SGP_bounds = Bounds(boundary_type='us_states',
+SGP_bounds = Bounds(boundary_type='us_states',
us_states=regions[3])
-MW_bounds = Bounds(boundary_type='us_states',
+MW_bounds = Bounds(boundary_type='us_states',
us_states=regions[4])
-NE_bounds = Bounds(boundary_type='us_states',
+NE_bounds = Bounds(boundary_type='us_states',
us_states=regions[5])
-SE_bounds = Bounds(boundary_type='us_states',
+SE_bounds = Bounds(boundary_type='us_states',
us_states=regions[6])
regional_bounds = [NW_bounds, SW_bounds, NGP_bounds,
SGP_bounds, MW_bounds, NE_bounds, SE_bounds]
-""" Load nClimDiv file into OCW Dataset """
-obs_dataset = local.load_file(file_obs, variable_name='tave')
+""" Load nClimGrid file into OCW Dataset """
+obs_dataset = local.load_file(file_obs, variable_name='tave')
""" Load CMIP5 simulations into a list of OCW Datasets"""
model_dataset = local.load_multiple_files(file_path=model_file_path, variable_name='tas',
- dataset_name=dataset_name, variable_unit='K')
+ dataset_name=dataset_name, variable_unit='K')
""" Temporal subset of obs_dataset """
-obs_dataset_subset = dsp.temporal_slice(obs_dataset,
+obs_dataset_subset = dsp.temporal_slice(obs_dataset,
start_time=start_date, end_time=end_date)
obs_dataset_season = dsp.temporal_subset(obs_dataset_subset, month_start, month_end,
average_each_year=True)
""" Temporal subset of model_dataset """
-model_dataset_subset = [dsp.temporal_slice(dataset,start_time=start_date, end_time=end_date)
+model_dataset_subset = [dsp.temporal_slice(dataset,start_time=start_date, end_time=end_date)
for dataset in model_dataset]
model_dataset_season = [dsp.temporal_subset(dataset, month_start, month_end,
average_each_year=True) for dataset in model_dataset_subset]
@@ -104,7 +133,7 @@
for iregion in np.arange(n_region):
obs_timeseries[:, iregion] = utils.calc_time_series(
- dsp.subset(obs_dataset_season, regional_bounds[iregion]))
+ dsp.subset(obs_dataset_season, regional_bounds[iregion]))
for imodel in np.arange(nmodel):
model_timeseries[imodel, :, iregion] = utils.calc_time_series(
dsp.subset(model_dataset_season[imodel], regional_bounds[iregion]))
@@ -125,20 +154,20 @@
regional_trends_model[imodel, iregion], regional_trends_model_error[iregion] = utils.calculate_temporal_trend_of_time_series(
year, model_timeseries[imodel, :, iregion])
regional_trends_ens[iregion], regional_trends_ens_error[iregion] = utils.calculate_ensemble_temporal_trends(
- model_timeseries[:, :, iregion])
+ model_timeseries[:, :, iregion])
""" Generate plots """
-plotter.fill_US_states_with_color(regions, 'nClimDiv_tave_trends_JJA_1980-2005',
+plotter.fill_US_states_with_color(regions, 'nClimGrid_tave_trends_JJA_1980-2005',
values=regional_trends_obs,
region_names=['%.3f' %(10*i) for i in regional_trends_obs])
-plotter.fill_US_states_with_color(regions, 'CMIP5_ENS_tave_trends_JJA_1980-2005',
+plotter.fill_US_states_with_color(regions, 'CMIP5_ENS_tave_trends_JJA_1980-2005',
values=regional_trends_ens,
region_names=['%.3f' %(10*i) for i in regional_trends_ens])
bias_ens = regional_trends_ens - regional_trends_obs
-plotter.fill_US_states_with_color(regions, 'CMIP5_ENS_tave_trends_bias_from_nClimDiv_JJA_1980-2005',
+plotter.fill_US_states_with_color(regions, 'CMIP5_ENS_tave_trends_bias_from_nClimGrid_JJA_1980-2005',
values=bias_ens,
region_names=['%.3f' %(10*i) for i in bias_ens])
@@ -146,7 +175,7 @@
ens_data = np.vstack([regional_trends_ens, regional_trends_ens_error])
plotter.draw_plot_to_compare_trends(obs_data, ens_data, regional_trends_model,
- fname='Trends_comparison_btn_CMIP5_and_nClimDiv',
+ fname='Trends_comparison_btn_CMIP5_and_nClimGrid',
data_labels=['NW','SW','NGP','SGP','MW','NE','SE'],
xlabel='NCA regions', ylabel='tas trend [K/year]')
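
The per-region trend in step 5 is a least-squares slope over an annual series; a plain-NumPy equivalent of what utils.calculate_temporal_trend_of_time_series computes (the OCW helper also returns an error estimate):

    import numpy as np

    # Illustrative 26-year regional-mean temperature series (K).
    year = np.arange(1980, 2006)
    tas = 0.02 * (year - 1980) + np.random.normal(0.0, 0.1, year.size)

    # Trend (K/year) and intercept via least squares.
    slope, intercept = np.polyfit(year, tas, 1)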
diff --git a/examples/time_series_with_regions.py b/examples/time_series_with_regions.py
index 3bb133c..d92599b 100644
--- a/examples/time_series_with_regions.py
+++ b/examples/time_series_with_regions.py
@@ -1,19 +1,69 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""
+ time_series_with_regions.py
+
+ Use OCW to download and plot (time series) three local datasets against a reference dataset.
+
+ In this example:
+
+ 1. Download three netCDF files from a local site.
+ AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_pr.nc
+ AFRICA_ICTP-REGCM3_CTL_ERAINT_MM_50km-rg_1989-2008_pr.nc
+ AFRICA_UCT-PRECIS_CTL_ERAINT_MM_50km_1989-2008_pr.nc
+ 2. Load the local files into OCW dataset objects.
+ 3. Interface with the Regional Climate Model Evaluation Database (https://rcmes.jpl.nasa.gov/)
+ to load the CRU3.1 Daily Precipitation dataset (https://rcmes.jpl.nasa.gov/content/cru31).
+ 4. Process each dataset to the same shape.
+ a.) Restrict the datasets re: geographic and time boundaries.
+ b.) Convert the dataset water flux to common units.
+ c.) Normalize the dataset date / times to monthly.
+ d.) Spatially regrid each dataset.
+ 5. Calculate the mean monthly value for each dataset.
+ 6. Separate each dataset into 13 subregions.
+ 7. Create a time series for each dataset in each subregion.
+
+ OCW modules demonstrated:
+
+ 1. datasource/local
+ 2. datasource/rcmed
+ 3. dataset
+ 4. dataset_processor
+ 5. plotter
+
+"""
+
+import sys
+import datetime
+from os import path
+from calendar import monthrange
+import ssl
+
+import numpy as np
+
# Apache OCW lib immports
-from ocw.dataset import Dataset, Bounds
+from ocw.dataset import Bounds
import ocw.data_source.local as local
import ocw.data_source.rcmed as rcmed
import ocw.dataset_processor as dsp
-import ocw.evaluation as evaluation
-import ocw.metrics as metrics
import ocw.plotter as plotter
import ocw.utils as utils
-import datetime
-import numpy as np
-import numpy.ma as ma
-from os import path
-import sys
-
if sys.version_info[0] >= 3:
from urllib.request import urlretrieve
else:
@@ -21,17 +71,17 @@
# But note that this might need an update when Python 4
# might be around one day
from urllib import urlretrieve
-import ssl
+
if hasattr(ssl, '_create_unverified_context'):
ssl._create_default_https_context = ssl._create_unverified_context
-
# File URL leader
-FILE_LEADER = "http://zipper.jpl.nasa.gov/dist/"
+FILE_LEADER = 'http://zipper.jpl.nasa.gov/dist/'
+
# Three Local Model Files
-FILE_1 = "AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_pr.nc"
-FILE_2 = "AFRICA_ICTP-REGCM3_CTL_ERAINT_MM_50km-rg_1989-2008_pr.nc"
-FILE_3 = "AFRICA_UCT-PRECIS_CTL_ERAINT_MM_50km_1989-2008_pr.nc"
+FILE_1 = 'AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_pr.nc'
+FILE_2 = 'AFRICA_ICTP-REGCM3_CTL_ERAINT_MM_50km-rg_1989-2008_pr.nc'
+FILE_3 = 'AFRICA_UCT-PRECIS_CTL_ERAINT_MM_50km_1989-2008_pr.nc'
LAT_MIN = -45.0
LAT_MAX = 42.24
@@ -56,97 +106,99 @@
# Download necessary NetCDF file if not present
if not path.exists(FILE_1):
- print("Downloading %s" % (FILE_LEADER + FILE_1))
+ print('Downloading %s' % (FILE_LEADER + FILE_1))
urlretrieve(FILE_LEADER + FILE_1, FILE_1)
if not path.exists(FILE_2):
- print("Downloading %s" % (FILE_LEADER + FILE_2))
+ print('Downloading %s' % (FILE_LEADER + FILE_2))
urlretrieve(FILE_LEADER + FILE_2, FILE_2)
if not path.exists(FILE_3):
- print("Downloading %s" % (FILE_LEADER + FILE_3))
+ print('Downloading %s' % (FILE_LEADER + FILE_3))
urlretrieve(FILE_LEADER + FILE_3, FILE_3)
-""" Step 1: Load Local NetCDF File into OCW Dataset Objects and store in list"""
-target_datasets.append(local.load_file(FILE_1, varName, name="KNMI"))
-target_datasets.append(local.load_file(FILE_2, varName, name="REGCM"))
-target_datasets.append(local.load_file(FILE_3, varName, name="UCT"))
+# Step 1: Load Local NetCDF File into OCW Dataset Objects and store in list
+target_datasets.append(local.load_file(FILE_1, varName, name='KNMI'))
+target_datasets.append(local.load_file(FILE_2, varName, name='REGCM'))
+target_datasets.append(local.load_file(FILE_3, varName, name='UCT'))
-
-""" Step 2: Fetch an OCW Dataset Object from the data_source.rcmed module """
-print("Working with the rcmed interface to get CRU3.1 Daily Precipitation")
+# Step 2: Fetch an OCW Dataset Object from the data_source.rcmed module
+print('Working with the rcmed interface to get CRU3.1 Daily Precipitation')
# the dataset_id and the parameter id were determined from
# https://rcmes.jpl.nasa.gov/content/data-rcmes-database
CRU31 = rcmed.parameter_dataset(
10, 37, LAT_MIN, LAT_MAX, LON_MIN, LON_MAX, START, END)
-
-""" Step 3: Processing datasets so they are the same shape ... """
-print("Processing datasets so they are the same shape")
+# Step 3: Processing datasets so they are the same shape
+print('Processing datasets so they are the same shape')
CRU31 = dsp.water_flux_unit_conversion(CRU31)
CRU31 = dsp.normalize_dataset_datetimes(CRU31, 'monthly')
for member, each_target_dataset in enumerate(target_datasets):
target_datasets[member] = dsp.subset(target_datasets[member], EVAL_BOUNDS)
- target_datasets[member] = dsp.water_flux_unit_conversion(target_datasets[
- member])
+ target_datasets[member] = dsp.water_flux_unit_conversion(target_datasets[member])
target_datasets[member] = dsp.normalize_dataset_datetimes(
target_datasets[member], 'monthly')
-print("... spatial regridding")
+print('... spatial regridding')
new_lats = np.arange(LAT_MIN, LAT_MAX, gridLatStep)
new_lons = np.arange(LON_MIN, LON_MAX, gridLonStep)
CRU31 = dsp.spatial_regrid(CRU31, new_lats, new_lons)
-
for member, each_target_dataset in enumerate(target_datasets):
- target_datasets[member] = dsp.spatial_regrid(
- target_datasets[member], new_lats, new_lons)
+ target_datasets[member] =\
+ dsp.spatial_regrid(target_datasets[member], new_lats, new_lons)
-# find climatology monthly for obs and models
+# Find climatology monthly for obs and models.
CRU31.values, CRU31.times = utils.calc_climatology_monthly(CRU31)
+# Shift the day of the month to the end of the month as matplotlib does not handle
+# the xticks elegantly when the first date is the epoch and tries to determine
+# the start of the xticks based on a value < 1.
+for index, item in enumerate(CRU31.times):
+ CRU31.times[index] = \
+ datetime.date(item.year, item.month, monthrange(item.year, item.month)[1])
for member, each_target_dataset in enumerate(target_datasets):
- target_datasets[member].values, target_datasets[
- member].times = utils.calc_climatology_monthly(target_datasets[member])
+ target_datasets[member].values, target_datasets[member].times = \
+ utils.calc_climatology_monthly(target_datasets[member])
# make the model ensemble
target_datasets_ensemble = dsp.ensemble(target_datasets)
-target_datasets_ensemble.name = "ENS"
+target_datasets_ensemble.name = 'ENS'
# append to the target_datasets for final analysis
target_datasets.append(target_datasets_ensemble)
-""" Step 4: Subregion stuff """
+# Step 4: Subregion stuff
list_of_regions = [
- Bounds(-10.0, 0.0, 29.0, 36.5),
- Bounds(0.0, 10.0, 29.0, 37.5),
- Bounds(10.0, 20.0, 25.0, 32.5),
- Bounds(20.0, 33.0, 25.0, 32.5),
- Bounds(-19.3, -10.2, 12.0, 20.0),
- Bounds(15.0, 30.0, 15.0, 25.0),
- Bounds(-10.0, 10.0, 7.3, 15.0),
- Bounds(-10.9, 10.0, 5.0, 7.3),
- Bounds(33.9, 40.0, 6.9, 15.0),
- Bounds(10.0, 25.0, 0.0, 10.0),
- Bounds(10.0, 25.0, -10.0, 0.0),
- Bounds(30.0, 40.0, -15.0, 0.0),
- Bounds(33.0, 40.0, 25.0, 35.0)]
+ Bounds(lat_min=-10.0, lat_max=0.0, lon_min=29.0, lon_max=36.5),
+ Bounds(lat_min=0.0, lat_max=10.0, lon_min=29.0, lon_max=37.5),
+ Bounds(lat_min=10.0, lat_max=20.0, lon_min=25.0, lon_max=32.5),
+ Bounds(lat_min=20.0, lat_max=33.0, lon_min=25.0, lon_max=32.5),
+ Bounds(lat_min=-19.3, lat_max=-10.2, lon_min=12.0, lon_max=20.0),
+ Bounds(lat_min=15.0, lat_max=30.0, lon_min=15.0, lon_max=25.0),
+ Bounds(lat_min=-10.0, lat_max=10.0, lon_min=7.3, lon_max=15.0),
+ Bounds(lat_min=-10.9, lat_max=10.0, lon_min=5.0, lon_max=7.3),
+ Bounds(lat_min=33.9, lat_max=40.0, lon_min=6.9, lon_max=15.0),
+ Bounds(lat_min=10.0, lat_max=25.0, lon_min=0.0, lon_max=10.0),
+ Bounds(lat_min=10.0, lat_max=25.0, lon_min=-10.0, lon_max=0.0),
+ Bounds(lat_min=30.0, lat_max=40.0, lon_min=-15.0, lon_max=0.0),
+ Bounds(lat_min=33.0, lat_max=40.0, lon_min=25.0, lon_max=35.0)]
-region_list = [["R" + str(i + 1)] for i in xrange(13)]
+region_list = [['R' + str(i + 1)] for i in xrange(13)]
for regions in region_list:
firstTime = True
- subset_name = regions[0] + "_CRU31"
- # labels.append(subset_name) #for legend, uncomment this line
+ subset_name = regions[0] + '_CRU31'
+ labels.append(subset_name)
subset = dsp.subset(CRU31, list_of_regions[region_counter], subset_name)
tSeries = utils.calc_time_series(subset)
results.append(tSeries)
tSeries = []
firstTime = False
for member, each_target_dataset in enumerate(target_datasets):
- subset_name = regions[0] + "_" + target_datasets[member].name
- # labels.append(subset_name) #for legend, uncomment this line
+ subset_name = regions[0] + '_' + target_datasets[member].name
+ labels.append(subset_name)
subset = dsp.subset(target_datasets[member],
list_of_regions[region_counter],
subset_name)
@@ -154,8 +206,8 @@
results.append(tSeries)
tSeries = []
- plotter.draw_time_series(np.array(results), CRU31.times, labels, regions[
- 0], ptitle=regions[0], fmt='png')
+ plotter.draw_time_series(np.array(results), CRU31.times, labels, regions[0],
+ label_month=True, ptitle=regions[0], fmt='png')
results = []
tSeries = []
labels = []
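
The month-end shift above leans on calendar.monthrange, whose second element is the number of days in the month; a self-contained illustration:

    import datetime
    from calendar import monthrange

    d = datetime.date(2000, 2, 1)
    # monthrange returns (weekday of the first day, days in the month).
    last_day = monthrange(d.year, d.month)[1]
    print(datetime.date(d.year, d.month, last_day))  # 2000-02-29 (leap year)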
diff --git a/ocw-ui/frontend/app/scripts/controllers/parameterselect.js b/ocw-ui/frontend/app/scripts/controllers/parameterselect.js
index cad97e0..a2554f4 100644
--- a/ocw-ui/frontend/app/scripts/controllers/parameterselect.js
+++ b/ocw-ui/frontend/app/scripts/controllers/parameterselect.js
@@ -27,9 +27,9 @@
* Controller of the ocwUiApp
*/
angular.module('ocwUiApp')
-.controller('ParameterSelectCtrl', ['$rootScope', '$scope', '$http', '$timeout',
- 'selectedDatasetInformation', 'regionSelectParams', 'evaluationSettings',
- function($rootScope, $scope, $http, $timeout, selectedDatasetInformation, regionSelectParams, evaluationSettings) {
+.controller('ParameterSelectCtrl', ['$rootScope', '$scope', '$http', '$timeout', '$location',
+ 'selectedDatasetInformation', 'regionSelectParams', 'evaluationSettings',
+ function($rootScope, $scope, $http, $timeout, $location, selectedDatasetInformation, regionSelectParams, evaluationSettings) {
$scope.datasets = selectedDatasetInformation.getDatasets();
// The min/max lat/lon values from the selected datasets
@@ -145,6 +145,8 @@
data['temporal_resolution'] = 30;
}
+ data['temporal_resolution_type'] = temporal_res;
+
// Load the Metrics for the evaluation
data['metrics'] = []
var metrics = settings.metrics
@@ -171,13 +173,9 @@
$scope.runningEval = false;
$timeout(function() {
- if (evalWorkDir !== undefined) {
- window.location = "#/results/" + evalWorkDir;
- } else {
- window.location = "#/results";
- }
+ var url = (evalWorkDir) ? '/results/' + evalWorkDir : '/results';
+ $location.url(url);
}, 100);
-
}).error(function() {
$scope.runningEval = false;
});
@@ -192,13 +190,13 @@
if (parseFloat($scope.displayParams.latMax) > parseFloat($scope.latMax))
$scope.displayParams.latMax = $scope.latMax;
- if (parseFloat($scope.displayParams.lonMin) < parseFloat($scope.lonMin))
+ if (parseFloat($scope.displayParams.lonMin) < parseFloat($scope.lonMin))
$scope.displayParams.lonMin = $scope.lonMin;
- if (parseFloat($scope.displayParams.lonMax) > parseFloat($scope.lonMax))
+ if (parseFloat($scope.displayParams.lonMax) > parseFloat($scope.lonMax))
$scope.displayParams.lonMax = $scope.lonMax;
- if ($scope.displayParams.start < $scope.start)
+ if ($scope.displayParams.start < $scope.start)
$scope.displayParams.start = $scope.start;
if ($scope.displayParams.end > $scope.end)
@@ -213,8 +211,8 @@
$rootScope.$broadcast('redrawOverlays', []);
}
- $scope.unwatchDatasets = $scope.$watch('datasets',
- function() {
+ $scope.unwatchDatasets = $scope.$watch('datasets',
+ function() {
var numDatasets = $scope.datasets.length;
$scope.displayParams.areValid = false;
$scope.areInUserRegridState = false;
@@ -230,7 +228,7 @@
// Get the valid lat/lon range in the selected datasets.
for (var i = 0; i < numDatasets; i++) {
var curDataset = $scope.datasets[i];
-
+
latMin = (curDataset['latlonVals']['latMin'] > latMin) ? curDataset['latlonVals']['latMin'] : latMin;
latMax = (curDataset['latlonVals']['latMax'] < latMax) ? curDataset['latlonVals']['latMax'] : latMax;
lonMin = (curDataset['latlonVals']['lonMin'] > lonMin) ? curDataset['latlonVals']['lonMin'] : lonMin;
diff --git a/ocw-ui/frontend/app/scripts/directives/predictivefilebrowserinput.js b/ocw-ui/frontend/app/scripts/directives/predictivefilebrowserinput.js
index 7142c15..6fe2118 100644
--- a/ocw-ui/frontend/app/scripts/directives/predictivefilebrowserinput.js
+++ b/ocw-ui/frontend/app/scripts/directives/predictivefilebrowserinput.js
@@ -29,9 +29,16 @@
.directive('predictiveFileBrowserInput', function() {
var link = function($scope, $elem, $attrs) {
$scope.autocomplete = [];
-
+
// Set id to use this directive correctly in multiple places
- $scope.id = 'autoCompletePath'+ $elem.context.id
+ /*
+ This had been written as $elem.context.id, but $elem is an object (jQuery.fn.init)
+ and the object did not have a context or id attribute. This was
+ throwing an error to the console and the list of files was not being displayed.
+ Replaced with $attrs.id.
+ */
+ $scope.id = 'autoCompletePath' + $attrs.id;
+
/*
* We need a place to dump our auto-completion options
*/
@@ -43,8 +50,8 @@
var val = $(e.target).text();
$($elem).val(val);
// Need to trigger the input box's "input" event so Angular updates the model!
- $elem.trigger('input');
-
+ $elem.trigger('input');
+
// If the user selected a directory, find more results..
if (val[val.length - 1] == '/') {
$scope.fetchFiles($($elem).val());
@@ -152,7 +159,7 @@
$scope.possibleCompletes = $scope.autocomplete;
};
- /*
+ /*
* Handle <TAB> presses.
*
* Attempt to auto-complete options when the user presses <TAB>.
@@ -220,7 +227,7 @@
}
};
- /*
+ /*
* Handle all other key presses in the input box
*
* Filter the auto-complete options as the user types to ensure that only options
@@ -236,7 +243,7 @@
$scope.updateAutoComplete();
};
- /*
+ /*
* When a path is auto-completed with <TAB> we need to check to see if it points
* to a directory. If it does, we still need to fetch results!
*/
@@ -247,7 +254,7 @@
}
};
- /*
+ /*
* Calculate the greatest common prefix of the passed options.
*
* Params:
@@ -275,7 +282,7 @@
return longestString.slice(0, index - 1);
};
- /*
+ /*
* Filter the auto-complete options based on the current input.
*/
$scope.filterResults = function() {
diff --git a/ocw-ui/frontend/package.json b/ocw-ui/frontend/package.json
index 8024f17..b0133c8 100644
--- a/ocw-ui/frontend/package.json
+++ b/ocw-ui/frontend/package.json
@@ -2,12 +2,12 @@
"name": "ocwui",
"version": "1.2.0",
"description": "A tool for the evaluation and analysis of climate models.",
- "repository": {
- "type" : "git",
- "url" : "https://git-wip-us.apache.org/repos/asf/climate.git"
+ "repository": {
+ "type": "git",
+ "url": "https://git-wip-us.apache.org/repos/asf/climate.git"
},
"license": "Apache-2.0",
- "readme":"README.md",
+ "readme": "README.md",
"dependencies": {},
"devDependencies": {
"bower": "^1.3.9",
@@ -23,7 +23,7 @@
"grunt-contrib-copy": "^0.5.0",
"grunt-contrib-cssmin": "^0.9.0",
"grunt-contrib-htmlmin": "^0.3.0",
- "grunt-contrib-imagemin": "^0.7.0",
+ "grunt-contrib-imagemin": "^2.0.1",
"grunt-contrib-jshint": "^0.10.0",
"grunt-contrib-uglify": "^0.4.0",
"grunt-contrib-watch": "^0.6.1",
@@ -36,12 +36,18 @@
"grunt-usemin": "^2.1.1",
"grunt-wiredep": "^1.8.0",
"jshint-stylish": "^0.2.0",
- "karma": "^0.12.17",
+ "karma": "^2.0.0",
+ "karma-chrome-launcher": "^2.2.0",
+ "karma-detect-browsers": "^2.2.6",
+ "karma-firefox-launcher": "^1.1.0",
+ "karma-ie-launcher": "^1.0.0",
"karma-jasmine": "^0.1.5",
- "karma-phantomjs-launcher": "^0.1.4",
+ "karma-phantomjs-launcher": "^1.0.4",
+ "karma-safari-launcher": "^1.0.0",
"load-grunt-tasks": "^0.4.0",
"time-grunt": "^0.3.1",
- "yo": "^1.2.1"
+ "yo": "^1.2.1",
+ "serve-static": "^1.13.1"
},
"engines": {
"node": ">=0.10.0"
diff --git a/ocw-ui/frontend/test/karma.conf.js b/ocw-ui/frontend/test/karma.conf.js
index 744d927..fa03fd7 100644
--- a/ocw-ui/frontend/test/karma.conf.js
+++ b/ocw-ui/frontend/test/karma.conf.js
@@ -46,23 +46,54 @@
// web server port
port: 8080,
+
// Start these browsers, currently available:
// - Chrome
- // - ChromeCanary
// - Firefox
- // - Opera
// - Safari (only Mac)
// - PhantomJS
// - IE (only Windows)
- browsers: [
- 'PhantomJS',
- //'Chrome'
- ],
+ // Ok to leave this empty as karma-detect-browsers will figure this out based on what is installed.
+ // Either set enabled to false for karma-detect-browsers and fill in a specific list or update
+ // the logic in the karma-detect-browsers config to remove those you don't want to test.
+ browsers: [ ],
+
+ frameworks: ['jasmine', 'detectBrowsers'],
+
+ detectBrowsers: {
+ // enable/disable, default is true
+ enabled: true,
+
+ // enable/disable phantomjs support, default is true
+ usePhantomJS: true,
+
+ // post processing of browsers list
+ // here you can edit the list of browsers used by karma
+ postDetection: function(availableBrowser) {
+
+ var result = availableBrowser;
+
+ //Remove PhantomJS if another browser has been detected
+ //if (availableBrowser.length > 1 && availableBrowser.indexOf('PhantomJS')>-1) {
+ // var i = result.indexOf('PhantomJS');
+
+ // if (i !== -1) {
+ // result.splice(i, 1);
+ // }
+ //}
+
+ return result;
+ }
+ },
// Which plugins to enable
plugins: [
'karma-phantomjs-launcher',
'karma-chrome-launcher',
+ 'karma-firefox-launcher',
+ 'karma-safari-launcher',
+ 'karma-ie-launcher',
+ 'karma-detect-browsers',
'karma-jasmine'
],
diff --git a/ocw/dataset_processor.py b/ocw/dataset_processor.py
index 160ffb7..cf2e90e 100755
--- a/ocw/dataset_processor.py
+++ b/ocw/dataset_processor.py
@@ -867,9 +867,13 @@
mask_array = np.zeros(dataset_array[0].values.shape)
for dataset in dataset_array:
- index = np.where(dataset.values.mask == True)
- if index[0].size > 0:
- mask_array[index] = 1
+ # CLIMATE-797 - Not every array passed in will be a masked array.
+ # For those that are, take action based on the mask provided.
+ # For those that are not, take no action (else AttributeError).
+ if hasattr(dataset.values, 'mask'):
+ index = np.where(dataset.values.mask == True)
+ if index[0].size > 0:
+ mask_array[index] = 1
masked_array = []
for dataset in dataset_array:
dataset.values = ma.array(dataset.values, mask=mask_array)
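
The guard above matters because plain ndarrays have no mask attribute; a self-contained illustration of the CLIMATE-797 behavior:

    import numpy as np
    import numpy.ma as ma

    plain = np.ones((2, 2))
    masked = ma.array(np.ones((2, 2)), mask=[[True, False], [False, False]])

    mask_array = np.zeros((2, 2))
    for values in (plain, masked):
        # Plain arrays are skipped; masked arrays contribute their mask.
        if hasattr(values, 'mask'):
            index = np.where(values.mask == True)
            if index[0].size > 0:
                mask_array[index] = 1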
diff --git a/ocw/plotter.py b/ocw/plotter.py
index 7f9b092..f0af03f 100755
--- a/ocw/plotter.py
+++ b/ocw/plotter.py
@@ -367,9 +367,24 @@
fig.clf()
+def _get_colors(num_colors):
+ """
+ matplotlib recycles colors once its default color cycle is exhausted.
+ This can make line charts confusing as colors repeat. This function
+ samples colors evenly across the default colormap to better
+ approximate one unique color per series.
+
+ :param num_colors: The number of unique colors to generate.
+ :return: A list of num_colors colors drawn from the colormap.
+ """
+ cmap = plt.get_cmap()
+ return [cmap(1. * i / num_colors) for i in range(num_colors)]
+
+
def draw_time_series(results, times, labels, fname, fmt='png', gridshape=(1, 1),
xlabel='', ylabel='', ptitle='', subtitles=None,
- label_month=False, yscale='linear', aspect=None):
+ label_month=False, yscale='linear', aspect=None,
+ cycle_colors=True, cmap=None):
''' Draw a time series plot.
:param results: 3D array of time series data.
@@ -415,7 +430,22 @@
:param aspect: (Optional) approximate aspect ratio of each subplot
(width / height). Default is 8.5 / 5.5
:type aspect: :class:`float`
+
+ :param cycle_colors: (Optional) flag to toggle whether to allow matplotlib
+ to re-use colors when plotting or force an evenly distributed range.
+ :type cycle_colors: :class:`bool`
+
+ :param cmap: (Optional) string or :class:`matplotlib.colors.LinearSegmentedColormap`
+ instance denoting the colormap. This must be able to be recognized by
+ `Matplotlib's get_cmap function <http://matplotlib.org/api/cm_api.html#matplotlib.cm.get_cmap>`_.
+ Maps like rainbow and spectral, with a wide spectrum of colors, are nice choices when
+ used with the cycle_colors option. tab20, tab20b, and tab20c are good if the plot has
+ fewer than 20 datasets.
+ :type cmap: :mod:`string` or :class:`matplotlib.colors.LinearSegmentedColormap`
+
'''
+ if cmap is not None:
+ set_cmap(cmap)
+
# Handle the single plot case.
if results.ndim == 2:
results = results.reshape(1, *results.shape)
@@ -448,6 +478,10 @@
# Make the plots
for i, ax in enumerate(grid):
data = results[i]
+
+ if not cycle_colors:
+ ax.set_prop_cycle('color', _get_colors(data.shape[0]))
+
if label_month:
xfmt = mpl.dates.DateFormatter('%b')
xloc = mpl.dates.MonthLocator()
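
With cycle_colors=False and a wide-spectrum colormap, each of many series gets a visually distinct color; a usage sketch of the two new parameters, with illustrative data shapes:

    import datetime
    import numpy as np
    import ocw.plotter as plotter

    # 30 monthly series: more than matplotlib's default color cycle.
    results = np.random.rand(30, 12)
    times = [datetime.datetime(2000, m, 1) for m in range(1, 13)]
    labels = ['series %d' % i for i in range(30)]

    plotter.draw_time_series(results, times, labels, 'many_series',
                             cycle_colors=False,  # evenly spread the colors
                             cmap='rainbow')      # wide-spectrum map, per the docstring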
diff --git a/ocw/statistical_downscaling.py b/ocw/statistical_downscaling.py
index 57013a0..f3d701f 100755
--- a/ocw/statistical_downscaling.py
+++ b/ocw/statistical_downscaling.py
@@ -15,23 +15,36 @@
# specific language governing permissions and limitations
# under the License.
+"""
+Classes:
+ Downscaling - Container for applying statistical downscaling.
+"""
-import ocw.utils as utils
+
import numpy as np
-from scipy.stats import percentileofscore, linregress
+from scipy.stats import linregress, percentileofscore
-class Downscaling:
+class Downscaling(object):
+ """
+ Statistical downscaling infers higher-resolution information from lower-resolution data.
+ For example, data collected at a coarse regional level can be applied to a more
+ refined local level.
+
+ Statistical downscaling establishes a relationship between variables at the large scale
+ and at the local scale, and applies that relationship at the local scale.
+ """
def __init__(self, ref_dataset, model_present, model_future):
- '''
+ """ Default Downscaling constructor.
+
:param ref_dataset: The Dataset to use as the reference dataset (observation)
:type ref_dataset: Dataset
:param model_present: model simulation to be compared with observation
:type model_present: Dataset
:param model_future: model simulation to be calibrated for prediction
:type model_future: Dataset
- '''
+ """
self.ref_dataset = ref_dataset[~ref_dataset.mask].ravel()
self.model_present = model_present.ravel()
self.model_future = model_future.ravel()
@@ -39,11 +52,11 @@
description = "statistical downscaling methods"
def Delta_addition(self):
- '''Calculate the mean difference between future and present simulation,
+ """Calculate the mean difference between future and present simulation,
then add the difference to the observed distribution
:returns: downscaled model_present and model_future
- '''
+ """
ref = self.ref_dataset
model_present = self.model_present
model_future = self.model_future
@@ -51,23 +64,26 @@
return model_present, ref + np.mean(model_future - model_present)
def Delta_correction(self):
- '''Calculate the mean difference between observation and present simulation,
+ """Calculate the mean difference between observation and present simulation,
then add the difference to the future distribution
:returns: downscaled model_present and model_future
- '''
+ """
ref = self.ref_dataset
model_present = self.model_present
model_future = self.model_future
- return model_present + np.mean(ref) - np.mean(model_present), model_future + np.mean(ref) - np.mean(model_present)
+ return model_present + np.mean(ref) - np.mean(model_present), model_future + \
+ np.mean(ref) - np.mean(model_present)
def Quantile_mapping(self):
- '''Remove the biases for each quantile value
- Wood et al (2004) HYDROLOGIC IMPLICATIONS OF DYNAMICAL AND STATISTICAL APPROACHES TO DOWNSCALING CLIMATE MODEL OUTPUTS
+ """Remove the biases for each quantile value
+
+ Wood et al (2004) HYDROLOGIC IMPLICATIONS OF DYNAMICAL
+ AND STATISTICAL APPROACHES TO DOWNSCALING CLIMATE MODEL OUTPUTS
:returns: downscaled model_present and model_future
- '''
+ """
ref = self.ref_dataset
model_present = self.model_present
model_present_corrected = np.zeros(model_present.size)
@@ -86,11 +102,14 @@
return model_present_corrected, model_future_corrected
def Asynchronous_regression(self):
- '''Remove the biases by fitting a linear regression model with ordered observational and model datasets
- Stoner et al (2013) An asynchronous regional regression model for statistical downscaling of daily climate variables
+ """Remove the biases by fitting a linear regression model with ordered observational and
+ model datasets
+
+ Stoner et al (2013) An asynchronous regional regression model for statistical downscaling of
+ daily climate variables
:returns: downscaled model_present and model_future
- '''
+ """
ref_original = self.ref_dataset
model_present = self.model_present
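
A self-contained sketch of the quantile-mapping idea implemented above: each model value is replaced by the observed value at the same percentile (plain NumPy/SciPy, independent of the class):

    import numpy as np
    from scipy.stats import percentileofscore

    ref = np.random.normal(10.0, 2.0, 500)     # observations
    model = np.random.normal(12.0, 3.0, 500)   # biased simulation

    # Map each model value to the observed value at the same percentile.
    corrected = np.array([np.percentile(ref, percentileofscore(model, v))
                          for v in model])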