Merge branch 'CLIMATE-744'
diff --git a/RCMES/CORDEX/cordex.py b/RCMES/CORDEX/cordex.py
index 24ce0c6..4b4a4e8 100644
--- a/RCMES/CORDEX/cordex.py
+++ b/RCMES/CORDEX/cordex.py
@@ -1,57 +1,62 @@
-import os
-import sys
-import subprocess
-import jinja2
-from metadata_extractor import CORDEXMetadataExtractor, obs4MIPSMetadataExtractor
-
-# These should be modified. TODO: domains can also be made into separate group
-# CORDEX domain
-domain = 'NAM-44'
-
-# The output directory
-workdir = '/home/goodman/data_processing/CORDEX/analysis'
-
-# Location of osb4Mips files
-obs_dir = '/proj3/data/obs4mips'
-
-# Location of CORDEX files
-models_dir = '/proj3/data/CORDEX/{domain}/*'.format(domain=domain)
-
-# Extract metadata from model and obs files, pairing up files with the same
-# variables for separate evaluations
-obs_extractor = obs4MIPSMetadataExtractor(obs_dir)
-models_extractor = CORDEXMetadataExtractor(models_dir)
-groups = obs_extractor.group(models_extractor, 'variable')
-
-# Configuration file template, to be rendered repeatedly for each evaluation
-# run
-env =  jinja2.Environment(loader=jinja2.FileSystemLoader('./templates'),
-                          trim_blocks=True, lstrip_blocks=True)
-t = env.get_template('CORDEX.yaml.template')
-
-# Each group represents a single evaluation. Repeat the evaluation for
-# three seasons: Summer, Winter, and Annual.
-seasons = ['annual', 'winter', 'summer']
-for group in groups:
-    obs_info, models_info = group
-    instrument = obs_info['instrument']
-    variable = obs_info['variable']
-    for season in seasons:
-        configfile_basename = '_'.join([domain, instrument, variable, season]) + '.yaml'
+import os
+import sys
+import subprocess
+import jinja2
+from metadata_extractor import CORDEXMetadataExtractor, obs4MIPSMetadataExtractor
+
+# These defaults should be modified as needed. TODO: domains can also be
+# made into a separate group.
+# CORDEX domain
+
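+# Optional CLI override, expected order: domain, workdir, obs_dir, models_dir.
+# A hypothetical invocation (paths are illustrative):
+#   python cordex.py NAM-44 /tmp/NAM-44_analysis /proj3/data/obs4mips \
+#       '/proj3/data/CORDEX/NAM-44/*'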
+user_input = sys.argv[1:]
+if len(user_input) == 4:
+    domain, workdir, obs_dir, models_dir = user_input
+else:
+    domain = 'NAM-44'
+
+    # The output directory
+    workdir = os.path.join(os.getcwd(), domain + '_analysis')
+
+    # Location of obs4MIPS files
+    obs_dir = '/proj3/data/obs4mips'
+
+    # Location of CORDEX files
+    models_dir = '/proj3/data/CORDEX/{domain}/*'.format(domain=domain)
+
+# Extract metadata from model and obs files, pairing up files with the same
+# variables for separate evaluations
+obs_extractor = obs4MIPSMetadataExtractor(obs_dir)
+models_extractor = CORDEXMetadataExtractor(models_dir)
+groups = obs_extractor.group(models_extractor, 'variable')
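+# Each entry of `groups` is assumed to be an (obs_info, models_info) pair for
+# one shared variable, where obs_info is a dict carrying at least
+# 'instrument' and 'variable' keys (inferred from the loop below).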
+
+# Configuration file template, to be rendered repeatedly for each evaluation
+# run
+env = jinja2.Environment(loader=jinja2.FileSystemLoader('./templates'),
+                         trim_blocks=True, lstrip_blocks=True)
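+# trim_blocks drops the newline right after a template tag and lstrip_blocks
+# strips leading whitespace before one, keeping the rendered YAML tidy.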
+t = env.get_template('CORDEX.yaml.template')
+
+# Each group represents a single evaluation. Repeat the evaluation for
+# three seasons: Summer, Winter, and Annual.
+seasons = ['annual', 'winter', 'summer']
+errored = []
+for group in groups:
+    obs_info, models_info = group
+    instrument = obs_info['instrument']
+    variable = obs_info['variable']
+    for season in seasons:
+        configfile_basename = '_'.join([domain, instrument, variable, season]) + '.yaml'
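+        # e.g. 'NAM-44_TRMM_pr_annual.yaml' (instrument and variable shown
+        # here are illustrative; real values come from the obs metadata)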
         configfile_path = os.path.join(workdir, domain, instrument,
-                                       variable, season)
-        if not os.path.exists(configfile_path):
-            os.makedirs(configfile_path)
-        configfile_path = os.path.join(configfile_path, configfile_basename)
-        with open(configfile_path, 'w') as configfile:
-            configfile.write(t.render(obs_info=obs_info, models_info=models_info,
-                                      season=season, output_dir=workdir))
-
-        # TODO: Do this in parallel. Will change this once this approach
-        # is well tested.
-        code = subprocess.call([sys.executable, '../run_RCMES.py', configfile_path])
-        errored = []
-        if code:
-            errored.append(configfile_path)
-
-print("All runs done. The following ended with an error: {}".format(errored))
+                                       variable, season)
+        if not os.path.exists(configfile_path):
+            os.makedirs(configfile_path)
+        configfile_path = os.path.join(configfile_path, configfile_basename)
+        with open(configfile_path, 'w') as configfile:
+            configfile.write(t.render(obs_info=obs_info, models_info=models_info,
+                                      season=season, output_dir=workdir))
+
+        # TODO: Do this in parallel. Will change this once this approach
+        # is well tested.
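+        # subprocess.call blocks until run_RCMES.py exits and returns its
+        # exit status; a nonzero code marks the run as failed.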
+        code = subprocess.call([sys.executable, '../run_RCMES.py', configfile_path])
+        if code:
+            errored.append(configfile_path)
+
+print("All runs done. The following ended with an error: {}".format(errored))
diff --git a/examples/model_ensemble_to_rcmed.py b/examples/model_ensemble_to_rcmed.py
index 185aa2e..787367b 100644
--- a/examples/model_ensemble_to_rcmed.py
+++ b/examples/model_ensemble_to_rcmed.py
@@ -130,14 +130,10 @@
 
 cru_start = datetime.datetime.strptime(cru_31['start_date'], "%Y-%m-%d")
 cru_end = datetime.datetime.strptime(cru_31['end_date'], "%Y-%m-%d")
-knmi_start, knmi_end = knmi_dataset.temporal_boundaries()
 # Set the Time Range to be the year 1989
 start_time = datetime.datetime(1989, 1, 1)
 end_time = datetime.datetime(1989, 12, 1)
 
-print("Time Range is: %s to %s" % (start_time.strftime("%Y-%m-%d"),
-                                   end_time.strftime("%Y-%m-%d")))
-
 print("Fetching data from RCMED...")
 cru31_dataset = rcmed.parameter_dataset(dataset_id,
                                         parameter_id,
@@ -150,15 +146,29 @@
 
 """ Step 3: Resample Datasets so they are the same shape """
 
+# Rebinning temporally up front helps avoid mismatches between datasets whose
+# timestamps fall on different days of the month (the 1st vs. the 15th)
 print("Temporally Rebinning the Datasets to an Annual Timestep")
 # Rebin each dataset to an annual timestep.
 knmi_dataset = dsp.temporal_rebin(knmi_dataset, temporal_resolution='annual')
+dataset_start, dataset_end = knmi_dataset.temporal_boundaries()
+start_time = max([start_time, dataset_start])
+end_time = min([end_time, dataset_end])
+
 wrf311_dataset = dsp.temporal_rebin(
     wrf311_dataset, temporal_resolution='annual')
-cru31_dataset = dsp.temporal_rebin(cru31_dataset, temporal_resolution='annual')
+dataset_start, dataset_end = wrf311_dataset.temporal_boundaries()
+start_time = max([start_time, dataset_start])
+end_time = min([end_time, dataset_end])
 
-# Running Temporal Rebin early helps negate the issue of datasets being on different
-# days of the month (1st vs. 15th)
+cru31_dataset = dsp.temporal_rebin(cru31_dataset, temporal_resolution='annual')
+dataset_start, dataset_end = cru31_dataset.temporal_boundaries()
+start_time = max([start_time, dataset_start])
+end_time = min([end_time, dataset_end])
+
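+# After the clamping above, [start_time, end_time] is the intersection of the
+# requested range with every dataset's temporal extent.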
+print("Time Range is: %s to %s" % (start_time.strftime("%Y-%m-%d"),
+                                   end_time.strftime("%Y-%m-%d")))
+
 # Create a Bounds object to use for subsetting
 new_bounds = Bounds(lat_min=min_lat, lat_max=max_lat, lon_min=min_lon,
                     lon_max=max_lon, start=start_time, end=end_time)
@@ -214,7 +224,7 @@
 lats = new_lats
 lons = new_lons
 fname = OUTPUT_PLOT
-gridshape = (3, 1)  # Using a 3 x 1 since we have a 1 year of data for 3 models
+gridshape = (3, end_time.year - start_time.year + 1)  # Using a 3 x N grid since we have N years of data for 3 models
 plotnames = ["KNMI", "WRF311", "ENSEMBLE"]
 for i in np.arange(3):
     plot_title = "TASMAX Bias of CRU 3.1 vs. %s (%s - %s)" % (
diff --git a/ocw-ui/backend/processing.py b/ocw-ui/backend/processing.py
index 07375d8..6d8a1a1 100644
--- a/ocw-ui/backend/processing.py
+++ b/ocw-ui/backend/processing.py
@@ -216,8 +216,9 @@
                        in target_datasets]
     
     # Do temporal re-bin based off of passed resolution
-    ref_dataset = dsp.temporal_rebin(ref_dataset, time_delta)
-    target_datasets = [dsp.temporal_rebin(ds, time_delta)
+    temporal_resolution_type = data['temporal_resolution_type']
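+    # 'temporal_resolution_type' is assumed to arrive in the request payload
+    # as a resolution string such as 'annual' or 'monthly'.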
+    ref_dataset = dsp.temporal_rebin(ref_dataset, temporal_resolution_type)
+    target_datasets = [dsp.temporal_rebin(ds, temporal_resolution_type)
                        for ds
                        in target_datasets]
 
diff --git a/ocw/dataset.py b/ocw/dataset.py
index 0a0e1a6..bb06443 100644
--- a/ocw/dataset.py
+++ b/ocw/dataset.py
@@ -25,14 +25,12 @@
 
 '''
 
-import os
-import numpy
-import logging
 import datetime as dt
-from mpl_toolkits.basemap import Basemap
-import netCDF4
+import logging
 
-import ocw
+import netCDF4
+import numpy
+
 import ocw.utils as utils
 
 logger = logging.getLogger(__name__)
@@ -235,7 +233,7 @@
 
 
 class Bounds(object):
-    '''Container for holding spatial and temporal bounds information.
+    """Container for holding spatial and temporal bounds information.
 
     Certain operations require valid bounding information to be present for
     correct functioning. Bounds guarantees that a function receives well
@@ -245,10 +243,11 @@
     * 'rectangular'
     * 'CORDEX (CORDEX region name)': pre-defined CORDEX boundary
     * 'us_states': an array of US state abbreviations is required, e.g. us_states = ['CA','NV']
-    * 'countries': an array of county names is required (ex) countries = ['United States','Canada','Mexico']
+    * 'countries': an array of country names is required, e.g. countries = ['United States','Canada']
     * 'user': a user_mask_file in netCDF format with a two-dimensional mask variable is required.
 
-    If boundary_type == 'rectangular', spatial and temporal bounds must follow the following guidelines.
+    If boundary_type == 'rectangular', spatial and temporal bounds must follow the
+    following guidelines.
 
     * Latitude values must be in the range [-90, 90]
     * Longitude values must be in the range [-180, 180]
@@ -256,14 +255,15 @@
       values.
 
     Temporal bounds must be valid datetime objects.
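+
+    Example (a minimal sketch; values are illustrative)::
+
+        bounds = Bounds(boundary_type='rectangular',
+                        lat_min=30.0, lat_max=50.0,
+                        lon_min=-130.0, lon_max=-60.0,
+                        start=dt.datetime(1989, 1, 1),
+                        end=dt.datetime(1989, 12, 1))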
-    '''
+    """
 
     def __init__(self, boundary_type='rectangular',
                  us_states=None, countries=None,
-                 user_mask_file=None, mask_variable_name=None, longitude_name=None, latitude_name=None,
+                 user_mask_file=None, mask_variable_name=None,
+                 longitude_name=None, latitude_name=None,
                  lat_min=-90, lat_max=90, lon_min=-180, lon_max=180,
                  start=None, end=None):
-        '''Default Bounds constructor
+        """Default Bounds constructor
         :param boundary_type: The type of spatial subset boundary.
         :type boundary_type: :mod:`string`
 
@@ -291,89 +291,132 @@
         :type end: :class:`datetime.datetime`
 
         :raises: ValueError
-        '''
-        self.boundary_type = boundary_type
-        if start:
-            self._start = start
-        else:
-            self._start = None
+        """
 
-        if end:
+        self.boundary_type = boundary_type
+
+        self._start = None
+        self._end = None
+        self.lat_min = None
+        self.lat_max = None
+        self.lon_min = None
+        self.lon_max = None
+
+        if start and self._validate_start(start):
+            self._start = start
+
+        if end and self._validate_end(end):
             self._end = end
-        else:
-            self._end = None
 
         if boundary_type == 'us_states':
-            self.masked_regions = utils.shapefile_boundary(
-                boundary_type, us_states)
+
+            self.masked_regions = utils.shapefile_boundary(boundary_type, us_states)
+
         if boundary_type == 'countries':
-            self.masked_regions = utils.shapefile_boundary(
-                boundary_type, countries)
+
+            self.masked_regions = utils.shapefile_boundary(boundary_type, countries)
+
         if boundary_type == 'user':
+
             file_object = netCDF4.Dataset(user_mask_file)
             self.mask_variable = file_object.variables[mask_variable_name][:]
             mask_longitude = file_object.variables[longitude_name][:]
             mask_latitude = file_object.variables[latitude_name][:]
             if mask_longitude.ndim == 1 and mask_latitude.ndim == 1:
-                self.mask_longitude, self.mask_latitude = numpy.meshgrid(
-                    mask_longitude, mask_latitude)
+                self.mask_longitude, self.mask_latitude = \
+                    numpy.meshgrid(mask_longitude, mask_latitude)
             elif mask_longitude.ndim == 2 and mask_latitude.ndim == 2:
                 self.mask_longitude = mask_longitude
                 self.mask_latitude = mask_latitude
-        if boundary_type == 'rectangular':
-            if not (-90 <= float(lat_min) <= 90) or float(lat_min) > float(lat_max):
-                error = "Attempted to set lat_min to invalid value: %s" % (
-                    lat_min)
-                logger.error(error)
-                raise ValueError(error)
-            if not (-90 <= float(lat_max) <= 90):
-                error = "Attempted to set lat_max to invalid value: %s" % (
-                    lat_max)
-                logger.error(error)
-                raise ValueError(error)
-            if not (-180 <= float(lon_min) <= 180) or float(lon_min) > float(lon_max):
-                error = "Attempted to set lon_min to invalid value: %s" % (
-                    lon_min)
-                logger.error(error)
-                raise ValueError(error)
-            if not (-180 <= float(lon_max) <= 180):
-                error = "Attempted to set lat_max to invalid value: %s" % (
-                    lon_max)
-                logger.error(error)
-                raise ValueError(error)
 
-            self.lat_min = float(lat_min)
-            self.lat_max = float(lat_max)
-            self.lon_min = float(lon_min)
-            self.lon_max = float(lon_max)
+        if boundary_type == 'rectangular':
+
+            if self._validate_lat_lon(lat_max=lat_max, lat_min=lat_min,
+                                      lon_max=lon_max, lon_min=lon_min):
+                self.lat_min = float(lat_min)
+                self.lat_max = float(lat_max)
+                self.lon_min = float(lon_min)
+                self.lon_max = float(lon_max)
+
         if boundary_type[:6].upper() == 'CORDEX':
-            self.lat_min, self.lat_max, self.lon_min, self.lon_max = utils.CORDEX_boundary(
-                boundary_type[6:].replace(" ", "").lower())
+
+            lat_min, lat_max, lon_min, lon_max = \
+                utils.CORDEX_boundary(boundary_type[6:].replace(" ", "").lower())
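+            # e.g. boundary_type='CORDEX NAM-44' looks up region 'nam-44'
+            # (the region name shown here is illustrative).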
+
+            if self._validate_lat_lon(lat_max=lat_max, lat_min=lat_min,
+                                      lon_max=lon_max, lon_min=lon_min):
+                self.lat_min = float(lat_min)
+                self.lat_max = float(lat_max)
+                self.lon_min = float(lon_min)
+                self.lon_max = float(lon_max)
 
     @property
     def start(self):
+        """ Getter for start attribute. """
         return self._start
 
     @start.setter
     def start(self, value):
-        if self._end:
-            if not (type(value) is dt.datetime and value < self._end):
-                error = "Attempted to set start to invalid value: %s" % (value)
-                logger.error(error)
-                raise ValueError(error)
-
-        self._start = value
+        """ Setter for start attribute. """
+        if value and self._validate_start(value):
+            self._start = value
 
     @property
     def end(self):
+        """ Getter for end attribute. """
         return self._end
 
     @end.setter
     def end(self, value):
+        """ Setter for end attribute. """
+        if value and self._validate_end(value):
+            self._end = value
+
+    def _validate_start(self, value):
+        """ Validate start is both the correct type and less than end. """
+        if not isinstance(value, dt.datetime):
+            error = "Attempted to set start to invalid type: %s" % (type(value))
+            logger.error(error)
+            raise ValueError(error)
+
+        if self._end:
+            if value > self._end:
+                error = "Attempted to set start to invalid value: %s" % (value)
+                logger.error(error)
+                raise ValueError(error)
+
+        return True
+
+    def _validate_end(self, value):
+        """ Validate end is both the correct type and greater than start. """
+        if not isinstance(value, dt.datetime):
+            error = "Attempted to set end to invalid type: %s" % (type(value))
+            logger.error(error)
+            raise ValueError(error)
+
         if self._start:
-            if not (type(value) is dt.datetime and value > self._start):
+            if value < self._start:
                 error = "Attempted to set end to invalid value: %s" % (value)
                 logger.error(error)
                 raise ValueError(error)
 
-        self._end = value
+        return True
+
+    def _validate_lat_lon(self, lat_max, lat_min, lon_max, lon_min):
+        """ Confirm the min / max lat / lon are within expected ranges. """
+        if not (-90 <= float(lat_min) <= 90) or float(lat_min) > float(lat_max):
+            error = "Attempted to set lat_min to invalid value: %s" % (lat_min)
+            logger.error(error)
+            raise ValueError(error)
+        if not -90 <= float(lat_max) <= 90:
+            error = "Attempted to set lat_max to invalid value: %s" % (lat_max)
+            logger.error(error)
+            raise ValueError(error)
+        if not (-180 <= float(lon_min) <= 180) or float(lon_min) > float(lon_max):
+            error = "Attempted to set lon_min to invalid value: %s" % (lon_min)
+            logger.error(error)
+            raise ValueError(error)
+        if not -180 <= float(lon_max) <= 180:
+            error = "Attempted to set lat_max to invalid value: %s" % (lon_max)
+            logger.error(error)
+            raise ValueError(error)
+
+        return True