Merge branch 'CLIMATE-832'
diff --git a/RCMES/cli_app.py b/RCMES/cli_app.py
index be46de8..9894a35 100644
--- a/RCMES/cli_app.py
+++ b/RCMES/cli_app.py
@@ -457,8 +457,8 @@
'database':"{0}".format(netCDF_path),
'dataset_id':"esgf".format(esgf_variable),
'parameter_id':"{0}".format(esgf_variable),
- 'start_date': obs_dataset.time_range()[0].strftime("%Y-%m-%d"),
- 'end_date':obs_dataset.time_range()[1].strftime("%Y-%m-%d"),
+ 'start_date': obs_dataset.temporal_boundaries()[0].strftime("%Y-%m-%d"),
+ 'end_date':obs_dataset.temporal_boundaries()[1].strftime("%Y-%m-%d"),
#'bounding_box':obs['bounding_box'],
'timestep':"monthly",
'min_lat':obs_dataset.spatial_boundaries()[0],
@@ -646,7 +646,8 @@
if each_target_dataset.lats.ndim !=2 and each_target_dataset.lons.ndim !=2:
new_model_datasets[member] = dsp.subset(EVAL_BOUNDS, new_model_datasets[member])
else:
- new_model_datasets[member] = dsp.temporal_slice(EVAL_BOUNDS.start, EVAL_BOUNDS.end, each_target_dataset)
+ new_model_datasets[member] = dsp.temporal_slice(
+ each_target_dataset, EVAL_BOUNDS.start, EVAL_BOUNDS.end)
screen.addstr(5, 4, "--> Temporally regridded.")
screen.refresh()
@@ -798,8 +799,8 @@
models_start_time = []
models_end_time = []
for model in model_datasets:
- models_start_time.append(model.time_range()[0])
- models_end_time.append(model.time_range()[1])
+ models_start_time.append(model.temporal_boundaries()[0])
+ models_end_time.append(model.temporal_boundaries()[1])
return models_start_time, models_end_time
diff --git a/RCMES/run_RCMES.py b/RCMES/run_RCMES.py
index ed48458..cd69bc4 100644
--- a/RCMES/run_RCMES.py
+++ b/RCMES/run_RCMES.py
@@ -146,11 +146,11 @@
max_lon = np.min([max_lon, ref_dataset.lons.max()])
bounds = Bounds(min_lat, max_lat, min_lon, max_lon, start_time, end_time)
-ref_dataset = dsp.subset(bounds,ref_dataset)
+ref_dataset = dsp.subset(ref_dataset, bounds)
if ref_dataset.temporal_resolution() != temporal_resolution:
ref_dataset = dsp.temporal_rebin(ref_dataset, temporal_resolution)
for idata,dataset in enumerate(model_datasets):
- model_datasets[idata] = dsp.subset(bounds,dataset)
+ model_datasets[idata] = dsp.subset(dataset, bounds)
if dataset.temporal_resolution() != temporal_resolution:
model_datasets[idata] = dsp.temporal_rebin(dataset, temporal_resolution)
@@ -159,9 +159,9 @@
month_end = time_info['month_end']
average_each_year = time_info['average_each_year']
-ref_dataset = dsp.temporal_subset(month_start, month_end,ref_dataset,average_each_year)
+ref_dataset = dsp.temporal_subset(ref_dataset,month_start, month_end,average_each_year)
for idata,dataset in enumerate(model_datasets):
- model_datasets[idata] = dsp.temporal_subset(month_start, month_end,dataset,average_each_year)
+ model_datasets[idata] = dsp.temporal_subset(dataset,month_start, month_end,average_each_year)
# generate grid points for regridding
if config['regrid']['regrid_on_reference']:
diff --git a/RCMES/statistical_downscaling/run_statistical_downscaling.py b/RCMES/statistical_downscaling/run_statistical_downscaling.py
index 60c6ac2..9aae618 100644
--- a/RCMES/statistical_downscaling/run_statistical_downscaling.py
+++ b/RCMES/statistical_downscaling/run_statistical_downscaling.py
@@ -132,9 +132,9 @@
""" Step 2: Temporal subsetting """
print("Temporal subsetting for the selected month(s)")
-ref_temporal_subset = dsp.temporal_subset(month_start, month_end, ref_dataset)
-model_temporal_subset_present = dsp.temporal_subset(month_start, month_end, model_dataset_present)
-model_temporal_subset_future = dsp.temporal_subset(month_start, month_end, model_dataset_future)
+ref_temporal_subset = dsp.temporal_subset(ref_dataset, month_start, month_end)
+model_temporal_subset_present = dsp.temporal_subset(model_dataset_present, month_start, month_end)
+model_temporal_subset_future = dsp.temporal_subset(model_dataset_future, month_start, month_end)
""" Step 3: Spatial aggregation of observational data into the model grid """
print("Spatial aggregation of observational data near latitude %0.2f and longitude %0.2f " % (grid_lat, grid_lon))
diff --git a/RCMES/test/test.py b/RCMES/test/test.py
index bbb8095..677a13f 100644
--- a/RCMES/test/test.py
+++ b/RCMES/test/test.py
@@ -83,7 +83,7 @@
cru_start = datetime.datetime.strptime(cru_31['start_date'], "%Y-%m-%d")
cru_end = datetime.datetime.strptime(cru_31['end_date'], "%Y-%m-%d")
-knmi_start, knmi_end = knmi_dataset.time_range()
+knmi_start, knmi_end = knmi_dataset.temporal_boundaries()
# Grab the Max Start Time
start_time = max([cru_start, knmi_start])
# Grab the Min End Time
@@ -112,7 +112,7 @@
# Create a Bounds object to use for subsetting
new_bounds = Bounds(min_lat, max_lat, min_lon, max_lon, start_time, end_time)
-knmi_dataset = dsp.subset(new_bounds, knmi_dataset)
+knmi_dataset = dsp.subset(knmi_dataset, new_bounds)
print("CRU31_Dataset.values shape: (times, lats, lons) - %s" % (cru31_dataset.values.shape,))
print("KNMI_Dataset.values shape: (times, lats, lons) - %s \n" % (knmi_dataset.values.shape,))
diff --git a/docs/source/ocw/overview.rst b/docs/source/ocw/overview.rst
index 9ef94da..a77f954 100644
--- a/docs/source/ocw/overview.rst
+++ b/docs/source/ocw/overview.rst
@@ -12,7 +12,7 @@
Common Data Abstraction
-----------------------
-The OCW :class:`dataset.Dataset` class is the primary data abstraction used throughout OCW. It facilitates the uniform handling of data throughout the toolkit and provides a few useful helper functions such as :func:`dataset.Dataset.spatial_boundaries` and :func:`dataset.Dataset.time_range`. Creating a new dataset object is straightforward but generally you will want to use an OCW data source to load the data for you.
+The OCW :class:`dataset.Dataset` class is the primary data abstraction used throughout OCW. It facilitates the uniform handling of data throughout the toolkit and provides a few useful helper functions such as :func:`dataset.Dataset.spatial_boundaries` and :func:`dataset.Dataset.temporal_boundaries`. Creating a new dataset object is straightforward but generally you will want to use an OCW data source to load the data for you.
Data Sources
------------
@@ -35,7 +35,7 @@
>>> import ocw.dataset_processor as dsp
>>> new_bounds = Bounds(min_lat, max_lat, min_lon, max_lon, start_time, end_time)
->>> knmi_dataset = dsp.subset(new_bounds, knmi_dataset)
+>>> knmi_dataset = dsp.subset(knmi_dataset, new_bounds)
Temporally re-binning a dataset is great when the time step of the data is too fine grain for the desired use. For instance, perhaps we want to see a yearly trend but we have daily data. We would need to make the following call to adjust our dataset::
diff --git a/examples/knmi_to_cru31_full_bias.py b/examples/knmi_to_cru31_full_bias.py
index e37e887..4c0abd9 100644
--- a/examples/knmi_to_cru31_full_bias.py
+++ b/examples/knmi_to_cru31_full_bias.py
@@ -83,7 +83,7 @@
cru_start = datetime.datetime.strptime(cru_31['start_date'], "%Y-%m-%d")
cru_end = datetime.datetime.strptime(cru_31['end_date'], "%Y-%m-%d")
-knmi_start, knmi_end = knmi_dataset.time_range()
+knmi_start, knmi_end = knmi_dataset.temporal_boundaries()
# Grab the Max Start Time
start_time = max([cru_start, knmi_start])
# Grab the Min End Time
@@ -112,7 +112,7 @@
# Create a Bounds object to use for subsetting
new_bounds = Bounds(min_lat, max_lat, min_lon, max_lon, start_time, end_time)
-knmi_dataset = dsp.subset(new_bounds, knmi_dataset)
+knmi_dataset = dsp.subset(knmi_dataset, new_bounds)
print("CRU31_Dataset.values shape: (times, lats, lons) - %s" % (cru31_dataset.values.shape,))
print("KNMI_Dataset.values shape: (times, lats, lons) - %s \n" % (knmi_dataset.values.shape,))
diff --git a/examples/model_ensemble_to_rcmed.py b/examples/model_ensemble_to_rcmed.py
index a9303dd..fef1f9d 100644
--- a/examples/model_ensemble_to_rcmed.py
+++ b/examples/model_ensemble_to_rcmed.py
@@ -99,7 +99,7 @@
cru_start = datetime.datetime.strptime(cru_31['start_date'], "%Y-%m-%d")
cru_end = datetime.datetime.strptime(cru_31['end_date'], "%Y-%m-%d")
-knmi_start, knmi_end = knmi_dataset.time_range()
+knmi_start, knmi_end = knmi_dataset.temporal_boundaries()
# Set the Time Range to be the year 1989
start_time = datetime.datetime(1989,1,1)
end_time = datetime.datetime(1989,12,1)
@@ -131,8 +131,8 @@
new_bounds = Bounds(min_lat, max_lat, min_lon, max_lon, start_time, end_time)
# Subset our model datasets so they are the same size
-knmi_dataset = dsp.subset(new_bounds, knmi_dataset)
-wrf311_dataset = dsp.subset(new_bounds, wrf311_dataset)
+knmi_dataset = dsp.subset(knmi_dataset, new_bounds)
+wrf311_dataset = dsp.subset(wrf311_dataset, new_bounds)
""" Spatially Regrid the Dataset Objects to a 1/2 degree grid """
# Using the bounds we will create a new set of lats and lons on 1/2 degree step
diff --git a/examples/multi_model_evaluation.py b/examples/multi_model_evaluation.py
index a09c526..0755279 100644
--- a/examples/multi_model_evaluation.py
+++ b/examples/multi_model_evaluation.py
@@ -91,7 +91,7 @@
CRU31 = dsp.temporal_rebin(CRU31, datetime.timedelta(days=30))
for member, each_target_dataset in enumerate(target_datasets):
- target_datasets[member] = dsp.subset(EVAL_BOUNDS, target_datasets[member])
+ target_datasets[member] = dsp.subset(target_datasets[member], EVAL_BOUNDS)
target_datasets[member] = dsp.water_flux_unit_conversion(target_datasets[member])
target_datasets[member] = dsp.temporal_rebin(target_datasets[member], datetime.timedelta(days=30))
diff --git a/examples/multi_model_taylor_diagram.py b/examples/multi_model_taylor_diagram.py
index 57dabdd..9ba8746 100644
--- a/examples/multi_model_taylor_diagram.py
+++ b/examples/multi_model_taylor_diagram.py
@@ -86,7 +86,7 @@
for member, each_target_dataset in enumerate(target_datasets):
target_datasets[member] = dsp.water_flux_unit_conversion(target_datasets[member])
target_datasets[member] = dsp.temporal_rebin(target_datasets[member], temporal_resolution = 'monthly')
- target_datasets[member] = dsp.subset(EVAL_BOUNDS, target_datasets[member])
+ target_datasets[member] = dsp.subset(target_datasets[member], EVAL_BOUNDS)
#Regrid
print("... regrid")
diff --git a/examples/subregions_portrait_diagram.py b/examples/subregions_portrait_diagram.py
index d8d982f..525cb26 100644
--- a/examples/subregions_portrait_diagram.py
+++ b/examples/subregions_portrait_diagram.py
@@ -76,7 +76,7 @@
CRU31 = dsp.water_flux_unit_conversion(CRU31)
for member, each_target_dataset in enumerate(target_datasets):
- target_datasets[member] = dsp.subset(EVAL_BOUNDS, target_datasets[member])
+ target_datasets[member] = dsp.subset(target_datasets[member], EVAL_BOUNDS)
target_datasets[member] = dsp.water_flux_unit_conversion(target_datasets[member])
target_datasets[member] = dsp.normalize_dataset_datetimes(target_datasets[member], 'monthly')
diff --git a/examples/taylor_diagram_example.py b/examples/taylor_diagram_example.py
index 90c6708..66ca175 100644
--- a/examples/taylor_diagram_example.py
+++ b/examples/taylor_diagram_example.py
@@ -62,8 +62,8 @@
# make a Bounds object and use it to subset our datasets.
################################################################################
subset = Bounds(-45, 42, -24, 60, datetime.datetime(1989, 1, 1), datetime.datetime(1989, 12, 1))
-knmi_dataset = dsp.subset(subset, knmi_dataset)
-wrf_dataset = dsp.subset(subset, wrf_dataset)
+knmi_dataset = dsp.subset(knmi_dataset, subset)
+wrf_dataset = dsp.subset(wrf_dataset, subset)
# Temporally re-bin the data into a monthly timestep.
################################################################################
diff --git a/examples/time_series_with_regions.py b/examples/time_series_with_regions.py
index ec9516d..8d9e5c0 100644
--- a/examples/time_series_with_regions.py
+++ b/examples/time_series_with_regions.py
@@ -74,7 +74,7 @@
CRU31 = dsp.normalize_dataset_datetimes(CRU31, 'monthly')
for member, each_target_dataset in enumerate(target_datasets):
- target_datasets[member] = dsp.subset(EVAL_BOUNDS, target_datasets[member])
+ target_datasets[member] = dsp.subset(target_datasets[member], EVAL_BOUNDS)
target_datasets[member] = dsp.water_flux_unit_conversion(target_datasets[member])
target_datasets[member] = dsp.normalize_dataset_datetimes(target_datasets[member], 'monthly')
@@ -122,7 +122,7 @@
firstTime = True
subset_name = regions[0]+"_CRU31"
#labels.append(subset_name) #for legend, uncomment this line
- subset = dsp.subset(list_of_regions[region_counter], CRU31, subset_name)
+ subset = dsp.subset(CRU31, list_of_regions[region_counter], subset_name)
tSeries = utils.calc_time_series(subset)
results.append(tSeries)
tSeries=[]
@@ -130,7 +130,9 @@
for member, each_target_dataset in enumerate(target_datasets):
subset_name = regions[0]+"_"+target_datasets[member].name
#labels.append(subset_name) #for legend, uncomment this line
- subset = dsp.subset(list_of_regions[region_counter],target_datasets[member],subset_name)
+ subset = dsp.subset(target_datasets[member],
+ list_of_regions[region_counter],
+ subset_name)
tSeries = utils.calc_time_series(subset)
results.append(tSeries)
tSeries=[]
diff --git a/ocw-ui/backend/processing.py b/ocw-ui/backend/processing.py
index e45b9c0..f925536 100644
--- a/ocw-ui/backend/processing.py
+++ b/ocw-ui/backend/processing.py
@@ -210,8 +210,8 @@
start,
end)
- ref_dataset = dsp.safe_subset(subset, ref_dataset)
- target_datasets = [dsp.safe_subset(subset, ds)
+ ref_dataset = dsp.safe_subset(ref_dataset, subset)
+ target_datasets = [dsp.safe_subset(ds, subset)
for ds
in target_datasets]
diff --git a/ocw-ui/backend/tests/test_processing.py b/ocw-ui/backend/tests/test_processing.py
index cc26b26..a1234de 100644
--- a/ocw-ui/backend/tests/test_processing.py
+++ b/ocw-ui/backend/tests/test_processing.py
@@ -88,7 +88,7 @@
def test_valid_load(self):
dataset = bp._load_rcmed_dataset_object(self.dataset_info, self.eval_bounds)
lat_min, lat_max, lon_min, lon_max = dataset.spatial_boundaries()
- start_time, end_time = dataset.time_range()
+ start_time, end_time = dataset.temporal_boundaries()
self.assertTrue(self.eval_bounds['lat_min'] <= lat_min)
self.assertTrue(self.eval_bounds['lat_max'] >= lat_max)
diff --git a/ocw/dataset.py b/ocw/dataset.py
index 78e6c14..f9c344e 100644
--- a/ocw/dataset.py
+++ b/ocw/dataset.py
@@ -91,7 +91,7 @@
return (float(numpy.min(self.lats)), float(numpy.max(self.lats)),
float(numpy.min(self.lons)), float(numpy.max(self.lons)))
- def time_range(self):
+ def temporal_boundaries(self):
'''Calculate the temporal range
:returns: The start and end date of the Dataset's temporal range as
@@ -200,16 +200,16 @@
def __str__(self):
lat_min, lat_max, lon_min, lon_max = self.spatial_boundaries()
- start, end = self.time_range()
+ start, end = self.temporal_boundaries()
lat_range = "({}, {})".format(lat_min, lon_min)
lon_range = "({}, {})".format(lon_min, lon_min)
- time_range = "({}, {})".format(start, end)
+ temporal_boundaries = "({}, {})".format(start, end)
formatted_repr = (
"<Dataset - name: {}, "
"lat-range: {}, "
"lon-range: {}, "
- "time_range: {}, "
+ "temporal_boundaries: {}, "
"var: {}, "
"units: {}>"
)
@@ -218,7 +218,7 @@
self.name if self.name != "" else None,
lat_range,
lon_range,
- time_range,
+ temporal_boundaries,
self.variable,
self.units
)
@@ -363,17 +363,17 @@
def __str__(self):
lat_range = "({}, {})".format(self._lat_min, self._lat_max)
lon_range = "({}, {})".format(self._lon_min, self._lon_max)
- time_range = "({}, {})".format(self._start, self._end)
+ temporal_boundaries = "({}, {})".format(self._start, self._end)
formatted_repr = (
"<Bounds - "
"lat-range: {}, "
"lon-range: {}, "
- "time_range: {}> "
+ "temporal_boundaries: {}> "
)
return formatted_repr.format(
lat_range,
lon_range,
- time_range,
+ temporal_boundaries,
)
diff --git a/ocw/dataset_processor.py b/ocw/dataset_processor.py
index 70323f3..2b5dc9b 100755
--- a/ocw/dataset_processor.py
+++ b/ocw/dataset_processor.py
@@ -32,7 +32,7 @@
logger = logging.getLogger(__name__)
-def temporal_subset(month_start, month_end, target_dataset,
+def temporal_subset(target_dataset, month_start, month_end,
average_each_year=False):
""" Temporally subset data given month_index.
@@ -362,7 +362,7 @@
return ensemble_dataset
-def subset(subregion, target_dataset, subregion_name=None):
+def subset(target_dataset, subregion, subregion_name=None):
'''Subset given dataset(s) with subregion information
:param subregion: The Bounds with which to subset the target Dataset.
@@ -385,7 +385,7 @@
subregion.end = target_dataset.times[-1]
# Ensure that the subregion information is well formed
- _are_bounds_contained_by_dataset(subregion, target_dataset)
+ _are_bounds_contained_by_dataset(target_dataset, subregion)
if not subregion_name:
subregion_name = target_dataset.name
@@ -395,7 +395,7 @@
target_dataset.times == subregion.start)[0][0]
end_time_index = np.where(target_dataset.times == subregion.end)[0][0]
target_dataset = temporal_slice(
- start_time_index, end_time_index, target_dataset)
+ target_dataset, start_time_index, end_time_index)
nt, ny, nx = target_dataset.values.shape
y_index, x_index = np.where(
(target_dataset.lats >= subregion.lat_max) | (
@@ -409,8 +409,8 @@
elif target_dataset.lats.ndim == 1 and target_dataset.lons.ndim == 1:
# Get subregion indices into subregion data
- dataset_slices = _get_subregion_slice_indices(subregion,
- target_dataset)
+ dataset_slices = _get_subregion_slice_indices(target_dataset,
+ subregion)
# Slice the values array with our calculated slice indices
if target_dataset.values.ndim == 2:
subset_values = ma.zeros([len(target_dataset.values[
@@ -455,7 +455,7 @@
)
-def temporal_slice(start_time_index, end_time_index, target_dataset):
+def temporal_slice(target_dataset, start_time_index, end_time_index):
'''Temporally slice given dataset(s) with subregion information. This does not
spatially subset the target_Dataset
@@ -483,7 +483,7 @@
return target_dataset
-def safe_subset(subregion, target_dataset, subregion_name=None):
+def safe_subset(target_dataset, subregion, subregion_name=None):
'''Safely subset given dataset with subregion information
A standard subset requires that the provided subregion be entirely
@@ -504,7 +504,7 @@
'''
lat_min, lat_max, lon_min, lon_max = target_dataset.spatial_boundaries()
- start, end = target_dataset.time_range()
+ start, end = target_dataset.temporal_boundaries()
if subregion.lat_min < lat_min:
subregion.lat_min = lat_min
@@ -526,7 +526,7 @@
if subregion.end > end:
subregion.end = end
- return subset(subregion, target_dataset, subregion_name)
+ return subset(target_dataset, subregion, subregion_name)
def normalize_dataset_datetimes(dataset, timestep):
@@ -1359,7 +1359,7 @@
return new_values
-def _are_bounds_contained_by_dataset(bounds, dataset):
+def _are_bounds_contained_by_dataset(dataset, bounds):
'''Check if a Dataset fully contains a bounds.
:param bounds: The Bounds object to check.
@@ -1372,7 +1372,7 @@
a ValueError otherwise
'''
lat_min, lat_max, lon_min, lon_max = dataset.spatial_boundaries()
- start, end = dataset.time_range()
+ start, end = dataset.temporal_boundaries()
errors = []
# TODO: THIS IS TERRIBLY inefficent and we need to use a geometry
@@ -1418,7 +1418,7 @@
raise ValueError(error_message)
-def _get_subregion_slice_indices(subregion, target_dataset):
+def _get_subregion_slice_indices(target_dataset, subregion):
'''Get the indices for slicing Dataset values to generate the subregion.
:param subregion: The Bounds that specify the subset of the Dataset
diff --git a/ocw/evaluation.py b/ocw/evaluation.py
index 8f01a68..cd06450 100644
--- a/ocw/evaluation.py
+++ b/ocw/evaluation.py
@@ -272,11 +272,11 @@
def _run_subregion_evaluation(self):
results = []
- new_refs = [DSP.subset(s, self.ref_dataset) for s in self.subregions]
+ new_refs = [DSP.subset(self.ref_dataset, s) for s in self.subregions]
for target in self.target_datasets:
results.append([])
- new_targets = [DSP.subset(s, target) for s in self.subregions]
+ new_targets = [DSP.subset(target, s) for s in self.subregions]
for metric in self.metrics:
results[-1].append([])
@@ -313,10 +313,11 @@
def _run_subregion_unary_evaluation(self):
unary_results = []
if self.ref_dataset:
- new_refs = [DSP.subset(s, self.ref_dataset) for s in self.subregions]
+ new_refs = [DSP.subset(self.ref_dataset, s)
+ for s in self.subregions]
new_targets = [
- [DSP.subset(s, t) for s in self.subregions]
+ [DSP.subset(t, s) for s in self.subregions]
for t in self.target_datasets
]
diff --git a/ocw/tests/test_dataset.py b/ocw/tests/test_dataset.py
index dcf6490..8b666c1 100644
--- a/ocw/tests/test_dataset.py
+++ b/ocw/tests/test_dataset.py
@@ -137,9 +137,9 @@
self.test_dataset.spatial_boundaries(),
(min(self.lat), max(self.lat), min(self.lon), max(self.lon)))
- def test_time_range(self):
+ def test_temporal_boundaries(self):
self.assertEqual(
- self.test_dataset.time_range(),
+ self.test_dataset.temporal_boundaries(),
(dt.datetime(2000, 1, 1), dt.datetime(2000, 12, 1)))
def test_spatial_resolution(self):
@@ -187,16 +187,16 @@
def test_str_(self):
dataset = self.test_dataset
lat_min, lat_max, lon_min, lon_max = dataset.spatial_boundaries()
- start, end = dataset.time_range()
+ start, end = dataset.temporal_boundaries()
lat_range = "({}, {})".format(lat_min, lon_min)
lon_range = "({}, {})".format(lon_min, lon_min)
- time_range = "({}, {})".format(start, end)
+ temporal_boundaries = "({}, {})".format(start, end)
formatted_repr = (
"<Dataset - name: {}, "
"lat-range: {}, "
"lon-range: {}, "
- "time_range: {}, "
+ "temporal_boundaries: {}, "
"var: {}, "
"units: {}>"
)
@@ -205,7 +205,7 @@
dataset.name if dataset.name != "" else None,
lat_range,
lon_range,
- time_range,
+ temporal_boundaries,
dataset.variable,
dataset.units
)
@@ -313,19 +313,19 @@
def test__str__(self):
lat_range = "({}, {})".format(self.bounds.lat_min, self.bounds.lat_max)
lon_range = "({}, {})".format(self.bounds.lon_min, self.bounds.lon_max)
- time_range = "({}, {})".format(self.bounds.start, self.bounds.end)
+ temporal_boundaries = "({}, {})".format(self.bounds.start, self.bounds.end)
formatted_repr = (
"<Bounds - "
"lat-range: {}, "
"lon-range: {}, "
- "time_range: {}> "
+ "temporal_boundaries: {}> "
)
output = formatted_repr.format(
lat_range,
lon_range,
- time_range,
+ temporal_boundaries,
)
self.assertEqual(str(self.bounds), output)
diff --git a/ocw/tests/test_dataset_processor.py b/ocw/tests/test_dataset_processor.py
index 9060070..627955a 100644
--- a/ocw/tests/test_dataset_processor.py
+++ b/ocw/tests/test_dataset_processor.py
@@ -37,22 +37,22 @@
self.dataset_times = np.array([datetime.datetime(year, month, 1)
for year in range(2000, 2010)
for month in range(1, 6)])
- self.tempSubset = dp.temporal_subset(1, 5, self.ten_year_dataset)
+ self.tempSubset = dp.temporal_subset(self.ten_year_dataset, 1, 5)
np.testing.assert_array_equal(
self.dataset_times, self.tempSubset.times)
def test_temporal_subset_with_average_time(self):
self.dataset_times = np.array([datetime.datetime(year, 2, 1)
for year in range(2000, 2010)])
- self.tempSubset = dp.temporal_subset(1, 3,
- self.ten_year_dataset,
+ self.tempSubset = dp.temporal_subset(self.ten_year_dataset,
+ 1, 3,
average_each_year=True)
np.testing.assert_array_equal(self.dataset_times,
self.tempSubset.times)
def test_temporal_subset_with_average_values(self):
- self.tempSubset = dp.temporal_subset(1, 3,
- self.ten_year_dataset,
+ self.tempSubset = dp.temporal_subset(self.ten_year_dataset,
+ 1, 3,
average_each_year=True)
self.dataset_values = np.ones([len(self.tempSubset.times),
len(self.ten_year_dataset.lats),
@@ -61,8 +61,8 @@
self.tempSubset.values)
def test_temporal_subset_attributes(self):
- self.tempSubset = dp.temporal_subset(1, 3,
- self.ten_year_dataset,
+ self.tempSubset = dp.temporal_subset(self.ten_year_dataset,
+ 1, 3,
average_each_year=True)
self.assertEqual(self.tempSubset.name, self.ten_year_dataset.name)
self.assertEqual(self.tempSubset.variable,
@@ -76,8 +76,8 @@
def test_temporal_subset_equal_start_end_month(self):
self.dataset_times = np.array([datetime.datetime(year, 1, 1)
for year in range(2000, 2010)])
- self.tempSubset = dp.temporal_subset(1, 1,
- self.ten_year_dataset,
+ self.tempSubset = dp.temporal_subset(self.ten_year_dataset,
+ 1, 1,
average_each_year=True)
np.testing.assert_array_equal(self.dataset_times,
self.tempSubset.times)
@@ -86,7 +86,7 @@
self.dataset_times = np.array([datetime.datetime(year, month, 1)
for year in range(2000, 2010)
for month in [1, 8, 9, 10, 11, 12]])
- self.tempSubset = dp.temporal_subset(8, 1, self.ten_year_dataset)
+ self.tempSubset = dp.temporal_subset(self.ten_year_dataset, 8, 1)
np.testing.assert_array_equal(
self.dataset_times, self.tempSubset.times)
@@ -206,9 +206,9 @@
end_index = 4
dates = np.array([datetime.datetime(2000, month, 1)
for month in range(start_index + 1, end_index + 2)])
- new_dataset = dp.temporal_slice(start_index,
- end_index,
- self.ten_year_dataset)
+ new_dataset = dp.temporal_slice(self.ten_year_dataset,
+ start_index,
+ end_index)
np.testing.assert_array_equal(new_dataset.times, dates)
def test_returned_dataset_values(self):
@@ -217,9 +217,9 @@
start_index = 1
end_index = 4
values = self.ten_year_dataset.values[start_index:end_index + 1]
- new_dataset = dp.temporal_slice(start_index,
- end_index,
- self.ten_year_dataset)
+ new_dataset = dp.temporal_slice(self.ten_year_dataset,
+ start_index,
+ end_index)
np.testing.assert_array_equal(new_dataset.values, values)
@@ -450,7 +450,7 @@
)
def test_subset(self):
- subset = dp.subset(self.subregion, self.target_dataset)
+ subset = dp.subset(self.target_dataset, self.subregion)
self.assertEqual(subset.lats.shape[0], 82)
self.assertSequenceEqual(list(np.array(range(-81, 82, 2))),
list(subset.lats))
@@ -459,17 +459,17 @@
self.assertEqual(subset.values.shape, (37, 82, 162))
def test_subset_name(self):
- subset = dp.subset(self.subregion, self.target_dataset)
+ subset = dp.subset(self.target_dataset, self.subregion)
self.assertEqual(subset.name, self.name)
def test_subset_name_propagation(self):
subset_name = 'foo_subset_name'
- subset = dp.subset(self.subregion, self.target_dataset, subset_name)
+ subset = dp.subset(self.target_dataset, self.subregion, subset_name)
self.assertEqual(subset.name, subset_name)
def test_subset_using_non_exact_spatial_bounds(self):
index_slices = dp._get_subregion_slice_indices(
- self.non_exact_spatial_subregion, self.target_dataset)
+ self.target_dataset, self.non_exact_spatial_subregion)
control_index_slices = {"lat_start": 5,
"lat_end": 84,
"lon_start": 10,
@@ -480,7 +480,7 @@
def test_subset_using_non_exact_temporal_bounds(self):
index_slices = dp._get_subregion_slice_indices(
- self.non_exact_temporal_subregion, self.target_dataset)
+ self.target_dataset, self.non_exact_temporal_subregion)
control_index_slices = {"lat_start": 5,
"lat_end": 84,
"lon_start": 10,
@@ -494,7 +494,7 @@
-81, 81,
-161, 161,
)
- subset = dp.subset(self.subregion, self.target_dataset)
+ subset = dp.subset(self.target_dataset, self.subregion)
times = np.array([datetime.datetime(year, month, 1)
for year in range(2000, 2010)
for month in range(1, 13)])
@@ -546,7 +546,7 @@
def test_partial_spatial_overlap(self):
'''Ensure that safe_subset can handle out of bounds spatial values'''
- ds = dp.safe_subset(self.spatial_out_of_bounds, self.target_dataset)
+ ds = dp.safe_subset(self.target_dataset, self.spatial_out_of_bounds)
spatial_bounds = ds.spatial_boundaries()
self.assertEquals(spatial_bounds[0], -60)
self.assertEquals(spatial_bounds[1], 60)
@@ -555,7 +555,7 @@
def test_partial_temporal_overlap(self):
'''Ensure that safe_subset can handle out of bounds temporal values'''
- ds = dp.safe_subset(self.temporal_out_of_bounds, self.target_dataset)
+ ds = dp.safe_subset(self.target_dataset, self.temporal_out_of_bounds)
-        temporal_bounds = ds.time_range()
+        temporal_bounds = ds.temporal_boundaries()
start = datetime.datetime(2000, 1, 1)
end = datetime.datetime(2009, 12, 1)
@@ -564,9 +564,9 @@
self.assertEquals(temporal_bounds[1], end)
def test_entire_bounds_overlap(self):
- ds = dp.safe_subset(self.everything_out_of_bounds, self.target_dataset)
+ ds = dp.safe_subset(self.target_dataset, self.everything_out_of_bounds)
spatial_bounds = ds.spatial_boundaries()
- temporal_bounds = ds.time_range()
+ temporal_bounds = ds.temporal_boundaries()
start = datetime.datetime(2000, 1, 1)
end = datetime.datetime(2009, 12, 1)
@@ -594,32 +594,32 @@
def test_out_of_dataset_bounds_lat_min(self):
self.subregion.lat_min = -90
with self.assertRaises(ValueError):
- dp.subset(self.subregion, self.target_dataset)
+ dp.subset(self.target_dataset, self.subregion)
def test_out_of_dataset_bounds_lat_max(self):
self.subregion.lat_max = 90
with self.assertRaises(ValueError):
- dp.subset(self.subregion, self.target_dataset)
+ dp.subset(self.target_dataset, self.subregion)
def test_out_of_dataset_bounds_lon_min(self):
self.subregion.lon_min = -180
with self.assertRaises(ValueError):
- dp.subset(self.subregion, self.target_dataset)
+ dp.subset(self.target_dataset, self.subregion)
def test_out_of_dataset_bounds_lon_max(self):
self.subregion.lon_max = 180
with self.assertRaises(ValueError):
- dp.subset(self.subregion, self.target_dataset)
+ dp.subset(self.target_dataset, self.subregion)
def test_out_of_dataset_bounds_start(self):
self.subregion.start = datetime.datetime(1999, 1, 1)
with self.assertRaises(ValueError):
- dp.subset(self.subregion, self.target_dataset)
+ dp.subset(self.target_dataset, self.subregion)
def test_out_of_dataset_bounds_end(self):
self.subregion.end = datetime.datetime(2011, 1, 1)
with self.assertRaises(ValueError):
- dp.subset(self.subregion, self.target_dataset)
+ dp.subset(self.target_dataset, self.subregion)
class TestNetCDFWrite(unittest.TestCase):
diff --git a/ocw/utils.py b/ocw/utils.py
index cb47efe..2fab66f 100755
--- a/ocw/utils.py
+++ b/ocw/utils.py
@@ -374,8 +374,8 @@
start_time = []
end_time = []
for dataset in dataset_array:
- start_time.append(dataset.time_range()[0])
- end_time.append(dataset.time_range()[1])
+ start_time.append(dataset.temporal_boundaries()[0])
+ end_time.append(dataset.temporal_boundaries()[1])
return np.max(start_time), np.min(end_time)
diff --git a/ocw_config_runner/configuration_parsing.py b/ocw_config_runner/configuration_parsing.py
index 5c28249..89eab62 100644
--- a/ocw_config_runner/configuration_parsing.py
+++ b/ocw_config_runner/configuration_parsing.py
@@ -239,7 +239,7 @@
])
elif plot_type == 'time_series':
required_keys = set([
- 'time_range'
+ 'temporal_boundaries'
])
elif plot_type == 'portrait':
required_keys = set([
diff --git a/ocw_config_runner/configuration_writer.py b/ocw_config_runner/configuration_writer.py
index 8fc9242..5bbbb08 100644
--- a/ocw_config_runner/configuration_writer.py
+++ b/ocw_config_runner/configuration_writer.py
@@ -199,7 +199,7 @@
dataset_info = {'optional_args': {}}
min_lat, max_lat, min_lon, max_lon = dataset.spatial_boundaries()
- start_time, end_time = dataset.time_range()
+ start_time, end_time = dataset.temporal_boundaries()
dataset_info['data_source'] = 'rcmed'
dataset_info['dataset_id'] = dataset.origin['dataset_id']
@@ -279,7 +279,7 @@
for ds in datasets:
ds_lat_min, ds_lat_max, ds_lon_min, ds_lon_max = ds.spatial_boundaries()
- ds_start, ds_end = ds.time_range()
+ ds_start, ds_end = ds.temporal_boundaries()
if ds_lat_min < lat_min:
lat_min = ds_lat_min
diff --git a/ocw_config_runner/evaluation_creation.py b/ocw_config_runner/evaluation_creation.py
index 88394de..5236957 100644
--- a/ocw_config_runner/evaluation_creation.py
+++ b/ocw_config_runner/evaluation_creation.py
@@ -129,10 +129,10 @@
bounds = Bounds(subset[0], subset[1], subset[2], subset[3], start, end)
if reference:
- reference = dsp.safe_subset(bounds, reference)
+ reference = dsp.safe_subset(reference, bounds)
if targets:
- targets = [dsp.safe_subset(bounds, t) for t in targets]
+ targets = [dsp.safe_subset(t, bounds) for t in targets]
if temporal_time_delta:
resolution = timedelta(temporal_time_delta)
diff --git a/ocw_config_runner/example/time_series_plot_example.yaml b/ocw_config_runner/example/time_series_plot_example.yaml
index b5599cc..5e45229 100644
--- a/ocw_config_runner/example/time_series_plot_example.yaml
+++ b/ocw_config_runner/example/time_series_plot_example.yaml
@@ -24,7 +24,7 @@
plots:
- type: time_series
- time_range: monthly
+ temporal_boundaries: monthly
subregions:
- [-10.0, 0.0, -19.0, 19.0]
diff --git a/ocw_config_runner/plot_generation.py b/ocw_config_runner/plot_generation.py
index 392331d..3fc3adb 100644
--- a/ocw_config_runner/plot_generation.py
+++ b/ocw_config_runner/plot_generation.py
@@ -141,11 +141,11 @@
def _draw_time_series_plot(evaluation, plot_config):
""""""
- time_range_info = plot_config['time_range']
+ temporal_boundaries_info = plot_config['temporal_boundaries']
ref_ds = evaluation.ref_dataset
target_ds = evaluation.target_datasets
- if time_range_info == 'monthly':
+ if temporal_boundaries_info == 'monthly':
ref_ds.values, ref_ds.times = utils.calc_climatology_monthly(ref_ds)
for t in target_ds:
@@ -163,8 +163,8 @@
labels = []
subset = dsp.subset(
- bound,
ref_ds,
+ bound,
subregion_name="R{}_{}".format(bound_count, ref_ds.name)
)
@@ -173,8 +173,8 @@
for t in target_ds:
subset = dsp.subset(
- bound,
t,
+ bound,
subregion_name="R{}_{}".format(bound_count, t.name)
)
results.append(utils.calc_time_series(subset))
diff --git a/ocw_config_runner/tests/test_config_writer.py b/ocw_config_runner/tests/test_config_writer.py
index c961447..ed22417 100644
--- a/ocw_config_runner/tests/test_config_writer.py
+++ b/ocw_config_runner/tests/test_config_writer.py
@@ -535,7 +535,7 @@
subset = out['subset']
ds_lat_min, ds_lat_max, ds_lon_min, ds_lon_max = self.dataset.spatial_boundaries()
- start, end = self.dataset.time_range()
+ start, end = self.dataset.temporal_boundaries()
self.assertEqual(ds_lat_min, subset[0])
self.assertEqual(ds_lat_max, subset[1])
@@ -557,7 +557,7 @@
subset = out['subset']
ds_lat_min, ds_lat_max, ds_lon_min, ds_lon_max = self.dataset.spatial_boundaries()
- start, end = self.dataset.time_range()
+ start, end = self.dataset.temporal_boundaries()
self.assertEqual(ds_lat_min, subset[0])
# Check that we actually used the different max lat value that we